diff --git a/.cargo/config.toml b/.cargo/config.toml
index a813332b9..bd657aea0 100644
--- a/.cargo/config.toml
+++ b/.cargo/config.toml
@@ -7,7 +7,7 @@ linker = "x86_64-linux-gnu-gcc"
 [http]
 timeout = 30 # timeout for each HTTP request, in seconds
 low-speed-limit = 10 # network timeout threshold (bytes/sec)
-multiplexing = false
+multiplexing = true

 [net]
 retry = 3
diff --git a/Cargo.lock b/Cargo.lock
index b7122fdad..6d50f6a83 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1422,8 +1422,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f"
 dependencies = [
  "bitcoin_hashes 0.11.0",
- "rand 0.8.5",
- "rand_core 0.6.4",
+ "rand 0.7.3",
+ "rand_core 0.5.1",
  "serde",
  "unicode-normalization",
 ]
@@ -2101,14 +2101,15 @@
 version = "0.7.5"
 dependencies = [
  "cfg-if",
  "console-subscriber",
+ "log",
+ "opentelemetry 0.23.0",
+ "opentelemetry-appender-log",
+ "opentelemetry-appender-tracing",
  "opentelemetry-otlp",
- "opentelemetry-proto",
  "opentelemetry-semantic-conventions 0.15.0",
+ "opentelemetry-stdout",
  "opentelemetry_sdk",
  "tracing",
- "tracing-flame",
- "tracing-log 0.1.4",
- "tracing-opentelemetry",
  "tracing-subscriber 0.3.18",
  "url",
 ]
@@ -3005,9 +3006,9 @@ checksum = "4f8a51dd197fa6ba5b4dc98a990a43cc13693c23eb0089ebb0fcc1f04152bca6"

 [[package]]
 name = "cxx"
-version = "1.0.123"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8194f089b6da4751d6c1da1ef37c17255df51f9346cdb160f8b096562ae4a85c"
+checksum = "273dcfd3acd4e1e276af13ed2a43eea7001318823e7a726a6b3ed39b4acc0b82"
 dependencies = [
  "cc",
  "cxxbridge-flags",
@@ -3017,9 +3018,9 @@

 [[package]]
 name = "cxx-build"
-version = "1.0.123"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e8df9a089caae66634d754672d5f909395f30f38af6ff19366980d8a8b57501"
+checksum = "d8b2766fbd92be34e9ed143898fce6c572dc009de39506ed6903e5a05b68914e"
 dependencies = [
  "cc",
  "codespan-reporting",
@@ -3032,15 +3033,15 @@

 [[package]]
 name = "cxxbridge-flags"
-version = "1.0.123"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25290be4751803672a70b98c68b51c1e7d0a640ab5a4377f240f9d2e70054cd1"
+checksum = "839fcd5e43464614ffaa989eaf1c139ef1f0c51672a1ed08023307fa1b909ccd"

 [[package]]
 name = "cxxbridge-macro"
-version = "1.0.123"
+version = "1.0.124"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b8cb317cb13604b4752416783bb25070381c36e844743e4146b7f8e55de7d140"
+checksum = "4b2c1c1776b986979be68bb2285da855f8d8a35851a769fca8740df7c3d07877"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3307,22 +3308,22 @@

 [[package]]
 name = "derive_more"
-version = "0.99.17"
+version = "0.99.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
+checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce"
 dependencies = [
  "convert_case",
  "proc-macro2",
  "quote",
  "rustc_version",
- "syn 1.0.109",
+ "syn 2.0.66",
 ]

 [[package]]
 name = "diesel"
-version = "2.2.0"
+version = "2.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "35b696af9ff4c0d2a507db2c5faafa8aa0205e297e5f11e203a24226d5355e7a"
+checksum = "62d6dcd069e7b5fe49a302411f759d4cf1cf2c27fe798ef46fb8baefc053dd2b"
 dependencies = [
  "bitflags 2.5.0",
  "byteorder",
@@ -3336,9 +3337,9 @@ dependencies = [

 [[package]]
 name = "diesel_derives"
-version = "2.2.0"
+version = "2.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0d6fdd83d5947068817016e939596d246e5367279453f2a3433287894f2f2996"
+checksum = "59de76a222c2b8059f789cbe07afbfd8deb8c31dd0bc2a21f85e256c1def8259"
 dependencies = [
  "diesel_table_macro_syntax",
  "dsl_auto_type",
@@ -3544,9 +3545,9 @@ checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2"

 [[package]]
 name = "dsl_auto_type"
-version = "0.1.0"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ab32c18ea6760d951659768a3e35ea72fc1ba0916d665a88dfe048b2a41e543f"
+checksum = "0892a17df262a24294c382f0d5997571006e7a4348b4327557c4ff1cd4a8bccc"
 dependencies = [
  "darling 0.20.9",
  "either",
@@ -3858,13 +3859,14 @@ dependencies = [

 [[package]]
 name = "expander"
-version = "2.1.0"
+version = "2.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00e83c02035136f1592a47964ea60c05a50e4ed8b5892cfac197063850898d4d"
+checksum = "e2c470c71d91ecbd179935b24170459e926382eaaa86b590b78814e180d8a8e2"
 dependencies = [
  "blake2 0.10.6",
+ "file-guard",
  "fs-err",
- "prettier-please",
+ "prettyplease 0.2.20",
  "proc-macro2",
  "quote",
  "syn 2.0.66",
@@ -3956,6 +3958,16 @@ version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"

+[[package]]
+name = "file-guard"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21ef72acf95ec3d7dbf61275be556299490a245f017cf084bd23b4f68cf9407c"
+dependencies = [
+ "libc",
+ "winapi",
+]
+
 [[package]]
 name = "file-per-thread-logger"
 version = "0.1.6"
@@ -4928,6 +4940,12 @@ version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024"

+[[package]]
+name = "hermit-abi"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc"
+
 [[package]]
 name = "hex"
 version = "0.4.3"
@@ -5073,9 +5091,9 @@ checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f"

 [[package]]
 name = "httparse"
-version = "1.9.3"
+version = "1.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d0e7a4dd27b9476dc40cb050d3632d3bba3a70ddbff012285f7f8559a1e7e545"
+checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9"

 [[package]]
 name = "httpdate"
@@ -5222,7 +5240,7 @@ dependencies = [
  "iana-time-zone-haiku",
  "js-sys",
  "wasm-bindgen",
- "windows-core 0.52.0",
+ "windows-core",
 ]
@@ -5457,18 +5475,18 @@ dependencies = [

 [[package]]
 name = "include_dir"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e"
+checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd"
 dependencies = [
  "include_dir_macros",
 ]

 [[package]]
 name = "include_dir_macros"
-version = "0.7.3"
+version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f"
+checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -7078,9 +7096,9 @@ dependencies = [

 [[package]]
 name = "memchr"
-version = "2.7.2"
+version = "2.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"

 [[package]]
 name = "memfd"
@@ -8030,8 +8048,7 @@ dependencies = [

 [[package]]
 name = "opentelemetry"
 version = "0.23.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b69a91d4893e713e06f724597ad630f1fa76057a5e1026c0ca67054a9032a76"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
 dependencies = [
  "futures-core",
  "futures-sink",
@@ -8041,6 +8058,27 @@ dependencies = [
  "thiserror",
 ]

+[[package]]
+name = "opentelemetry-appender-log"
+version = "0.4.0"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
+dependencies = [
+ "log",
+ "opentelemetry 0.23.0",
+]
+
+[[package]]
+name = "opentelemetry-appender-tracing"
+version = "0.4.0"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
+dependencies = [
+ "once_cell",
+ "opentelemetry 0.23.0",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber 0.3.18",
+]
+
 [[package]]
 name = "opentelemetry-http"
 version = "0.11.1"
@@ -8056,8 +8094,7 @@ dependencies = [

 [[package]]
 name = "opentelemetry-otlp"
 version = "0.16.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a94c69209c05319cdf7460c6d4c055ed102be242a0a6245835d7bc42c6ec7f54"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
 dependencies = [
  "async-trait",
  "futures-core",
@@ -8074,8 +8111,7 @@ dependencies = [

 [[package]]
 name = "opentelemetry-proto"
 version = "0.6.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "984806e6cf27f2b49282e2a05e288f30594f3dbc74eb7a6e99422bc48ed78162"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
 dependencies = [
  "opentelemetry 0.23.0",
  "opentelemetry_sdk",
@@ -8092,26 +8128,39 @@ checksum = "f9ab5bd6c42fb9349dcf28af2ba9a0667f697f9bdcca045d39f2cec5543e2910"

 [[package]]
 name = "opentelemetry-semantic-conventions"
 version = "0.15.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1869fb4bb9b35c5ba8a1e40c9b128a7b4c010d07091e864a29da19e4fe2ca4d7"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
+
+[[package]]
+name = "opentelemetry-stdout"
+version = "0.4.0"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
+dependencies = [
+ "async-trait",
+ "chrono",
+ "futures-util",
+ "opentelemetry 0.23.0",
+ "opentelemetry_sdk",
+ "ordered-float",
+ "serde",
+ "serde_json",
+ "thiserror",
+]

 [[package]]
 name = "opentelemetry_sdk"
 version = "0.23.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae312d58eaa90a82d2e627fd86e075cf5230b3f11794e2ed74199ebbe572d4fd"
+source = "git+https://github.com/open-telemetry/opentelemetry-rust#da368d4085bcc3bbc8c3b2932a07827bc600dbbd"
 dependencies = [
  "async-trait",
  "futures-channel",
  "futures-executor",
  "futures-util",
  "glob",
- "lazy_static",
  "once_cell",
  "opentelemetry 0.23.0",
- "ordered-float",
  "percent-encoding",
  "rand 0.8.5",
+ "serde_json",
  "thiserror",
  "tokio",
  "tokio-stream",
@@ -8335,8 +8384,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4e69bf016dc406eff7d53a7d3f7cf1c2e72c82b9088aac1118591e36dd2cd3e9"
 dependencies = [
  "bitcoin_hashes 0.13.0",
- "rand 0.8.5",
- "rand_core 0.6.4",
+ "rand 0.7.3",
+ "rand_core 0.5.1",
  "serde",
  "unicode-normalization",
 ]
@@ -8450,7 +8499,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
 dependencies = [
  "cfg-if",
  "libc",
- "redox_syscall 0.5.1",
+ "redox_syscall 0.5.2",
  "smallvec",
  "windows-targets 0.52.5",
 ]
@@ -8926,13 +8975,13 @@ checksum = "26e85d3456948e650dff0cfc85603915847faf893ed1e66b020bb82ef4557120"

 [[package]]
 name = "polling"
-version = "3.7.1"
+version = "3.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e6a007746f34ed64099e88783b0ae369eaa3da6392868ba262e2af9b8fbaea1"
+checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b"
 dependencies = [
  "cfg-if",
  "concurrent-queue",
- "hermit-abi 0.3.9",
+ "hermit-abi 0.4.0",
  "pin-project-lite 0.2.14",
  "rustix 0.38.34",
  "tracing",
@@ -9039,16 +9088,6 @@ dependencies = [
  "termtree",
 ]

-[[package]]
-name = "prettier-please"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "22020dfcf177fcc7bf5deaf7440af371400c67c0de14c399938d8ed4fb4645d3"
-dependencies = [
- "proc-macro2",
- "syn 2.0.66",
-]
-
 [[package]]
 name = "pretty_dtoa"
 version = "0.3.0"
@@ -9060,9 +9099,9 @@ dependencies = [

 [[package]]
 name = "prettyplease"
-version = "0.1.11"
+version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f28f53e8b192565862cf99343194579a022eb9c7dd3a8d03134734803c7b3125"
+checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86"
 dependencies = [
  "proc-macro2",
  "syn 1.0.109",
@@ -9258,7 +9297,7 @@ dependencies = [
  "log",
  "multimap",
  "petgraph",
- "prettyplease 0.1.11",
+ "prettyplease 0.1.25",
  "prost 0.11.9",
  "prost-types 0.11.9",
  "regex",
@@ -9405,9 +9444,9 @@ dependencies = [

 [[package]]
 name = "psl"
-version = "2.1.43"
+version = "2.1.45"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ecec637c2e9d0c8c4bf78df069a53a103ebe3dbd0dc7eff1d60c1006a1c97254"
+checksum = "bac2a3a4c4b39bd07eccfb97bacc286859b7b1440e5e56fb9969beaace0ae308"
 dependencies = [
  "psl-types",
 ]
@@ -9721,9 +9760,9 @@ dependencies = [

 [[package]]
 name = "redox_syscall"
-version = "0.5.1"
+version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "469052894dcb553421e483e4209ee581a45100d31b4018de03e5a7ad86374a7e"
+checksum = "c82cf8cff14456045f55ec4241383baeff27af886adb72ffb2162f99911de0fd"
 dependencies = [
  "bitflags 2.5.0",
 ]
@@ -10027,15 +10066,6 @@ dependencies = [
  "windows-sys 0.48.0",
 ]

-[[package]]
-name = "runtime-api-chronicle"
-version = "1.0.0"
-dependencies = [
- "common 0.7.5",
- "sp-api",
- "sp-core 28.0.0 (git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-v1.9.0)",
-]
-
 [[package]]
 name = "runtime-chronicle"
 version = "1.0.0"
@@ -10056,7 +10086,6 @@ dependencies = [
  "pallet-sudo",
  "pallet-timestamp",
  "parity-scale-codec",
- "runtime-api-chronicle",
  "scale-info",
  "sp-api",
  "sp-block-builder",
@@ -10396,9 +10425,9 @@ checksum = "700de91d5fd6091442d00fdd9ee790af6d4f0f480562b0f5a1e8f59e90aafe73"

 [[package]]
 name = "safe_arch"
-version = "0.7.1"
+version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f398075ce1e6a179b46f51bd88d0598b92b00d3551f1a2d4ac49e771b56ac354"
+checksum = "c3460605018fdc9612bce72735cba0d27efbcd9904780d44c7e3a9948f96148a"
 dependencies = [
  "bytemuck",
 ]
@@ -11796,9 +11825,9 @@ dependencies = [

 [[package]]
 name = "serde_arrow"
-version = "0.11.5"
+version = "0.11.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ea79c630781d66e4804964730d2db7517f0568edac0ba6c50641933b1d5ad5d1"
+checksum = "ff56acef131ef74bacc5e86c5038b524d61dee59d65c9e3e5e0f35b9de98cf99"
 dependencies = [
  "arrow-array",
  "arrow-buffer",
@@ -12609,7 +12638,7 @@ dependencies = [

 [[package]]
 name = "sp-crypto-ec-utils"
 version = "0.10.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "ark-bls12-377",
  "ark-bls12-377-ext",
@@ -12682,7 +12711,7 @@ dependencies = [

 [[package]]
 name = "sp-debug-derive"
 version = "14.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -12715,7 +12744,7 @@ dependencies = [

 [[package]]
 name = "sp-externalities"
 version = "0.25.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "environmental",
  "parity-scale-codec",
@@ -12995,7 +13024,7 @@ dependencies = [

 [[package]]
 name = "sp-runtime-interface"
 version = "24.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "bytes",
  "impl-trait-for-tuples",
@@ -13041,7 +13070,7 @@ dependencies = [

 [[package]]
 name = "sp-runtime-interface-proc-macro"
 version = "17.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "Inflector",
  "expander",
@@ -13162,7 +13191,7 @@ source = "git+https://github.com/paritytech/polkadot-sdk.git?tag=polkadot-v1.9.0

 [[package]]
 name = "sp-std"
 version = "14.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"

 [[package]]
 name = "sp-storage"
@@ -13194,7 +13223,7 @@ dependencies = [

 [[package]]
 name = "sp-storage"
 version = "19.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "impl-serde",
  "parity-scale-codec",
@@ -13244,7 +13273,7 @@ dependencies = [

 [[package]]
 name = "sp-tracing"
 version = "16.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "parity-scale-codec",
  "tracing",
@@ -13383,7 +13412,7 @@ dependencies = [

 [[package]]
 name = "sp-wasm-interface"
 version = "20.0.0"
-source = "git+https://github.com/paritytech/polkadot-sdk#c4aa2ab642419e6751400a6aabaf5df611a4ea37"
+source = "git+https://github.com/paritytech/polkadot-sdk#55a13abcd2f67e7fdfc8843f5c4a54798e26a9df"
 dependencies = [
  "impl-trait-for-tuples",
  "log",
@@ -14438,7 +14467,11 @@ dependencies = [
  "percent-encoding",
  "pin-project",
  "prost 0.12.6",
+ "rustls-native-certs 0.7.0",
+ "rustls-pemfile 2.1.2",
+ "rustls-pki-types",
  "tokio",
+ "tokio-rustls 0.25.0",
  "tokio-stream",
  "tower",
  "tower-layer",
@@ -14529,17 +14562,6 @@ dependencies = [
  "valuable",
 ]

-[[package]]
-name = "tracing-flame"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bae117ee14789185e129aaee5d93750abe67fdc5a9a62650452bfe4e122a3a9"
-dependencies = [
- "lazy_static",
- "tracing",
- "tracing-subscriber 0.3.18",
-]
-
 [[package]]
 name = "tracing-futures"
 version = "0.2.5"
@@ -14572,24 +14594,6 @@ dependencies = [
  "tracing-core",
 ]

-[[package]]
-name = "tracing-opentelemetry"
-version = "0.24.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f68803492bf28ab40aeccaecc7021096bd256baf7ca77c3d425d89b35a7be4e4"
-dependencies = [
- "js-sys",
- "once_cell",
- "opentelemetry 0.23.0",
- "opentelemetry_sdk",
- "smallvec",
- "tracing",
- "tracing-core",
- "tracing-log 0.2.0",
- "tracing-subscriber 0.3.18",
- "web-time",
-]
-
 [[package]]
 name = "tracing-serde"
 version = "0.1.3"
@@ -14806,7 +14810,7 @@ checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675"
 dependencies = [
  "cfg-if",
  "digest 0.10.7",
- "rand 0.8.5",
+ "rand 0.7.3",
  "static_assertions",
 ]
@@ -15531,16 +15535,6 @@ dependencies = [
  "wasm-bindgen",
 ]

-[[package]]
-name = "web-time"
-version = "1.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
-dependencies = [
- "js-sys",
- "wasm-bindgen",
-]
-
 [[package]]
 name = "webpki"
 version = "0.22.4"
@@ -15637,7 +15631,7 @@ version = "0.51.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ca229916c5ee38c2f2bc1e9d8f04df975b4bd93f9955dc69fabb5d91270045c9"
 dependencies = [
- "windows-core 0.51.1",
+ "windows-core",
  "windows-targets 0.48.5",
 ]
@@ -15650,15 +15644,6 @@ dependencies = [
  "windows-targets 0.48.5",
 ]

-[[package]]
-name = "windows-core"
-version = "0.52.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
-dependencies = [
- "windows-targets 0.52.5",
-]
-
 [[package]]
 name = "windows-sys"
 version = "0.45.0"
@@ -16147,9 +16132,9 @@ dependencies = [

 [[package]]
 name = "zstd-sys"
-version = "2.0.10+zstd.1.5.6"
+version = "2.0.11+zstd.1.5.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa"
+checksum = "75652c55c0b6f3e6f12eb786fe1bc960396bf05a1eb3bf1f3691c3610ac2e6d4"
 dependencies = [
  "cc",
  "pkg-config",
diff --git a/Cargo.toml b/Cargo.toml
index 4a97ebb58..fb1bb144d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -23,7 +23,6 @@ members = [
 	"node/runtime-chronicle",
 	"node/node-chronicle",
 	"crates/embedded-substrate",
-	"crates/runtime-api-chronicle",
 	"crates/protocol-abstract",
 	"crates/protocol-substrate",
 	"crates/protocol-substrate-chronicle",
diff --git a/charts/chronicle/templates/statefulset.yaml b/charts/chronicle/templates/statefulset.yaml
index 29646409a..fb78a9010 100644
--- a/charts/chronicle/templates/statefulset.yaml
+++ b/charts/chronicle/templates/statefulset.yaml
@@ -69,8 +69,6 @@ spec:
           {{ end }}
           {{ end }}
-          echo "Waiting 20 seconds for postgres to start";
-          sleep 20;
           chronicle \
             --chronicle-key-from-path /vault/secrets \
             --batcher-key-from-path /vault/secrets \
@@ -98,7 +96,13 @@
            {{ include "chronicle.id-claims" . }}
          env: {{ include "lib.safeToYaml" .Values.env | nindent 12 }}
           - name: OTEL_EXPORTER_OTLP_ENDPOINT
-            value: "signoz-otel-collector-metrics.observability.svc:4317"
+            value: "grpc://signoz-otel-collector-metrics.observability.svc.cluster.local:4317"
+          - name: OTEL_LOG_LEVEL
+            value: debug
+          - name: OTEL_TRACES_EXPORTER
+            value: otlp
+          - name: OTEL_METRICS_EXPORTER
+            value: otlp
           - name: OTEL_RESOURCE_ATTRIBUTES
            value: "service.name=chronicle,service.instance.id={{ .Release.Name }}"
           - name: OTEL_SERVICE_NAME
diff --git a/crates/api/src/api.rs b/crates/api/src/api.rs
index bed50bf1b..248972a34 100644
--- a/crates/api/src/api.rs
+++ b/crates/api/src/api.rs
@@ -1,229 +1,264 @@
-use std::marker::PhantomData;
-use std::time::{Duration, Instant};
+use std::{
+	marker::PhantomData,
+	time::{Duration, Instant},
+};

 use async_graphql::futures_util::select;
 use chrono::{DateTime, Utc};
-use diesel::PgConnection;
-use diesel::r2d2::ConnectionManager;
+use diesel::{r2d2::ConnectionManager, PgConnection};
 use diesel_migrations::MigrationHarness;
-use futures::FutureExt;
-use futures::StreamExt;
+use futures::{FutureExt, StreamExt};
 use metrics::histogram;
 use metrics_exporter_prometheus::PrometheusBuilder;
 use r2d2::Pool;
-use tokio::sync::broadcast::Sender;
-use tokio::sync::mpsc;
-use tokio::sync::mpsc::Receiver;
-use tracing::{debug, error, info, info_span, instrument, Instrument, trace, warn};
+use tokio::sync::{broadcast::Sender, mpsc, mpsc::Receiver};
+use tracing::{debug, error, info, info_span, instrument, trace, warn, Instrument};
 use uuid::Uuid;

-use chronicle_persistence::{MIGRATIONS, Store, StoreError};
+use chronicle_persistence::{Store, StoreError, MIGRATIONS};
 use chronicle_signing::ChronicleSigning;
-use common::attributes::Attributes;
-use common::identity::AuthId;
-use common::ledger::{Commit, SubmissionError, SubmissionStage};
-use common::opa::PolicyAddress;
-use common::prov::{ActivityId, AgentId, ChronicleIri, ChronicleTransactionId, EntityId, ExternalId, ExternalIdPart, NamespaceId, ProvModel, Role, SYSTEM_ID, SYSTEM_UUID, UuidPart};
-use common::prov::json_ld::ToJson;
-use common::prov::operations::{ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation, CreateNamespace, DerivationType, EndActivity, EntityDerive, EntityExists, SetAttributes, StartActivity, WasAssociatedWith, WasAttributedTo, WasGeneratedBy, WasInformedBy};
+use common::{
+	attributes::Attributes,
+	identity::AuthId,
+	ledger::{Commit, SubmissionError, SubmissionStage},
+	opa::PolicyAddress,
+	prov::{
+		json_ld::ToJson,
+		operations::{
+			ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation,
+			CreateNamespace, DerivationType, EndActivity, EntityDerive, EntityExists,
+			SetAttributes, StartActivity, WasAssociatedWith, WasAttributedTo, WasGeneratedBy,
+			WasInformedBy,
+		},
+		ActivityId, AgentId, ChronicleIri, ChronicleTransactionId, EntityId, ExternalId,
+		ExternalIdPart, NamespaceId, ProvModel, Role, UuidPart, SYSTEM_ID, SYSTEM_UUID,
+	},
+};
 use protocol_substrate::SubxtClientError;
-use protocol_substrate_chronicle::{ChronicleEvent, ChronicleTransaction};
-use protocol_substrate_chronicle::protocol::{BlockId, FromBlock, LedgerReader, LedgerWriter};
-
-use crate::{ApiError, ChronicleSigned};
-use crate::commands::{ActivityCommand, AgentCommand, ApiCommand, ApiResponse, DepthChargeCommand, EntityCommand, ImportCommand, NamespaceCommand, QueryCommand};
-use crate::dispatch::{ApiDispatch, ApiSendWithReply};
+use protocol_substrate_chronicle::{
+	protocol::{BlockId, FromBlock, LedgerReader, LedgerWriter},
+	ChronicleEvent, ChronicleTransaction,
+};
+
+use crate::{
+	commands::{
+		ActivityCommand, AgentCommand, ApiCommand, ApiResponse, DepthChargeCommand, EntityCommand,
+		ImportCommand, NamespaceCommand, QueryCommand,
+	},
+	dispatch::{ApiDispatch, ApiSendWithReply},
+	ApiError, ChronicleSigned,
+};

 #[derive(Clone)]
 pub struct Api<
-    U: UuidGen + Send + Sync + Clone,
-    W: LedgerWriter
-    + Clone
-    + Send
-    + Sync
-    + 'static,
+	U: UuidGen + Send + Sync + Clone,
+	W: LedgerWriter
+		+ Clone
+		+ Send
+		+ Sync
+		+ 'static,
 > {
-    submit_tx: tokio::sync::broadcast::Sender,
-    signing: ChronicleSigning,
-    ledger_writer: W,
-    store: Store,
-    uuid_source: PhantomData,
+	submit_tx: tokio::sync::broadcast::Sender,
+	signing: ChronicleSigning,
+	ledger_writer: W,
+	store: Store,
+	uuid_source: PhantomData,
 }

 impl Api
-    where
-    U: UuidGen + Send + Sync + Clone + core::fmt::Debug + 'static,
-    LEDGER: LedgerWriter
-    + LedgerReader
-    + Clone
-    + Send
-    + Sync
-    + 'static,
+where
+	U: UuidGen + Send + Sync + Clone + core::fmt::Debug + 'static,
+	LEDGER: LedgerWriter
+		+ LedgerReader
+		+ Clone
+		+ Send
+		+ Sync
+		+ 'static,
 {
-    #[instrument(skip(ledger))]
-    pub async fn new(
-        pool: Pool>,
-        ledger: LEDGER,
-        uuidgen: U,
-        signing: ChronicleSigning,
-        namespace_bindings: Vec,
-        policy_address: Option,
-        liveness_check_interval: Option,
-    ) -> Result {
-        let (commit_tx, commit_rx) = mpsc::channel::(10);
-
-        let (commit_notify_tx, _) = tokio::sync::broadcast::channel(20);
-        let dispatch =
-            ApiDispatch { tx: commit_tx.clone(), notify_commit: commit_notify_tx.clone() };
-
-        let store = Store::new(pool.clone())?;
-
-        pool.get()?
-            .build_transaction()
-            .run(|connection| connection.run_pending_migrations(MIGRATIONS).map(|_| ()))
-            .map_err(StoreError::DbMigration)?;
-
-        let system_namespace_uuid = (SYSTEM_ID, Uuid::try_from(SYSTEM_UUID).unwrap());
-
-        // Append namespace bindings and system namespace
-        store.namespace_binding(system_namespace_uuid.0, system_namespace_uuid.1)?;
-        for ns in namespace_bindings {
-            info!(
 "Binding namespace with external ID: {}, UUID: {}",
 ns.external_id_part().as_str(),
 ns.uuid_part()
 );
-            store.namespace_binding(ns.external_id_part().as_str(), ns.uuid_part().to_owned())?
-        }
-
-        let reuse_reader = ledger.clone();
-
-        let last_seen_block = store.get_last_block_id();
-
-        let start_from_block = if let Ok(Some(start_from_block)) = last_seen_block {
-            FromBlock::BlockId(start_from_block)
-        } else {
-            FromBlock::First //Full catch up, as we have no last seen block
-        };
-
-        debug!(start_from_block = ?start_from_block, "Starting from block");
-
-        Self::event_loop(ledger, signing, commit_rx, commit_notify_tx, store, reuse_reader, start_from_block);
-
-        Self::depth_charge_loop(liveness_check_interval, &dispatch, system_namespace_uuid);
-
-        Ok(dispatch)
-    }
-
-    fn depth_charge_loop(liveness_check_interval: Option, dispatch: &ApiDispatch, system_namespace_uuid: (&'static str, Uuid)) {
-        if let Some(interval) = liveness_check_interval {
-            debug!("Starting liveness depth charge task");
-
-            let depth_charge_api = dispatch.clone();
-
-            tokio::task::spawn(async move {
-                // Configure and install Prometheus exporter
-                install_prometheus_metrics_exporter();
-
-                loop {
-                    tokio::time::sleep(Duration::from_secs(interval)).await;
-                    let api = depth_charge_api.clone();
-
-                    let start_time = Instant::now();
-
-                    let response = api
-                        .handle_depth_charge(system_namespace_uuid.0, &system_namespace_uuid.1)
-                        .await;
-
-                    match response {
-                        Ok(ApiResponse::DepthChargeSubmitted { tx_id }) => {
-                            let mut tx_notifications = api.notify_commit.subscribe();
-
-                            loop {
-                                let stage = match tx_notifications.recv().await {
-                                    Ok(stage) => stage,
-                                    Err(e) => {
-                                        error!("Error receiving depth charge transaction notifications: {}", e);
-                                        continue;
-                                    }
-                                };
-
-                                match stage {
-                                    SubmissionStage::Submitted(Ok(id)) =>
-                                        if id == tx_id {
-                                            debug!("Depth charge operation submitted: {id}");
-                                            continue;
-                                        },
-                                    SubmissionStage::Submitted(Err(err)) => {
-                                        if err.tx_id() == &tx_id {
-                                            error!("Depth charge transaction rejected by Chronicle: {} {}",
 err,
 err.tx_id()
 );
-                                            break;
-                                        }
-                                    }
-                                    SubmissionStage::Committed(commit, _) => {
-                                        if commit.tx_id == tx_id {
-                                            let end_time = Instant::now();
-                                            let elapsed_time = end_time - start_time;
-                                            debug!(
 "Depth charge transaction committed: {}",
 commit.tx_id
 );
-                                            debug!(
 "Depth charge round trip time: {:.2?}",
 elapsed_time
 );
-                                            let hist = histogram!("depth_charge_round_trip",);
-
-                                            hist.record(elapsed_time.as_millis() as f64);
-
-                                            break;
-                                        }
-                                    }
-                                    SubmissionStage::NotCommitted((id, contradiction, _)) => {
-                                        if id == tx_id {
-                                            error!("Depth charge transaction rejected by ledger: {id} {contradiction}");
-                                            break;
-                                        }
-                                    }
-                                }
-                            }
-                        }
 Ok(res) => error!("Unexpected ApiResponse from depth charge: {res:?}"),
 Err(e) => error!("ApiError submitting depth charge: {e}"),
-                    }
-                }
-            });
-        }
-    }
-
-    fn event_loop(ledger: LEDGER, signing: ChronicleSigning, mut commit_rx: Receiver, commit_notify_tx: Sender, store: Store, reuse_reader: LEDGER, start_from_block: FromBlock) {
-        tokio::task::spawn(async move {
-            let mut api = Api:: {
-                submit_tx: commit_notify_tx.clone(),
-                signing,
-                ledger_writer: ledger,
-                store: store.clone(),
-                uuid_source: PhantomData,
-            };
-
-            loop {
-                let state_updates = reuse_reader.clone();
-
-                let state_updates = state_updates.state_updates(start_from_block, None).await;
-
-                if let Err(e) = state_updates {
-                    error!(subscribe_to_events = ?e);
-                    tokio::time::sleep(Duration::from_secs(2)).await;
-                    continue;
-                }
-
-                let mut state_updates =
+	#[instrument(skip(ledger))]
+	pub async fn new(
+		pool: Pool>,
+		ledger: LEDGER,
+		uuidgen: U,
+		signing: ChronicleSigning,
+		namespace_bindings: Vec,
+		policy_address: Option,
+		liveness_check_interval: Option,
+	) -> Result {
+		let (commit_tx, commit_rx) = mpsc::channel::(10);
+
+		let (commit_notify_tx, _) = tokio::sync::broadcast::channel(20);
+		let dispatch =
+			ApiDispatch { tx: commit_tx.clone(), notify_commit: commit_notify_tx.clone() };
+
+		let store = Store::new(pool.clone())?;
+
+		pool.get()?
+			.build_transaction()
+			.run(|connection| connection.run_pending_migrations(MIGRATIONS).map(|_| ()))
+			.map_err(StoreError::DbMigration)?;
+
+		let system_namespace_uuid = (SYSTEM_ID, Uuid::try_from(SYSTEM_UUID).unwrap());
+
+		// Append namespace bindings and system namespace
+		store.namespace_binding(system_namespace_uuid.0, system_namespace_uuid.1)?;
+		for ns in namespace_bindings {
+			info!(
 "Binding namespace with external ID: {}, UUID: {}",
 ns.external_id_part().as_str(),
 ns.uuid_part()
 );
+			store.namespace_binding(ns.external_id_part().as_str(), ns.uuid_part().to_owned())?
+		}
+
+		let reuse_reader = ledger.clone();
+
+		let last_seen_block = store.get_last_block_id();
+
+		let start_from_block = if let Ok(Some(start_from_block)) = last_seen_block {
+			FromBlock::BlockId(start_from_block)
+		} else {
+			FromBlock::First //Full catch up, as we have no last seen block
+		};
+
+		debug!(start_from_block = ?start_from_block, "Starting from block");
+
+		Self::event_loop(
+			ledger,
+			signing,
+			commit_rx,
+			commit_notify_tx,
+			store,
+			reuse_reader,
+			start_from_block,
+		);
+
+		Self::depth_charge_loop(liveness_check_interval, &dispatch, system_namespace_uuid);
+
+		Ok(dispatch)
+	}
+
+	fn depth_charge_loop(
+		liveness_check_interval: Option,
+		dispatch: &ApiDispatch,
+		system_namespace_uuid: (&'static str, Uuid),
+	) {
+		if let Some(interval) = liveness_check_interval {
+			debug!("Starting liveness depth charge task");
+
+			let depth_charge_api = dispatch.clone();
+
+			tokio::task::spawn(async move {
+				// Configure and install Prometheus exporter
+				install_prometheus_metrics_exporter();
+
+				loop {
+					tokio::time::sleep(Duration::from_secs(interval)).await;
+					let api = depth_charge_api.clone();
+
+					let start_time = Instant::now();
+
+					let response = api
+						.handle_depth_charge(system_namespace_uuid.0, &system_namespace_uuid.1)
+						.await;
+
+					match response {
+						Ok(ApiResponse::DepthChargeSubmitted { tx_id }) => {
+							let mut tx_notifications = api.notify_commit.subscribe();
+
+							loop {
+								let stage = match tx_notifications.recv().await {
+									Ok(stage) => stage,
+									Err(e) => {
+										error!("Error receiving depth charge transaction notifications: {}", e);
+										continue;
+									},
+								};
+
+								match stage {
+									SubmissionStage::Submitted(Ok(id)) =>
+										if id == tx_id {
+											debug!("Depth charge operation submitted: {id}");
+											continue;
+										},
+									SubmissionStage::Submitted(Err(err)) => {
+										if err.tx_id() == &tx_id {
+											error!("Depth charge transaction rejected by Chronicle: {} {}",
 err,
 err.tx_id()
 );
+											break;
+										}
+									},
+									SubmissionStage::Committed(commit, _) => {
+										if commit.tx_id == tx_id {
+											let end_time = Instant::now();
+											let elapsed_time = end_time - start_time;
+											debug!(
 "Depth charge transaction committed: {}",
 commit.tx_id
 );
+											debug!(
 "Depth charge round trip time: {:.2?}",
 elapsed_time
 );
+											let hist = histogram!("depth_charge_round_trip",);
+
+											hist.record(elapsed_time.as_millis() as f64);
+
+											break;
+										}
+									},
+									SubmissionStage::NotCommitted((id, contradiction, _)) => {
+										if id == tx_id {
+											error!("Depth charge transaction rejected by ledger: {id} {contradiction}");
+											break;
+										}
+									},
+								}
+							}
+						},
+						Ok(res) => error!("Unexpected ApiResponse from depth charge: {res:?}"),
+						Err(e) => error!("ApiError submitting depth charge: {e}"),
+					}
+				}
+			});
+		}
+	}
+
+	fn event_loop(
+		ledger: LEDGER,
+		signing: ChronicleSigning,
+		mut commit_rx: Receiver,
+		commit_notify_tx: Sender,
+		store: Store,
+		reuse_reader: LEDGER,
+		start_from_block: FromBlock,
+	) {
+		tokio::task::spawn(async move {
+			let mut api = Api:: {
+				submit_tx: commit_notify_tx.clone(),
+				signing,
+				ledger_writer: ledger,
+				store: store.clone(),
+				uuid_source: PhantomData,
+			};
+
+			loop {
+				let state_updates = reuse_reader.clone();
+
+				let state_updates = state_updates.state_updates(start_from_block, None).await;
+
+				if let Err(e) = state_updates {
+					error!(subscribe_to_events = ?e);
+					tokio::time::sleep(Duration::from_secs(2)).await;
+					continue;
+				}
+
+				let mut state_updates = state_updates.unwrap();
+
+				loop {
+					select! {
 state = state_updates.next().fuse() =>{

 match state {
@@ -276,1209 +311,1211 @@ impl Api
 }
 complete => break
 }
-            }
-        }
-        });
-    }
-
-    /// Notify after a successful submission, depending on the consistency requirement TODO: set in
-    /// the transaction
-    fn submit_blocking(
-        &mut self,
-        tx: ChronicleTransaction,
-    ) -> Result {
-        let (submission, _id) = futures::executor::block_on(self.ledger_writer.pre_submit(tx))?;
-
-        let res =
-            futures::executor::block_on(self.ledger_writer.do_submit(
-                protocol_substrate_chronicle::protocol::WriteConsistency::Weak,
-                submission,
-            ));
-        match res {
-            Ok(tx_id) => {
-                self.submit_tx.send(SubmissionStage::submitted(&tx_id)).ok();
-                Ok(tx_id)
-            }
-            Err((e, id)) => {
-                // We need the cloneable SubmissionError wrapper here
-                let submission_error = SubmissionError::communication(&id, e.into());
-                self.submit_tx.send(SubmissionStage::submitted_error(&submission_error)).ok();
-                Err(submission_error.into())
-            }
-        }
-    }
-
-    /// Generate and submit the signed identity to send to the Transaction Processor along with the
-    /// transactions to be applied
-    fn submit(
-        &mut self,
-        id: impl Into,
-        identity: AuthId,
-        to_apply: Vec,
-    ) -> Result {
-        let identity = identity.signed_identity(&self.signing)?;
-        let model = ProvModel::from_tx(&to_apply).map_err(ApiError::Contradiction)?;
-        let tx_id = self.submit_blocking(futures::executor::block_on(
-            ChronicleTransaction::new(&self.signing, identity, to_apply),
-        )?)?;
-
-        Ok(ApiResponse::submission(id, model, tx_id))
-    }
-
-    /// Checks if ChronicleOperations resulting from Chronicle API calls will result in any changes
-    /// in state
-    ///
-    /// # Arguments
-    /// * `connection` - Connection to the Chronicle database
-    /// * `to_apply` - Chronicle operations resulting from an API call
-    #[instrument(skip(self, connection, to_apply))]
-    fn check_for_effects(
-        &mut self,
-        connection: &mut PgConnection,
-        to_apply: &Vec,
-    ) -> Result>, ApiError> {
-        debug!(checking_for_effects = to_apply.len());
-        let mut model = ProvModel::default();
-        let mut transactions = Vec::::with_capacity(to_apply.len());
-        for op in to_apply {
-            let mut applied_model = match op {
-                ChronicleOperation::CreateNamespace(CreateNamespace { id, .. }) => {
-                    let (namespace, _) =
-                        self.ensure_namespace(connection, id.external_id_part())?;
-                    model.namespace_context(&namespace);
-                    model
-                }
-                ChronicleOperation::AgentExists(AgentExists { ref namespace, ref id }) =>
-                    self.store.apply_prov_model_for_agent_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?,
-                ChronicleOperation::ActivityExists(ActivityExists { ref namespace, ref id }) =>
-                    self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?,
-                ChronicleOperation::EntityExists(EntityExists { ref namespace, ref id }) =>
-                    self.store.apply_prov_model_for_entity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?,
-                ChronicleOperation::ActivityUses(ActivityUses {
-                    ref namespace,
-                    ref id,
-                    ref activity,
-                }) => self.store.prov_model_for_usage(
-                    connection,
-                    model,
-                    id,
-                    activity,
-                    namespace.external_id_part(),
-                )?,
-                ChronicleOperation::SetAttributes(ref o) => match o {
-                    SetAttributes::Activity { namespace, id, .. } =>
-                        self.store.apply_prov_model_for_activity_id(
-                            connection,
-                            model,
-                            id,
-                            namespace.external_id_part(),
-                        )?,
-                    SetAttributes::Agent { namespace, id, .. } =>
-                        self.store.apply_prov_model_for_agent_id(
-                            connection,
-                            model,
-                            id,
-                            namespace.external_id_part(),
-                        )?,
-                    SetAttributes::Entity { namespace, id, .. } =>
-                        self.store.apply_prov_model_for_entity_id(
-                            connection,
-                            model,
-                            id,
-                            namespace.external_id_part(),
-                        )?,
-                },
-                ChronicleOperation::StartActivity(StartActivity { namespace, id, .. }) =>
-                    self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?,
-                ChronicleOperation::EndActivity(EndActivity { namespace, id, .. }) =>
-                    self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?,
-                ChronicleOperation::WasInformedBy(WasInformedBy {
-                    namespace,
-                    activity,
-                    informing_activity,
-                }) => {
-                    let model = self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        activity,
-                        namespace.external_id_part(),
-                    )?;
-                    self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        informing_activity,
-                        namespace.external_id_part(),
-                    )?
-                }
-                ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf {
-                    activity_id,
-                    responsible_id,
-                    delegate_id,
-                    namespace,
-                    ..
-                }) => {
-                    let model = self.store.apply_prov_model_for_agent_id(
-                        connection,
-                        model,
-                        responsible_id,
-                        namespace.external_id_part(),
-                    )?;
-                    let model = self.store.apply_prov_model_for_agent_id(
-                        connection,
-                        model,
-                        delegate_id,
-                        namespace.external_id_part(),
-                    )?;
-                    if let Some(id) = activity_id {
-                        self.store.apply_prov_model_for_activity_id(
-                            connection,
-                            model,
-                            id,
-                            namespace.external_id_part(),
-                        )?
-                    } else {
-                        model
-                    }
-                }
-                ChronicleOperation::WasAssociatedWith(WasAssociatedWith {
-                    namespace,
-                    activity_id,
-                    agent_id,
-                    ..
-                }) => {
-                    let model = self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        activity_id,
-                        namespace.external_id_part(),
-                    )?;
-
-                    self.store.apply_prov_model_for_agent_id(
-                        connection,
-                        model,
-                        agent_id,
-                        namespace.external_id_part(),
-                    )?
-                }
-                ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => {
-                    let model = self.store.apply_prov_model_for_activity_id(
-                        connection,
-                        model,
-                        activity,
-                        namespace.external_id_part(),
-                    )?;
-
-                    self.store.apply_prov_model_for_entity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?
-                }
-                ChronicleOperation::EntityDerive(EntityDerive {
-                    namespace,
-                    id,
-                    used_id,
-                    activity_id,
-                    ..
-                }) => {
-                    let model = self.store.apply_prov_model_for_entity_id(
-                        connection,
-                        model,
-                        id,
-                        namespace.external_id_part(),
-                    )?;
-
-                    let model = self.store.apply_prov_model_for_entity_id(
-                        connection,
-                        model,
-                        used_id,
-                        namespace.external_id_part(),
-                    )?;
-
-                    if let Some(id) = activity_id {
-                        self.store.apply_prov_model_for_activity_id(
-                            connection,
-                            model,
-                            id,
-                            namespace.external_id_part(),
-                        )?
-                    } else {
-                        model
-                    }
-                }
-                ChronicleOperation::WasAttributedTo(WasAttributedTo {
-                    namespace,
-                    entity_id,
-                    agent_id,
-                    ..
-                }) => {
-                    let model = self.store.apply_prov_model_for_entity_id(
-                        connection,
-                        model,
-                        entity_id,
-                        namespace.external_id_part(),
-                    )?;
-
-                    self.store.apply_prov_model_for_agent_id(
-                        connection,
-                        model,
-                        agent_id,
-                        namespace.external_id_part(),
-                    )?
-                }
-            };
-            let state = applied_model.clone();
-            applied_model.apply(op)?;
-            if state != applied_model {
-                transactions.push(op.clone());
-            }
-
-            model = applied_model;
-        }
-
-        if transactions.is_empty() {
-            Ok(None)
-        } else {
-            Ok(Some(transactions))
-        }
-    }
-
-    fn apply_effects_and_submit(
-        &mut self,
-        connection: &mut PgConnection,
-        id: impl Into,
-        identity: AuthId,
-        to_apply: Vec,
-        applying_new_namespace: bool,
-    ) -> Result {
-        if applying_new_namespace {
-            self.submit(id, identity, to_apply)
-        } else if let Some(to_apply) = self.check_for_effects(connection, &to_apply)? {
-            info!(sending_operations = to_apply.len());
-            self.submit(id, identity, to_apply)
-        } else {
-            info!("API call will not result in any data changes");
-            let model = ProvModel::from_tx(&to_apply)?;
-            Ok(ApiResponse::already_recorded(id, model))
-        }
-    }
-
-    /// Ensures that the named namespace exists, returns an existing namespace, and a vector
-    /// containing a `ChronicleTransaction` to create one if not present
-    ///
-    /// A namespace uri is of the form chronicle:ns:{external_id}:{uuid}
-    /// Namespaces must be globally unique, so are disambiguated by uuid but are locally referred to
-    /// by external_id only For coordination between chronicle nodes we also need a namespace
-    /// binding operation to tie the UUID from another instance to an external_id # Arguments
-    /// * `external_id` - an arbitrary namespace identifier
-    #[instrument(skip(self, connection))]
-    fn ensure_namespace(
-        &mut self,
-        connection: &mut PgConnection,
-        id: &ExternalId,
-    ) -> Result<(NamespaceId, Vec), ApiError> {
-        match self.store.namespace_by_external_id(connection, id) {
-            Ok((namespace_id, _)) => {
-                trace!(%id, "Namespace already exists.");
-                Ok((namespace_id, vec![]))
-            }
-            Err(e) => {
-                debug!(error = %e, %id, "Namespace does not exist, creating.");
-                let uuid = Uuid::new_v4();
-                let namespace_id = NamespaceId::from_external_id(id, uuid);
-                let create_namespace_op =
-                    ChronicleOperation::CreateNamespace(CreateNamespace::new(namespace_id.clone()));
-                Ok((namespace_id, vec![create_namespace_op]))
-            }
-        }
-    }
-
-    /// Creates and submits a (ChronicleTransaction::GenerateEntity), and possibly
-    /// (ChronicleTransaction::Domaintype) if specified
-    ///
-    /// We use our local store for the best guess at the activity, either by external_id or the last
-    /// one started as a convenience for command line
-    #[instrument(skip(self))]
-    async fn activity_generate(
-        &self,
-        id: EntityId,
-        namespace: ExternalId,
-        activity_id: ActivityId,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let create = ChronicleOperation::WasGeneratedBy(WasGeneratedBy {
-                    namespace,
-                    id: id.clone(),
-                    activity: activity_id,
-                });
-
-                to_apply.push(create);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Creates and submits a (ChronicleTransaction::ActivityUses), and possibly
-    /// (ChronicleTransaction::Domaintype) if specified We use our local store for the best guess at
-    /// the activity, either by name or the last one started as a convenience for command line
-    #[instrument(skip(self))]
-    async fn activity_use(
-        &self,
-        id: EntityId,
-        namespace: ExternalId,
-        activity_id: ActivityId,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let (id, to_apply) = {
-                    let create = ChronicleOperation::ActivityUses(ActivityUses {
-                        namespace,
-                        id: id.clone(),
-                        activity: activity_id,
-                    });
-
-                    to_apply.push(create);
-
-                    (id, to_apply)
-                };
-
-                api.apply_effects_and_submit(
-                    connection,
-                    id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Creates and submits a (ChronicleTransaction::ActivityWasInformedBy)
-    ///
-    /// We use our local store for the best guess at the activity, either by external_id or the last
-    /// one started as a convenience for command line
-    #[instrument(skip(self))]
-    async fn activity_was_informed_by(
-        &self,
-        id: ActivityId,
-        namespace: ExternalId,
-        informing_activity_id: ActivityId,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let (id, to_apply) = {
-                    let create = ChronicleOperation::WasInformedBy(WasInformedBy {
-                        namespace,
-                        activity: id.clone(),
-                        informing_activity: informing_activity_id,
-                    });
-
-                    to_apply.push(create);
-
-                    (id, to_apply)
-                };
-
-                api.apply_effects_and_submit(
-                    connection,
-                    id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Submits operations [`CreateEntity`], and [`SetAttributes::Entity`]
-    ///
-    /// We use our local store to see if the agent already exists, disambiguating the URI if so
-    #[instrument(skip(self))]
-    async fn create_entity(
-        &self,
-        id: EntityId,
-        namespace_id: ExternalId,
-        attributes: Attributes,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace_id)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let create = ChronicleOperation::EntityExists(EntityExists {
-                    namespace: namespace.clone(),
-                    id: id.clone(),
-                });
-
-                to_apply.push(create);
-
-                let set_type = ChronicleOperation::SetAttributes(SetAttributes::Entity {
-                    id: id.clone(),
-                    namespace,
-                    attributes,
-                });
-
-                to_apply.push(set_type);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Submits operations [`CreateActivity`], and [`SetAttributes::Activity`]
-    ///
-    /// We use our local store to see if the activity already exists, disambiguating the URI if so
-    #[instrument(skip(self))]
-    async fn create_activity(
-        &self,
-        activity_id: ExternalId,
-        namespace_id: ExternalId,
-        attributes: Attributes,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace_id)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let create = ChronicleOperation::ActivityExists(ActivityExists {
-                    namespace: namespace.clone(),
-                    id: ActivityId::from_external_id(&activity_id),
-                });
-
-                to_apply.push(create);
-
-                let set_type = ChronicleOperation::SetAttributes(SetAttributes::Activity {
-                    id: ActivityId::from_external_id(&activity_id),
-                    namespace,
-                    attributes,
-                });
-
-                to_apply.push(set_type);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    ActivityId::from_external_id(&activity_id),
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Submits operations [`CreateAgent`], and [`SetAttributes::Agent`]
-    ///
-    /// We use our local store to see if the agent already exists, disambiguating the URI if so
-    #[instrument(skip(self))]
-    async fn create_agent(
-        &self,
-        agent_id: ExternalId,
-        namespace: ExternalId,
-        attributes: Attributes,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let create = ChronicleOperation::AgentExists(AgentExists {
-                    id: AgentId::from_external_id(&agent_id),
-                    namespace: namespace.clone(),
-                });
-
-                to_apply.push(create);
-
-                let id = AgentId::from_external_id(&agent_id);
-                let set_type = ChronicleOperation::SetAttributes(SetAttributes::Agent {
-                    id: id.clone(),
-                    namespace,
-                    attributes,
-                });
-
-                to_apply.push(set_type);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    /// Creates and submits a (ChronicleTransaction::CreateNamespace) if the external_id part does
-    /// not already exist in local storage
-    async fn create_namespace(
-        &self,
-        name: &ExternalId,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        let name = name.to_owned();
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-            connection.build_transaction().run(|connection| {
-                let (namespace, to_apply) = api.ensure_namespace(connection, &name)?;
-
-                api.submit(namespace, identity, to_apply)
-            })
-        })
-        .await?
-    }
-
-    #[instrument(skip(self))]
-    async fn depth_charge(
-        &self,
-        namespace: NamespaceId,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-        let id = ActivityId::from_external_id(Uuid::new_v4().to_string());
-        tokio::task::spawn_blocking(move || {
-            let to_apply = vec![
-                ChronicleOperation::StartActivity(StartActivity {
-                    namespace: namespace.clone(),
-                    id: id.clone(),
-                    time: Utc::now().into(),
-                }),
-                ChronicleOperation::EndActivity(EndActivity {
-                    namespace,
-                    id,
-                    time: Utc::now().into(),
-                }),
-            ];
-            api.submit_depth_charge(identity, to_apply)
-        })
-        .await?
-    }
-
-    fn submit_depth_charge(
-        &mut self,
-        identity: AuthId,
-        to_apply: Vec,
-    ) -> Result {
-        let identity = identity.signed_identity(&self.signing)?;
-        let tx_id = self.submit_blocking(futures::executor::block_on(
-            ChronicleTransaction::new(&self.signing, identity, to_apply),
-        )?)?;
-        Ok(ApiResponse::depth_charge_submission(tx_id))
-    }
-
-    #[instrument(skip(self))]
-    async fn dispatch(&mut self, command: (ApiCommand, AuthId)) -> Result {
-        match command {
-            (ApiCommand::DepthCharge(DepthChargeCommand { namespace }), identity) =>
-                self.depth_charge(namespace, identity).await,
-            (ApiCommand::Import(ImportCommand { operations }), identity) =>
-                self.submit_import_operations(identity, operations).await,
-            (ApiCommand::NameSpace(NamespaceCommand::Create { id }), identity) =>
-                self.create_namespace(&id, identity).await,
-            (ApiCommand::Agent(AgentCommand::Create { id, namespace, attributes }), identity) =>
-                self.create_agent(id, namespace, attributes, identity).await,
-            (ApiCommand::Agent(AgentCommand::UseInContext { id, namespace }), _identity) =>
-                self.use_agent_in_cli_context(id, namespace).await,
-            (
-                ApiCommand::Agent(AgentCommand::Delegate {
-                    id,
-                    delegate,
-                    activity,
-                    namespace,
-                    role,
-                }),
-                identity,
-            ) => self.delegate(namespace, id, delegate, activity, role, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::Create { id, namespace, attributes }),
-                identity,
-            ) => self.create_activity(id, namespace, attributes, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::Instant { id, namespace, time, agent }),
-                identity,
-            ) => self.instant(id, namespace, time, agent, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::Start { id, namespace, time, agent }),
-                identity,
-            ) => self.start_activity(id, namespace, time, agent, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::End { id, namespace, time, agent }),
-                identity,
-            ) => self.end_activity(id, namespace, time, agent, identity).await,
-            (ApiCommand::Activity(ActivityCommand::Use { id, namespace, activity }), identity) =>
-                self.activity_use(id, namespace, activity, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::WasInformedBy {
-                    id,
-                    namespace,
-                    informing_activity,
-                }),
-                identity,
-            ) => self.activity_was_informed_by(id, namespace, informing_activity, identity).await,
-            (
-                ApiCommand::Activity(ActivityCommand::Associate {
-                    id,
-                    namespace,
-                    responsible,
-                    role,
-                }),
-                identity,
-            ) => self.associate(namespace, responsible, id, role, identity).await,
-            (
-                ApiCommand::Entity(EntityCommand::Attribute { id, namespace, responsible, role }),
-                identity,
-            ) => self.attribute(namespace, responsible, id, role, identity).await,
-            (ApiCommand::Entity(EntityCommand::Create { id, namespace, attributes }), identity) =>
-                self.create_entity(EntityId::from_external_id(&id), namespace, attributes, identity)
-                    .await,
-            (
-                ApiCommand::Activity(ActivityCommand::Generate { id, namespace, activity }),
-                identity,
-            ) => self.activity_generate(id, namespace, activity, identity).await,
-            (
-                ApiCommand::Entity(EntityCommand::Derive {
-                    id,
-                    namespace,
-                    activity,
-                    used_entity,
-                    derivation,
-                }),
-                identity,
-            ) =>
-                self.entity_derive(id, namespace, activity, used_entity, derivation, identity)
-                    .await,
-            (ApiCommand::Query(query), _identity) => self.query(query).await,
-        }
-    }
-
-    #[instrument(skip(self))]
-    async fn delegate(
-        &self,
-        namespace: ExternalId,
-        responsible_id: AgentId,
-        delegate_id: AgentId,
-        activity_id: Option,
-        role: Option,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let tx = ChronicleOperation::agent_acts_on_behalf_of(
-                    namespace,
-                    responsible_id.clone(),
-                    delegate_id,
-                    activity_id,
-                    role,
-                );
-
-                to_apply.push(tx);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    responsible_id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    #[instrument(skip(self))]
-    async fn associate(
-        &self,
-        namespace: ExternalId,
-        responsible_id: AgentId,
-        activity_id: ActivityId,
-        role: Option,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let tx = ChronicleOperation::was_associated_with(
-                    namespace,
-                    activity_id,
-                    responsible_id.clone(),
-                    role,
-                );
-
-                to_apply.push(tx);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    responsible_id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
-    }
-
-    #[instrument(skip(self))]
-    async fn attribute(
-        &self,
-        namespace: ExternalId,
-        responsible_id: AgentId,
-        entity_id: EntityId,
-        role: Option,
-        identity: AuthId,
-    ) -> Result {
-        let mut api = self.clone();
-
-        tokio::task::spawn_blocking(move || {
-            let mut connection = api.store.connection()?;
-
-            connection.build_transaction().run(|connection| {
-                let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
-
-                let applying_new_namespace = !to_apply.is_empty();
-
-                let tx = ChronicleOperation::was_attributed_to(
-                    namespace,
-                    entity_id,
-                    responsible_id.clone(),
-                    role,
-                );
-
-                to_apply.push(tx);
-
-                api.apply_effects_and_submit(
-                    connection,
-                    responsible_id,
-                    identity,
-                    to_apply,
-                    applying_new_namespace,
-                )
-            })
-        })
-        .await?
- } - - #[instrument(skip(self))] - async fn entity_derive( - &self, - id: EntityId, - namespace: ExternalId, - activity_id: Option, - used_id: EntityId, - typ: DerivationType, - identity: AuthId, - ) -> Result { - let mut api = self.clone(); - - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - - connection.build_transaction().run(|connection| { - let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; - - let applying_new_namespace = !to_apply.is_empty(); - - let tx = ChronicleOperation::EntityDerive(EntityDerive { - namespace, - id: id.clone(), - used_id: used_id.clone(), - activity_id: activity_id.clone(), - typ, - }); - - to_apply.push(tx); - - api.apply_effects_and_submit( - connection, - id, - identity, - to_apply, - applying_new_namespace, - ) - }) - }) - .await? - } - - async fn query(&self, query: QueryCommand) -> Result { - let api = self.clone(); - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - - let (id, _) = api - .store - .namespace_by_external_id(&mut connection, &ExternalId::from(&query.namespace))?; - Ok(ApiResponse::query_reply(api.store.load_prov_model_for_namespace(&mut connection, &id)?)) - }) - .await? - } - - async fn submit_import_operations( - &self, - identity: AuthId, - operations: Vec, - ) -> Result { - let mut api = self.clone(); - let identity = identity.signed_identity(&self.signing)?; - let model = ProvModel::from_tx(&operations)?; - let signer = self.signing.clone(); - tokio::task::spawn_blocking(move || { - // Check here to ensure that import operations result in data changes - let mut connection = api.store.connection()?; - connection.build_transaction().run(|connection| { - if let Some(operations_to_apply) = api.check_for_effects(connection, &operations)? 
{ - trace!( + } + } + }); + } + + /// Notify after a successful submission, depending on the consistency requirement TODO: set in + /// the transaction + fn submit_blocking( + &mut self, + tx: ChronicleTransaction, + ) -> Result { + let (submission, _id) = futures::executor::block_on(self.ledger_writer.pre_submit(tx))?; + + let res = + futures::executor::block_on(self.ledger_writer.do_submit( + protocol_substrate_chronicle::protocol::WriteConsistency::Weak, + submission, + )); + match res { + Ok(tx_id) => { + self.submit_tx.send(SubmissionStage::submitted(&tx_id)).ok(); + Ok(tx_id) + }, + Err((e, id)) => { + // We need the cloneable SubmissionError wrapper here + let submission_error = SubmissionError::communication(&id, e.into()); + self.submit_tx.send(SubmissionStage::submitted_error(&submission_error)).ok(); + Err(submission_error.into()) + }, + } + } + + /// Generate and submit the signed identity to send to the Transaction Processor along with the + /// transactions to be applied + fn submit( + &mut self, + id: impl Into, + identity: AuthId, + to_apply: Vec, + ) -> Result { + let identity = identity.signed_identity(&self.signing)?; + let model = ProvModel::from_tx(&to_apply).map_err(ApiError::Contradiction)?; + let tx_id = self.submit_blocking(futures::executor::block_on( + ChronicleTransaction::new(&self.signing, identity, to_apply), + )?)?; + + Ok(ApiResponse::submission(id, model, tx_id)) + } + + /// Checks if ChronicleOperations resulting from Chronicle API calls will result in any changes + /// in state + /// + /// # Arguments + /// * `connection` - Connection to the Chronicle database + /// * `to_apply` - Chronicle operations resulting from an API call + #[instrument(skip(self, connection, to_apply))] + fn check_for_effects( + &mut self, + connection: &mut PgConnection, + to_apply: &Vec, + ) -> Result>, ApiError> { + debug!(checking_for_effects = to_apply.len()); + let mut model = ProvModel::default(); + let mut transactions = Vec::::with_capacity(to_apply.len()); + for op in to_apply { + let mut applied_model = match op { + ChronicleOperation::CreateNamespace(CreateNamespace { id, .. }) => { + let (namespace, _) = + self.ensure_namespace(connection, id.external_id_part())?; + model.namespace_context(&namespace); + model + }, + ChronicleOperation::AgentExists(AgentExists { ref namespace, ref id }) => + self.store.apply_prov_model_for_agent_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + ChronicleOperation::ActivityExists(ActivityExists { ref namespace, ref id }) => + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + ChronicleOperation::EntityExists(EntityExists { ref namespace, ref id }) => + self.store.apply_prov_model_for_entity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + ChronicleOperation::ActivityUses(ActivityUses { + ref namespace, + ref id, + ref activity, + }) => self.store.prov_model_for_usage( + connection, + model, + id, + activity, + namespace.external_id_part(), + )?, + ChronicleOperation::SetAttributes(ref o) => match o { + SetAttributes::Activity { namespace, id, .. } => + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + SetAttributes::Agent { namespace, id, .. } => + self.store.apply_prov_model_for_agent_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + SetAttributes::Entity { namespace, id, .. 
} => + self.store.apply_prov_model_for_entity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + }, + ChronicleOperation::StartActivity(StartActivity { namespace, id, .. }) => + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + ChronicleOperation::EndActivity(EndActivity { namespace, id, .. }) => + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )?, + ChronicleOperation::WasInformedBy(WasInformedBy { + namespace, + activity, + informing_activity, + }) => { + let model = self.store.apply_prov_model_for_activity_id( + connection, + model, + activity, + namespace.external_id_part(), + )?; + self.store.apply_prov_model_for_activity_id( + connection, + model, + informing_activity, + namespace.external_id_part(), + )? + }, + ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { + activity_id, + responsible_id, + delegate_id, + namespace, + .. + }) => { + let model = self.store.apply_prov_model_for_agent_id( + connection, + model, + responsible_id, + namespace.external_id_part(), + )?; + let model = self.store.apply_prov_model_for_agent_id( + connection, + model, + delegate_id, + namespace.external_id_part(), + )?; + if let Some(id) = activity_id { + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )? + } else { + model + } + }, + ChronicleOperation::WasAssociatedWith(WasAssociatedWith { + namespace, + activity_id, + agent_id, + .. + }) => { + let model = self.store.apply_prov_model_for_activity_id( + connection, + model, + activity_id, + namespace.external_id_part(), + )?; + + self.store.apply_prov_model_for_agent_id( + connection, + model, + agent_id, + namespace.external_id_part(), + )? + }, + ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => { + let model = self.store.apply_prov_model_for_activity_id( + connection, + model, + activity, + namespace.external_id_part(), + )?; + + self.store.apply_prov_model_for_entity_id( + connection, + model, + id, + namespace.external_id_part(), + )? + }, + ChronicleOperation::EntityDerive(EntityDerive { + namespace, + id, + used_id, + activity_id, + .. + }) => { + let model = self.store.apply_prov_model_for_entity_id( + connection, + model, + id, + namespace.external_id_part(), + )?; + + let model = self.store.apply_prov_model_for_entity_id( + connection, + model, + used_id, + namespace.external_id_part(), + )?; + + if let Some(id) = activity_id { + self.store.apply_prov_model_for_activity_id( + connection, + model, + id, + namespace.external_id_part(), + )? + } else { + model + } + }, + ChronicleOperation::WasAttributedTo(WasAttributedTo { + namespace, + entity_id, + agent_id, + .. + }) => { + let model = self.store.apply_prov_model_for_entity_id( + connection, + model, + entity_id, + namespace.external_id_part(), + )?; + + self.store.apply_prov_model_for_agent_id( + connection, + model, + agent_id, + namespace.external_id_part(), + )? 
+				},
+			};
+			let state = applied_model.clone();
+			applied_model.apply(op)?;
+			if state != applied_model {
+				transactions.push(op.clone());
+			}
+
+			model = applied_model;
+		}
+
+		if transactions.is_empty() {
+			Ok(None)
+		} else {
+			Ok(Some(transactions))
+		}
+	}
+
+	fn apply_effects_and_submit(
+		&mut self,
+		connection: &mut PgConnection,
+		id: impl Into<ChronicleIri>,
+		identity: AuthId,
+		to_apply: Vec<ChronicleOperation>,
+		applying_new_namespace: bool,
+	) -> Result<ApiResponse, ApiError> {
+		if applying_new_namespace {
+			self.submit(id, identity, to_apply)
+		} else if let Some(to_apply) = self.check_for_effects(connection, &to_apply)? {
+			info!(sending_operations = to_apply.len());
+			self.submit(id, identity, to_apply)
+		} else {
+			info!("API call will not result in any data changes");
+			let model = ProvModel::from_tx(&to_apply)?;
+			Ok(ApiResponse::already_recorded(id, model))
+		}
+	}
+
+	/// Ensures that the named namespace exists; returns the namespace id, plus a vector
+	/// containing a `ChronicleOperation` to create the namespace if it was not already present.
+	///
+	/// A namespace URI is of the form chronicle:ns:{external_id}:{uuid}.
+	/// Namespaces must be globally unique, so they are disambiguated by UUID but are locally
+	/// referred to by external_id only. For coordination between Chronicle nodes we also need a
+	/// namespace binding operation to tie the UUID from another instance to an external_id.
+	///
+	/// # Arguments
+	/// * `external_id` - an arbitrary namespace identifier
+	#[instrument(skip(self, connection))]
+	fn ensure_namespace(
+		&mut self,
+		connection: &mut PgConnection,
+		id: &ExternalId,
+	) -> Result<(NamespaceId, Vec<ChronicleOperation>), ApiError> {
+		match self.store.namespace_by_external_id(connection, id) {
+			Ok((namespace_id, _)) => {
+				trace!(%id, "Namespace already exists.");
+				Ok((namespace_id, vec![]))
+			},
+			Err(e) => {
+				debug!(error = %e, %id, "Namespace does not exist, creating.");
+				let uuid = Uuid::new_v4();
+				let namespace_id = NamespaceId::from_external_id(id, uuid);
+				let create_namespace_op =
+					ChronicleOperation::CreateNamespace(CreateNamespace::new(namespace_id.clone()));
+				Ok((namespace_id, vec![create_namespace_op]))
+			},
+		}
+	}
+
+	/// Creates and submits a (ChronicleTransaction::GenerateEntity), and possibly
+	/// (ChronicleTransaction::Domaintype) if specified.
+	///
+	/// We use our local store for the best guess at the activity, either by external_id or the
+	/// last one started, as a convenience for the command line.
+	#[instrument(skip(self))]
+	async fn activity_generate(
+		&self,
+		id: EntityId,
+		namespace: ExternalId,
+		activity_id: ActivityId,
+		identity: AuthId,
+	) -> Result<ApiResponse, ApiError> {
+		let mut api = self.clone();
+		tokio::task::spawn_blocking(move || {
+			let mut connection = api.store.connection()?;
+
+			connection.build_transaction().run(|connection| {
+				let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
+
+				let applying_new_namespace = !to_apply.is_empty();
+
+				let create = ChronicleOperation::WasGeneratedBy(WasGeneratedBy {
+					namespace,
+					id: id.clone(),
+					activity: activity_id,
+				});
+
+				to_apply.push(create);
+
+				api.apply_effects_and_submit(
+					connection,
+					id,
+					identity,
+					to_apply,
+					applying_new_namespace,
+				)
+			})
+		})
+		.await?
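// A minimal, self-contained sketch of the dry-run pattern used by check_for_effects
// and apply_effects_and_submit above, with a toy `Model` standing in for ProvModel.
// The names below are illustrative assumptions, not this crate's API. Each operation
// is applied to a copy of the current state, and only operations that actually change
// state are kept for submission, so replayed commands become no-ops.

#[derive(Clone, PartialEq, Default, Debug)]
struct Model {
    facts: std::collections::BTreeSet<String>,
}

fn effective_ops(model: &mut Model, to_apply: &[String]) -> Option<Vec<String>> {
    let mut kept = Vec::with_capacity(to_apply.len());
    for op in to_apply {
        let mut next = model.clone();
        next.facts.insert(op.clone()); // "apply" the op to a copy of the state
        if next != *model {
            kept.push(op.clone()); // keep only state-changing ops
        }
        *model = next;
    }
    if kept.is_empty() { None } else { Some(kept) }
}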
+	}
+
+	/// Creates and submits a (ChronicleTransaction::ActivityUses), and possibly
+	/// (ChronicleTransaction::Domaintype) if specified.
+	///
+	/// We use our local store for the best guess at the activity, either by name or the last
+	/// one started, as a convenience for the command line.
+	#[instrument(skip(self))]
+	async fn activity_use(
+		&self,
+		id: EntityId,
+		namespace: ExternalId,
+		activity_id: ActivityId,
+		identity: AuthId,
+	) -> Result<ApiResponse, ApiError> {
+		let mut api = self.clone();
+		tokio::task::spawn_blocking(move || {
+			let mut connection = api.store.connection()?;
+
+			connection.build_transaction().run(|connection| {
+				let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
+
+				let applying_new_namespace = !to_apply.is_empty();
+
+				let (id, to_apply) = {
+					let create = ChronicleOperation::ActivityUses(ActivityUses {
+						namespace,
+						id: id.clone(),
+						activity: activity_id,
+					});
+
+					to_apply.push(create);
+
+					(id, to_apply)
+				};
+
+				api.apply_effects_and_submit(
+					connection,
+					id,
+					identity,
+					to_apply,
+					applying_new_namespace,
+				)
+			})
+		})
+		.await?
+	}
+
+	/// Creates and submits a (ChronicleTransaction::ActivityWasInformedBy).
+	///
+	/// We use our local store for the best guess at the activity, either by external_id or the
+	/// last one started, as a convenience for the command line.
+	#[instrument(skip(self))]
+	async fn activity_was_informed_by(
+		&self,
+		id: ActivityId,
+		namespace: ExternalId,
+		informing_activity_id: ActivityId,
+		identity: AuthId,
+	) -> Result<ApiResponse, ApiError> {
+		let mut api = self.clone();
+		tokio::task::spawn_blocking(move || {
+			let mut connection = api.store.connection()?;
+
+			connection.build_transaction().run(|connection| {
+				let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?;
+
+				let applying_new_namespace = !to_apply.is_empty();
+
+				let (id, to_apply) = {
+					let create = ChronicleOperation::WasInformedBy(WasInformedBy {
+						namespace,
+						activity: id.clone(),
+						informing_activity: informing_activity_id,
+					});
+
+					to_apply.push(create);
+
+					(id, to_apply)
+				};
+
+				api.apply_effects_and_submit(
+					connection,
+					id,
+					identity,
+					to_apply,
+					applying_new_namespace,
+				)
+			})
+		})
+		.await?
+	}
+
+	/// Submits operations [`CreateEntity`], and [`SetAttributes::Entity`]
+	///
+	/// We use our local store to see if the entity already exists, disambiguating the URI if so.
+	#[instrument(skip(self))]
+	async fn create_entity(
+		&self,
+		id: EntityId,
+		namespace_id: ExternalId,
+		attributes: Attributes,
+		identity: AuthId,
+	) -> Result<ApiResponse, ApiError> {
+		let mut api = self.clone();
+		tokio::task::spawn_blocking(move || {
+			let mut connection = api.store.connection()?;
+
+			connection.build_transaction().run(|connection| {
+				let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace_id)?;
+
+				let applying_new_namespace = !to_apply.is_empty();
+
+				let create = ChronicleOperation::EntityExists(EntityExists {
+					namespace: namespace.clone(),
+					id: id.clone(),
+				});
+
+				to_apply.push(create);
+
+				let set_type = ChronicleOperation::SetAttributes(SetAttributes::Entity {
+					id: id.clone(),
+					namespace,
+					attributes,
+				});
+
+				to_apply.push(set_type);
+
+				api.apply_effects_and_submit(
+					connection,
+					id,
+					identity,
+					to_apply,
+					applying_new_namespace,
+				)
+			})
+		})
+		.await?
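// The create_* methods above all follow one shape: ensure the namespace (which may
// enqueue a CreateNamespace op), append an existence op, append a SetAttributes op,
// then hand the batch to apply_effects_and_submit. A toy, self-contained rendering of
// that batching; the enum and field names here are stand-ins, not the crate's types.

#[derive(Debug)]
enum Op {
    CreateNamespace { ns: String },
    EntityExists { ns: String, id: String },
    SetEntityAttributes { ns: String, id: String, attrs: Vec<(String, String)> },
}

fn create_entity_ops(
    ns_exists: bool,
    ns: &str,
    id: &str,
    attrs: Vec<(String, String)>,
) -> Vec<Op> {
    let mut to_apply = Vec::new();
    if !ns_exists {
        // a brand-new namespace always means a state change, so dedup is skipped
        to_apply.push(Op::CreateNamespace { ns: ns.to_string() });
    }
    to_apply.push(Op::EntityExists { ns: ns.to_string(), id: id.to_string() });
    to_apply.push(Op::SetEntityAttributes { ns: ns.to_string(), id: id.to_string(), attrs });
    to_apply
}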
+ } + + /// Submits operations [`CreateActivity`], and [`SetAttributes::Activity`] + /// + /// We use our local store to see if the activity already exists, disambiguating the URI if so + #[instrument(skip(self))] + async fn create_activity( + &self, + activity_id: ExternalId, + namespace_id: ExternalId, + attributes: Attributes, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace_id)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let create = ChronicleOperation::ActivityExists(ActivityExists { + namespace: namespace.clone(), + id: ActivityId::from_external_id(&activity_id), + }); + + to_apply.push(create); + + let set_type = ChronicleOperation::SetAttributes(SetAttributes::Activity { + id: ActivityId::from_external_id(&activity_id), + namespace, + attributes, + }); + + to_apply.push(set_type); + + api.apply_effects_and_submit( + connection, + ActivityId::from_external_id(&activity_id), + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + /// Submits operations [`CreateAgent`], and [`SetAttributes::Agent`] + /// + /// We use our local store to see if the agent already exists, disambiguating the URI if so + #[instrument(skip(self))] + async fn create_agent( + &self, + agent_id: ExternalId, + namespace: ExternalId, + attributes: Attributes, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let create = ChronicleOperation::AgentExists(AgentExists { + id: AgentId::from_external_id(&agent_id), + namespace: namespace.clone(), + }); + + to_apply.push(create); + + let id = AgentId::from_external_id(&agent_id); + let set_type = ChronicleOperation::SetAttributes(SetAttributes::Agent { + id: id.clone(), + namespace, + attributes, + }); + + to_apply.push(set_type); + + api.apply_effects_and_submit( + connection, + id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + /// Creates and submits a (ChronicleTransaction::CreateNamespace) if the external_id part does + /// not already exist in local storage + async fn create_namespace( + &self, + name: &ExternalId, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + let name = name.to_owned(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + connection.build_transaction().run(|connection| { + let (namespace, to_apply) = api.ensure_namespace(connection, &name)?; + + api.submit(namespace, identity, to_apply) + }) + }) + .await? + } + + #[instrument(skip(self))] + async fn depth_charge( + &self, + namespace: NamespaceId, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + let id = ActivityId::from_external_id(Uuid::new_v4().to_string()); + tokio::task::spawn_blocking(move || { + let to_apply = vec![ + ChronicleOperation::StartActivity(StartActivity { + namespace: namespace.clone(), + id: id.clone(), + time: Utc::now().into(), + }), + ChronicleOperation::EndActivity(EndActivity { + namespace, + id, + time: Utc::now().into(), + }), + ]; + api.submit_depth_charge(identity, to_apply) + }) + .await? 
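// depth_charge above is a liveness probe: a throwaway activity keyed by a random UUID
// is started and immediately ended, and the pair is submitted via submit_depth_charge
// without the usual check_for_effects dedup, so every probe reaches the ledger. A
// simplified sketch of the payload construction; only uuid and chrono, which the code
// above already uses, are assumed, and the tuple shape is illustrative.

use chrono::Utc;
use uuid::Uuid;

fn depth_charge_payload() -> [(String, String, chrono::DateTime<Utc>); 2] {
    // unique per probe, so it can never collide with user-created activity ids
    let id = Uuid::new_v4().to_string();
    [
        ("StartActivity".to_string(), id.clone(), Utc::now()),
        ("EndActivity".to_string(), id, Utc::now()),
    ]
}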
+ } + + fn submit_depth_charge( + &mut self, + identity: AuthId, + to_apply: Vec, + ) -> Result { + let identity = identity.signed_identity(&self.signing)?; + let tx_id = self.submit_blocking(futures::executor::block_on( + ChronicleTransaction::new(&self.signing, identity, to_apply), + )?)?; + Ok(ApiResponse::depth_charge_submission(tx_id)) + } + + #[instrument(skip(self))] + async fn dispatch(&mut self, command: (ApiCommand, AuthId)) -> Result { + match command { + (ApiCommand::DepthCharge(DepthChargeCommand { namespace }), identity) => + self.depth_charge(namespace, identity).await, + (ApiCommand::Import(ImportCommand { operations }), identity) => + self.submit_import_operations(identity, operations).await, + (ApiCommand::NameSpace(NamespaceCommand::Create { id }), identity) => + self.create_namespace(&id, identity).await, + (ApiCommand::Agent(AgentCommand::Create { id, namespace, attributes }), identity) => + self.create_agent(id, namespace, attributes, identity).await, + (ApiCommand::Agent(AgentCommand::UseInContext { id, namespace }), _identity) => + self.use_agent_in_cli_context(id, namespace).await, + ( + ApiCommand::Agent(AgentCommand::Delegate { + id, + delegate, + activity, + namespace, + role, + }), + identity, + ) => self.delegate(namespace, id, delegate, activity, role, identity).await, + ( + ApiCommand::Activity(ActivityCommand::Create { id, namespace, attributes }), + identity, + ) => self.create_activity(id, namespace, attributes, identity).await, + ( + ApiCommand::Activity(ActivityCommand::Instant { id, namespace, time, agent }), + identity, + ) => self.instant(id, namespace, time, agent, identity).await, + ( + ApiCommand::Activity(ActivityCommand::Start { id, namespace, time, agent }), + identity, + ) => self.start_activity(id, namespace, time, agent, identity).await, + ( + ApiCommand::Activity(ActivityCommand::End { id, namespace, time, agent }), + identity, + ) => self.end_activity(id, namespace, time, agent, identity).await, + (ApiCommand::Activity(ActivityCommand::Use { id, namespace, activity }), identity) => + self.activity_use(id, namespace, activity, identity).await, + ( + ApiCommand::Activity(ActivityCommand::WasInformedBy { + id, + namespace, + informing_activity, + }), + identity, + ) => self.activity_was_informed_by(id, namespace, informing_activity, identity).await, + ( + ApiCommand::Activity(ActivityCommand::Associate { + id, + namespace, + responsible, + role, + }), + identity, + ) => self.associate(namespace, responsible, id, role, identity).await, + ( + ApiCommand::Entity(EntityCommand::Attribute { id, namespace, responsible, role }), + identity, + ) => self.attribute(namespace, responsible, id, role, identity).await, + (ApiCommand::Entity(EntityCommand::Create { id, namespace, attributes }), identity) => + self.create_entity(EntityId::from_external_id(&id), namespace, attributes, identity) + .await, + ( + ApiCommand::Activity(ActivityCommand::Generate { id, namespace, activity }), + identity, + ) => self.activity_generate(id, namespace, activity, identity).await, + ( + ApiCommand::Entity(EntityCommand::Derive { + id, + namespace, + activity, + used_entity, + derivation, + }), + identity, + ) => + self.entity_derive(id, namespace, activity, used_entity, derivation, identity) + .await, + (ApiCommand::Query(query), _identity) => self.query(query).await, + } + } + + #[instrument(skip(self))] + async fn delegate( + &self, + namespace: ExternalId, + responsible_id: AgentId, + delegate_id: AgentId, + activity_id: Option, + role: Option, + identity: AuthId, + ) 
-> Result { + let mut api = self.clone(); + + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let tx = ChronicleOperation::agent_acts_on_behalf_of( + namespace, + responsible_id.clone(), + delegate_id, + activity_id, + role, + ); + + to_apply.push(tx); + + api.apply_effects_and_submit( + connection, + responsible_id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + #[instrument(skip(self))] + async fn associate( + &self, + namespace: ExternalId, + responsible_id: AgentId, + activity_id: ActivityId, + role: Option, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let tx = ChronicleOperation::was_associated_with( + namespace, + activity_id, + responsible_id.clone(), + role, + ); + + to_apply.push(tx); + + api.apply_effects_and_submit( + connection, + responsible_id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + #[instrument(skip(self))] + async fn attribute( + &self, + namespace: ExternalId, + responsible_id: AgentId, + entity_id: EntityId, + role: Option, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let tx = ChronicleOperation::was_attributed_to( + namespace, + entity_id, + responsible_id.clone(), + role, + ); + + to_apply.push(tx); + + api.apply_effects_and_submit( + connection, + responsible_id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + #[instrument(skip(self))] + async fn entity_derive( + &self, + id: EntityId, + namespace: ExternalId, + activity_id: Option, + used_id: EntityId, + typ: DerivationType, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let tx = ChronicleOperation::EntityDerive(EntityDerive { + namespace, + id: id.clone(), + used_id: used_id.clone(), + activity_id: activity_id.clone(), + typ, + }); + + to_apply.push(tx); + + api.apply_effects_and_submit( + connection, + id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + async fn query(&self, query: QueryCommand) -> Result { + let api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + let (id, _) = api + .store + .namespace_by_external_id(&mut connection, &ExternalId::from(&query.namespace))?; + Ok(ApiResponse::query_reply( + api.store.load_prov_model_for_namespace(&mut connection, &id)?, + )) + }) + .await? 
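// Every mutating method above clones the Api handle, moves the diesel work onto
// tokio's blocking pool, and wraps it in a database transaction so the
// read-check-submit sequence is atomic. A reduced, self-contained sketch of that
// scaffolding; the tokio calls are real, while the error-flattening via
// From<JoinError> mirrors what the `.await?` in the methods above relies on.

async fn run_blocking_tx<T, E, F>(work: F) -> Result<T, E>
where
    T: Send + 'static,
    E: Send + 'static + From<tokio::task::JoinError>,
    F: FnOnce() -> Result<T, E> + Send + 'static,
{
    // spawn_blocking yields Result<Result<T, E>, JoinError>; flatten both layers
    tokio::task::spawn_blocking(work).await.map_err(E::from)?
}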
+ } + + async fn submit_import_operations( + &self, + identity: AuthId, + operations: Vec, + ) -> Result { + let mut api = self.clone(); + let identity = identity.signed_identity(&self.signing)?; + let model = ProvModel::from_tx(&operations)?; + let signer = self.signing.clone(); + tokio::task::spawn_blocking(move || { + // Check here to ensure that import operations result in data changes + let mut connection = api.store.connection()?; + connection.build_transaction().run(|connection| { + if let Some(operations_to_apply) = api.check_for_effects(connection, &operations)? { + trace!( operations_to_apply = operations_to_apply.len(), "Import operations submitted" ); - let tx_id = api.submit_blocking(futures::executor::block_on( - ChronicleTransaction::new(&signer, identity, operations_to_apply), - )?)?; - Ok(ApiResponse::import_submitted(model, tx_id)) - } else { - info!("Import will not result in any data changes"); - Ok(ApiResponse::AlreadyRecordedAll) - } - }) - }) - .await? - } - - #[instrument(level = "trace", skip(self), ret(Debug))] - async fn sync( - &self, - prov: Box, - block_id: &BlockId, - tx_id: ChronicleTransactionId, - ) -> Result { - let api = self.clone(); - let block_id = *block_id; - tokio::task::spawn_blocking(move || { - api.store.apply_prov(&prov)?; - api.store.set_last_block_id(&block_id, tx_id)?; - - Ok(ApiResponse::Unit) - }) - .await? - } - - /// Creates and submits a (ChronicleTransaction::StartActivity) determining the appropriate - /// agent by external_id, or via [use_agent] context - #[instrument(skip(self))] - async fn instant( - &self, - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - identity: AuthId, - ) -> Result { - let mut api = self.clone(); - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - connection.build_transaction().run(|connection| { - let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; - - let applying_new_namespace = !to_apply.is_empty(); - - let agent_id = { - if let Some(agent) = agent { - Some(agent) - } else { - api.store - .get_current_agent(connection) - .ok() - .map(|x| AgentId::from_external_id(x.external_id)) - } - }; - - let now = Utc::now(); - - to_apply.push(ChronicleOperation::StartActivity(StartActivity { - namespace: namespace.clone(), - id: id.clone(), - time: time.unwrap_or(now).into(), - })); - - to_apply.push(ChronicleOperation::EndActivity(EndActivity { - namespace: namespace.clone(), - id: id.clone(), - time: time.unwrap_or(now).into(), - })); - - if let Some(agent_id) = agent_id { - to_apply.push(ChronicleOperation::was_associated_with( - namespace, - id.clone(), - agent_id, - None, - )); - } - - api.apply_effects_and_submit( - connection, - id, - identity, - to_apply, - applying_new_namespace, - ) - }) - }) - .await? 
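// submit_import_operations runs the same check_for_effects dedup before a bulk
// import: if nothing in the batch would change local state, it short-circuits with
// AlreadyRecordedAll instead of paying for a ledger round trip. The shape of that
// branch, with stand-in types rather than the crate's own:

enum ImportOutcome {
    Submitted { tx_id: String },
    AlreadyRecordedAll,
}

fn import_outcome(
    effective: Option<Vec<String>>,
    submit: impl FnOnce(Vec<String>) -> String,
) -> ImportOutcome {
    match effective {
        Some(ops) => ImportOutcome::Submitted { tx_id: submit(ops) },
        None => ImportOutcome::AlreadyRecordedAll,
    }
}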
- } - - /// Creates and submits a (ChronicleTransaction::StartActivity), determining the appropriate - /// agent by name, or via [use_agent] context - #[instrument(skip(self))] - async fn start_activity( - &self, - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - identity: AuthId, - ) -> Result { - let mut api = self.clone(); - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - connection.build_transaction().run(|connection| { - let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; - - let applying_new_namespace = !to_apply.is_empty(); - - let agent_id = { - if let Some(agent) = agent { - Some(agent) - } else { - api.store - .get_current_agent(connection) - .ok() - .map(|x| AgentId::from_external_id(x.external_id)) - } - }; - - to_apply.push(ChronicleOperation::StartActivity(StartActivity { - namespace: namespace.clone(), - id: id.clone(), - time: time.unwrap_or_else(Utc::now).into(), - })); - - if let Some(agent_id) = agent_id { - to_apply.push(ChronicleOperation::was_associated_with( - namespace, - id.clone(), - agent_id, - None, - )); - } - - api.apply_effects_and_submit( - connection, - id, - identity, - to_apply, - applying_new_namespace, - ) - }) - }) - .await? - } - - /// Creates and submits a (ChronicleTransaction::EndActivity), determining the appropriate agent - /// by name or via [use_agent] context - #[instrument(skip(self))] - async fn end_activity( - &self, - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - identity: AuthId, - ) -> Result { - let mut api = self.clone(); - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - connection.build_transaction().run(|connection| { - let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; - - let applying_new_namespace = !to_apply.is_empty(); - - let agent_id = { - if let Some(agent) = agent { - Some(agent) - } else { - api.store - .get_current_agent(connection) - .ok() - .map(|x| AgentId::from_external_id(x.external_id)) - } - }; - - to_apply.push(ChronicleOperation::EndActivity(EndActivity { - namespace: namespace.clone(), - id: id.clone(), - time: time.unwrap_or_else(Utc::now).into(), - })); - - if let Some(agent_id) = agent_id { - to_apply.push(ChronicleOperation::was_associated_with( - namespace, - id.clone(), - agent_id, - None, - )); - } - - api.apply_effects_and_submit( - connection, - id, - identity, - to_apply, - applying_new_namespace, - ) - }) - }) - .await? - } - - #[instrument(skip(self))] - async fn use_agent_in_cli_context( - &self, - id: AgentId, - namespace: ExternalId, - ) -> Result { - let api = self.clone(); - tokio::task::spawn_blocking(move || { - let mut connection = api.store.connection()?; - - connection.build_transaction().run(|connection| { - api.store.apply_use_agent(connection, id.external_id_part(), &namespace) - })?; - - Ok(ApiResponse::Unit) - }) - .await? - } + let tx_id = api.submit_blocking(futures::executor::block_on( + ChronicleTransaction::new(&signer, identity, operations_to_apply), + )?)?; + Ok(ApiResponse::import_submitted(model, tx_id)) + } else { + info!("Import will not result in any data changes"); + Ok(ApiResponse::AlreadyRecordedAll) + } + }) + }) + .await? 
+ } + + #[instrument(level = "trace", skip(self), ret(Debug))] + async fn sync( + &self, + prov: Box, + block_id: &BlockId, + tx_id: ChronicleTransactionId, + ) -> Result { + let api = self.clone(); + let block_id = *block_id; + tokio::task::spawn_blocking(move || { + api.store.apply_prov(&prov)?; + api.store.set_last_block_id(&block_id, tx_id)?; + + Ok(ApiResponse::Unit) + }) + .await? + } + + /// Creates and submits a (ChronicleTransaction::StartActivity) determining the appropriate + /// agent by external_id, or via [use_agent] context + #[instrument(skip(self))] + async fn instant( + &self, + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let agent_id = { + if let Some(agent) = agent { + Some(agent) + } else { + api.store + .get_current_agent(connection) + .ok() + .map(|x| AgentId::from_external_id(x.external_id)) + } + }; + + let now = Utc::now(); + + to_apply.push(ChronicleOperation::StartActivity(StartActivity { + namespace: namespace.clone(), + id: id.clone(), + time: time.unwrap_or(now).into(), + })); + + to_apply.push(ChronicleOperation::EndActivity(EndActivity { + namespace: namespace.clone(), + id: id.clone(), + time: time.unwrap_or(now).into(), + })); + + if let Some(agent_id) = agent_id { + to_apply.push(ChronicleOperation::was_associated_with( + namespace, + id.clone(), + agent_id, + None, + )); + } + + api.apply_effects_and_submit( + connection, + id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + /// Creates and submits a (ChronicleTransaction::StartActivity), determining the appropriate + /// agent by name, or via [use_agent] context + #[instrument(skip(self))] + async fn start_activity( + &self, + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let agent_id = { + if let Some(agent) = agent { + Some(agent) + } else { + api.store + .get_current_agent(connection) + .ok() + .map(|x| AgentId::from_external_id(x.external_id)) + } + }; + + to_apply.push(ChronicleOperation::StartActivity(StartActivity { + namespace: namespace.clone(), + id: id.clone(), + time: time.unwrap_or_else(Utc::now).into(), + })); + + if let Some(agent_id) = agent_id { + to_apply.push(ChronicleOperation::was_associated_with( + namespace, + id.clone(), + agent_id, + None, + )); + } + + api.apply_effects_and_submit( + connection, + id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? 
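// `instant` above is sugar for a zero-duration activity: one timestamp is captured
// once and reused for both StartActivity and EndActivity, so the two operations can
// never disagree even across a slow clock read, and the agent is resolved from the
// explicit argument first, falling back to the store's "current agent" CLI context.
// A sketch of both rules, assuming only chrono:

use chrono::{DateTime, Utc};

fn instant_times(requested: Option<DateTime<Utc>>) -> (DateTime<Utc>, DateTime<Utc>) {
    let now = Utc::now();
    let t = requested.unwrap_or(now); // single capture, shared by start and end
    (t, t)
}

fn resolve_agent(explicit: Option<String>, current_from_store: Option<String>) -> Option<String> {
    explicit.or(current_from_store) // explicit argument wins over CLI context
}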
+ } + + /// Creates and submits a (ChronicleTransaction::EndActivity), determining the appropriate agent + /// by name or via [use_agent] context + #[instrument(skip(self))] + async fn end_activity( + &self, + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + identity: AuthId, + ) -> Result { + let mut api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + connection.build_transaction().run(|connection| { + let (namespace, mut to_apply) = api.ensure_namespace(connection, &namespace)?; + + let applying_new_namespace = !to_apply.is_empty(); + + let agent_id = { + if let Some(agent) = agent { + Some(agent) + } else { + api.store + .get_current_agent(connection) + .ok() + .map(|x| AgentId::from_external_id(x.external_id)) + } + }; + + to_apply.push(ChronicleOperation::EndActivity(EndActivity { + namespace: namespace.clone(), + id: id.clone(), + time: time.unwrap_or_else(Utc::now).into(), + })); + + if let Some(agent_id) = agent_id { + to_apply.push(ChronicleOperation::was_associated_with( + namespace, + id.clone(), + agent_id, + None, + )); + } + + api.apply_effects_and_submit( + connection, + id, + identity, + to_apply, + applying_new_namespace, + ) + }) + }) + .await? + } + + #[instrument(skip(self))] + async fn use_agent_in_cli_context( + &self, + id: AgentId, + namespace: ExternalId, + ) -> Result { + let api = self.clone(); + tokio::task::spawn_blocking(move || { + let mut connection = api.store.connection()?; + + connection.build_transaction().run(|connection| { + api.store.apply_use_agent(connection, id.external_id_part(), &namespace) + })?; + + Ok(ApiResponse::Unit) + }) + .await? + } } pub trait UuidGen { - fn uuid() -> Uuid { - Uuid::new_v4() - } + fn uuid() -> Uuid { + Uuid::new_v4() + } } fn install_prometheus_metrics_exporter() { - let metrics_endpoint = "127.0.0.1:9000"; - let metrics_listen_socket = match metrics_endpoint.parse::() { - Ok(addr) => addr, - Err(e) => { - error!("Unable to parse metrics listen socket address: {e:?}"); - return; - } - }; - - if let Err(e) = PrometheusBuilder::new().with_http_listener(metrics_listen_socket).install() { - error!("Prometheus exporter installation for liveness check metrics failed: {e:?}"); - } else { - debug!("Liveness check metrics Prometheus exporter installed with endpoint on {metrics_endpoint}/metrics"); - } + let metrics_endpoint = "127.0.0.1:9000"; + let metrics_listen_socket = match metrics_endpoint.parse::() { + Ok(addr) => addr, + Err(e) => { + error!("Unable to parse metrics listen socket address: {e:?}"); + return; + }, + }; + + if let Err(e) = PrometheusBuilder::new().with_http_listener(metrics_listen_socket).install() { + error!("Prometheus exporter installation for liveness check metrics failed: {e:?}"); + } else { + debug!("Liveness check metrics Prometheus exporter installed with endpoint on {metrics_endpoint}/metrics"); + } } diff --git a/crates/api/src/chronicle_graphql/activity.rs b/crates/api/src/chronicle_graphql/activity.rs index 005c18e8e..511ad0f69 100644 --- a/crates/api/src/chronicle_graphql/activity.rs +++ b/crates/api/src/chronicle_graphql/activity.rs @@ -9,165 +9,165 @@ use common::prov::Role; use crate::chronicle_graphql::DatabaseContext; pub async fn namespace<'a>( - namespaceid: i32, - ctx: &Context<'a>, + namespaceid: i32, + ctx: &Context<'a>, ) -> async_graphql::Result { - use chronicle_persistence::schema::namespace::{self, dsl}; - let store = ctx.data::()?; + use chronicle_persistence::schema::namespace::{self, dsl}; + 
let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - Ok(namespace::table - .filter(dsl::id.eq(namespaceid)) - .first::(&mut connection)?) + Ok(namespace::table + .filter(dsl::id.eq(namespaceid)) + .first::(&mut connection)?) } pub async fn was_associated_with<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result, Option, Option)>> { - use chronicle_persistence::schema::{agent, association, delegation}; - - #[derive(Queryable)] - struct DelegationAgents { - responsible_id: i32, - delegate: Agent, - role: String, - } - - let store = ctx.data::()?; - let mut connection = store.connection()?; - - let delegation_entries = delegation::table - .filter(delegation::dsl::activity_id.eq(id)) - .inner_join(agent::table.on(agent::id.eq(delegation::delegate_id))) - .select((delegation::responsible_id, Agent::as_select(), delegation::role)) - .load::(&mut connection)? - .into_iter(); - - let mut agent_reservoir = HashMap::new(); - let mut agent_delegations = HashMap::new(); - - for delegation_entry in delegation_entries { - let delegate_id = delegation_entry.delegate.id; - agent_reservoir.insert(delegate_id, delegation_entry.delegate); - agent_delegations.insert( - delegation_entry.responsible_id, - ( - delegate_id, - if delegation_entry.role.is_empty() { - None - } else { - Some(Role(delegation_entry.role)) - }, - ), - ); - } - - let res = association::table - .filter(association::dsl::activity_id.eq(id)) - .inner_join(chronicle_persistence::schema::agent::table) - .order(chronicle_persistence::schema::agent::external_id) - .select((Agent::as_select(), association::role)) - .load::<(Agent, Role)>(&mut connection)? - .into_iter() - .map(|(responsible_agent, responsible_role)| { - let responsible_role = - if responsible_role.0.is_empty() { None } else { Some(responsible_role) }; - let (delegate_agent, delegate_role): (Option, Option) = - match agent_delegations.get(&responsible_agent.id) { - Some((delegate_id, optional_role)) => { - let delegate = agent_reservoir.remove(delegate_id).unwrap_or_else(|| { - agent::table.find(delegate_id).first::(&mut connection).unwrap() - }); - let optional_role = optional_role.as_ref().cloned(); - (Some(delegate), optional_role) - } - None => (None, None), - }; - (responsible_agent, responsible_role, delegate_agent, delegate_role) - }) - .collect(); - - Ok(res) + use chronicle_persistence::schema::{agent, association, delegation}; + + #[derive(Queryable)] + struct DelegationAgents { + responsible_id: i32, + delegate: Agent, + role: String, + } + + let store = ctx.data::()?; + let mut connection = store.connection()?; + + let delegation_entries = delegation::table + .filter(delegation::dsl::activity_id.eq(id)) + .inner_join(agent::table.on(agent::id.eq(delegation::delegate_id))) + .select((delegation::responsible_id, Agent::as_select(), delegation::role)) + .load::(&mut connection)? 
+ .into_iter(); + + let mut agent_reservoir = HashMap::new(); + let mut agent_delegations = HashMap::new(); + + for delegation_entry in delegation_entries { + let delegate_id = delegation_entry.delegate.id; + agent_reservoir.insert(delegate_id, delegation_entry.delegate); + agent_delegations.insert( + delegation_entry.responsible_id, + ( + delegate_id, + if delegation_entry.role.is_empty() { + None + } else { + Some(Role(delegation_entry.role)) + }, + ), + ); + } + + let res = association::table + .filter(association::dsl::activity_id.eq(id)) + .inner_join(chronicle_persistence::schema::agent::table) + .order(chronicle_persistence::schema::agent::external_id) + .select((Agent::as_select(), association::role)) + .load::<(Agent, Role)>(&mut connection)? + .into_iter() + .map(|(responsible_agent, responsible_role)| { + let responsible_role = + if responsible_role.0.is_empty() { None } else { Some(responsible_role) }; + let (delegate_agent, delegate_role): (Option, Option) = + match agent_delegations.get(&responsible_agent.id) { + Some((delegate_id, optional_role)) => { + let delegate = agent_reservoir.remove(delegate_id).unwrap_or_else(|| { + agent::table.find(delegate_id).first::(&mut connection).unwrap() + }); + let optional_role = optional_role.as_ref().cloned(); + (Some(delegate), optional_role) + }, + None => (None, None), + }; + (responsible_agent, responsible_role, delegate_agent, delegate_role) + }) + .collect(); + + Ok(res) } pub async fn used<'a>(id: i32, ctx: &Context<'a>) -> async_graphql::Result> { - use chronicle_persistence::schema::usage::{self, dsl}; + use chronicle_persistence::schema::usage::{self, dsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = usage::table - .filter(dsl::activity_id.eq(id)) - .inner_join(chronicle_persistence::schema::entity::table) - .order(chronicle_persistence::schema::entity::external_id) - .select(Entity::as_select()) - .load::(&mut connection)?; + let res = usage::table + .filter(dsl::activity_id.eq(id)) + .inner_join(chronicle_persistence::schema::entity::table) + .order(chronicle_persistence::schema::entity::external_id) + .select(Entity::as_select()) + .load::(&mut connection)?; - Ok(res) + Ok(res) } pub async fn was_informed_by<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result> { - use chronicle_persistence::schema::wasinformedby::{self, dsl}; + use chronicle_persistence::schema::wasinformedby::{self, dsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = wasinformedby::table - .filter(dsl::activity_id.eq(id)) - .inner_join(chronicle_persistence::schema::activity::table.on( - wasinformedby::informing_activity_id.eq(chronicle_persistence::schema::activity::id), - )) - .order(chronicle_persistence::schema::activity::external_id) - .select(Activity::as_select()) - .load::(&mut connection)?; + let res = wasinformedby::table + .filter(dsl::activity_id.eq(id)) + .inner_join(chronicle_persistence::schema::activity::table.on( + wasinformedby::informing_activity_id.eq(chronicle_persistence::schema::activity::id), + )) + .order(chronicle_persistence::schema::activity::external_id) + .select(Activity::as_select()) + .load::(&mut connection)?; - Ok(res) + Ok(res) } pub async fn generated<'a>(id: i32, ctx: &Context<'a>) -> async_graphql::Result> { - use 
chronicle_persistence::schema::generation::{self, dsl}; + use chronicle_persistence::schema::generation::{self, dsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = generation::table - .filter(dsl::activity_id.eq(id)) - .inner_join(chronicle_persistence::schema::entity::table) - .select(Entity::as_select()) - .load::(&mut connection)?; + let res = generation::table + .filter(dsl::activity_id.eq(id)) + .inner_join(chronicle_persistence::schema::entity::table) + .select(Entity::as_select()) + .load::(&mut connection)?; - Ok(res) + Ok(res) } pub async fn load_attribute<'a>( - id: i32, - external_id: &str, - ctx: &Context<'a>, + id: i32, + external_id: &str, + ctx: &Context<'a>, ) -> async_graphql::Result> { - use chronicle_persistence::schema::activity_attribute; - - let store = ctx.data::()?; - - let mut connection = store.connection()?; - - Ok(activity_attribute::table - .filter( - activity_attribute::activity_id - .eq(id) - .and(activity_attribute::typename.eq(external_id)), - ) - .select(activity_attribute::value) - .first::(&mut connection) - .optional()? - .as_deref() - .map(serde_json::from_str) - .transpose()?) + use chronicle_persistence::schema::activity_attribute; + + let store = ctx.data::()?; + + let mut connection = store.connection()?; + + Ok(activity_attribute::table + .filter( + activity_attribute::activity_id + .eq(id) + .and(activity_attribute::typename.eq(external_id)), + ) + .select(activity_attribute::value) + .first::(&mut connection) + .optional()? + .as_deref() + .map(serde_json::from_str) + .transpose()?) } diff --git a/crates/api/src/chronicle_graphql/agent.rs b/crates/api/src/chronicle_graphql/agent.rs index a9681d318..4c4593742 100644 --- a/crates/api/src/chronicle_graphql/agent.rs +++ b/crates/api/src/chronicle_graphql/agent.rs @@ -7,86 +7,86 @@ use common::prov::Role; use crate::chronicle_graphql::DatabaseContext; pub async fn namespace<'a>( - namespace_id: i32, - ctx: &Context<'a>, + namespace_id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result { - use chronicle_persistence::schema::namespace::{self, dsl}; - let store = ctx.data::()?; + use chronicle_persistence::schema::namespace::{self, dsl}; + let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - Ok(namespace::table - .filter(dsl::id.eq(namespace_id)) - .first::(&mut connection)?) + Ok(namespace::table + .filter(dsl::id.eq(namespace_id)) + .first::(&mut connection)?) } pub async fn acted_on_behalf_of<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result)>> { - use chronicle_persistence::schema::{ - agent as agentdsl, - delegation::{self, dsl}, - }; - - let store = ctx.data::()?; - - let mut connection = store.connection()?; - - Ok(delegation::table - .filter(dsl::delegate_id.eq(id)) - .inner_join(agentdsl::table.on(dsl::responsible_id.eq(agentdsl::id))) - .order(agentdsl::external_id) - .select((Agent::as_select(), dsl::role)) - .load::<(Agent, Role)>(&mut connection)? 
- .into_iter() - .map(|(a, r)| (a, if r.0.is_empty() { None } else { Some(r) })) - .collect()) + use chronicle_persistence::schema::{ + agent as agentdsl, + delegation::{self, dsl}, + }; + + let store = ctx.data::()?; + + let mut connection = store.connection()?; + + Ok(delegation::table + .filter(dsl::delegate_id.eq(id)) + .inner_join(agentdsl::table.on(dsl::responsible_id.eq(agentdsl::id))) + .order(agentdsl::external_id) + .select((Agent::as_select(), dsl::role)) + .load::<(Agent, Role)>(&mut connection)? + .into_iter() + .map(|(a, r)| (a, if r.0.is_empty() { None } else { Some(r) })) + .collect()) } /// Return the entities an agent has attributed to it along with the roles in which they were /// attributed pub async fn attribution<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result)>> { - use chronicle_persistence::schema::{ - attribution::{self, dsl}, - entity as entity_dsl, - }; - - let store = ctx.data::()?; - - let mut connection = store.connection()?; - - Ok(attribution::table - .filter(dsl::agent_id.eq(id)) - .inner_join(entity_dsl::table.on(dsl::entity_id.eq(entity_dsl::id))) - .order(entity_dsl::external_id) - .select((Entity::as_select(), dsl::role)) - .load::<(Entity, Role)>(&mut connection)? - .into_iter() - .map(|(entity, role)| (entity, if role.0.is_empty() { None } else { Some(role) })) - .collect()) + use chronicle_persistence::schema::{ + attribution::{self, dsl}, + entity as entity_dsl, + }; + + let store = ctx.data::()?; + + let mut connection = store.connection()?; + + Ok(attribution::table + .filter(dsl::agent_id.eq(id)) + .inner_join(entity_dsl::table.on(dsl::entity_id.eq(entity_dsl::id))) + .order(entity_dsl::external_id) + .select((Entity::as_select(), dsl::role)) + .load::<(Entity, Role)>(&mut connection)? + .into_iter() + .map(|(entity, role)| (entity, if role.0.is_empty() { None } else { Some(role) })) + .collect()) } pub async fn load_attribute<'a>( - id: i32, - external_id: &str, - ctx: &Context<'a>, + id: i32, + external_id: &str, + ctx: &Context<'a>, ) -> async_graphql::Result> { - use chronicle_persistence::schema::agent_attribute; + use chronicle_persistence::schema::agent_attribute; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - Ok(agent_attribute::table - .filter(agent_attribute::agent_id.eq(id).and(agent_attribute::typename.eq(external_id))) - .select(agent_attribute::value) - .first::(&mut connection) - .optional()? - .as_deref() - .map(serde_json::from_str) - .transpose()?) + Ok(agent_attribute::table + .filter(agent_attribute::agent_id.eq(id).and(agent_attribute::typename.eq(external_id))) + .select(agent_attribute::value) + .first::(&mut connection) + .optional()? + .as_deref() + .map(serde_json::from_str) + .transpose()?) 
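// load_attribute above stores attribute values as JSON text and re-parses them on
// read; the Option/Result juggling is the standard optional-column pattern: an
// Option<String> from diesel's .optional(), parse each present value, then transpose
// Option<Result<..>> into Result<Option<..>>. A self-contained equivalent using only
// serde_json:

fn parse_attribute(raw: Option<String>) -> Result<Option<serde_json::Value>, serde_json::Error> {
    raw.as_deref() // Option<&str>
        .map(serde_json::from_str) // Option<Result<Value, _>>
        .transpose() // Result<Option<Value>, _>
}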
} diff --git a/crates/api/src/chronicle_graphql/authorization.rs b/crates/api/src/chronicle_graphql/authorization.rs index 7602c6d70..d3fd02fcd 100644 --- a/crates/api/src/chronicle_graphql/authorization.rs +++ b/crates/api/src/chronicle_graphql/authorization.rs @@ -13,194 +13,194 @@ use super::{JwksUri, UserInfoUri}; #[derive(Debug, Error)] pub enum Error { - #[error("Base64 decoding failure: {0}", source)] - Base64 { - #[from] - #[source] - source: base64::DecodeError, - }, - #[error("JSON decoding failure: {0}", source)] - Json { - #[from] - #[source] - source: serde_json::Error, - }, - #[error("JWT validation failure: {0}", source)] - Jwks { - #[from] - #[source] - source: jwtk::Error, - }, - #[error("web access failure: {0}", source)] - Reqwest { - #[from] - #[source] - source: reqwest::Error, - }, - #[error("formatting error: {0}", message)] - Format { message: String }, - #[error("unexpected response: {0} responded with status {1}", server, status)] - UnexpectedResponse { server: String, status: StatusCode }, + #[error("Base64 decoding failure: {0}", source)] + Base64 { + #[from] + #[source] + source: base64::DecodeError, + }, + #[error("JSON decoding failure: {0}", source)] + Json { + #[from] + #[source] + source: serde_json::Error, + }, + #[error("JWT validation failure: {0}", source)] + Jwks { + #[from] + #[source] + source: jwtk::Error, + }, + #[error("web access failure: {0}", source)] + Reqwest { + #[from] + #[source] + source: reqwest::Error, + }, + #[error("formatting error: {0}", message)] + Format { message: String }, + #[error("unexpected response: {0} responded with status {1}", server, status)] + UnexpectedResponse { server: String, status: StatusCode }, } #[derive(Clone)] pub struct TokenChecker { - client: reqwest::Client, - verifier: Option<Arc<RemoteJwksVerifier>>, - jwks_uri: Option<JwksUri>, - userinfo_uri: Option<UserInfoUri>, - userinfo_cache: Arc<Mutex<TimedCache<String, Map<String, Value>>>>, + client: reqwest::Client, + verifier: Option<Arc<RemoteJwksVerifier>>, + jwks_uri: Option<JwksUri>, + userinfo_uri: Option<UserInfoUri>, + userinfo_cache: Arc<Mutex<TimedCache<String, Map<String, Value>>>>, } impl TokenChecker { - #[instrument(level = "debug")] - pub fn new( - jwks_uri: Option<&JwksUri>, - userinfo_uri: Option<&UserInfoUri>, - cache_expiry_seconds: u32, - ) -> Self { - Self { - client: reqwest::Client::new(), - verifier: jwks_uri.map(|uri| { - { - RemoteJwksVerifier::new( - uri.full_uri(), - None, - Duration::from_secs(cache_expiry_seconds.into()), - ) - } - .into() - }), - jwks_uri: jwks_uri.cloned(), - userinfo_uri: userinfo_uri.cloned(), - userinfo_cache: Arc::new(Mutex::new(TimedCache::with_lifespan( - cache_expiry_seconds.into(), - ))), - } - } + #[instrument(level = "debug")] + pub fn new( + jwks_uri: Option<&JwksUri>, + userinfo_uri: Option<&UserInfoUri>, + cache_expiry_seconds: u32, + ) -> Self { + Self { + client: reqwest::Client::new(), + verifier: jwks_uri.map(|uri| { + { + RemoteJwksVerifier::new( + uri.full_uri(), + None, + Duration::from_secs(cache_expiry_seconds.into()), + ) + } + .into() + }), + jwks_uri: jwks_uri.cloned(), + userinfo_uri: userinfo_uri.cloned(), + userinfo_cache: Arc::new(Mutex::new(TimedCache::with_lifespan( + cache_expiry_seconds.into(), + ))), + } + } - pub async fn check_status(&self) -> Result<(), Error> { - if let Some(uri) = &self.jwks_uri { - let status = self.client.get(uri.full_uri()).send().await?.status(); - // should respond with JSON web key set - if !status.is_success() { - tracing::warn!("{uri:?} returns {status}"); - return Err(Error::UnexpectedResponse { server: format!("{uri:?}"), status }); - } - } - if let Some(uri) = &self.userinfo_uri { - let status = 
self.client.get(uri.full_uri()).send().await?.status(); - // should require an authorization token - if !status.is_client_error() || status == StatusCode::NOT_FOUND { - tracing::warn!("{uri:?} without authorization token returns {status}"); - return Err(Error::UnexpectedResponse { server: format!("{uri:?}"), status }); - } - } - Ok(()) - } + pub async fn check_status(&self) -> Result<(), Error> { + if let Some(uri) = &self.jwks_uri { + let status = self.client.get(uri.full_uri()).send().await?.status(); + // should respond with JSON web key set + if !status.is_success() { + tracing::warn!("{uri:?} returns {status}"); + return Err(Error::UnexpectedResponse { server: format!("{uri:?}"), status }); + } + } + if let Some(uri) = &self.userinfo_uri { + let status = self.client.get(uri.full_uri()).send().await?.status(); + // should require an authorization token + if !status.is_client_error() || status == StatusCode::NOT_FOUND { + tracing::warn!("{uri:?} without authorization token returns {status}"); + return Err(Error::UnexpectedResponse { server: format!("{uri:?}"), status }); + } + } + Ok(()) + } - #[instrument(level = "trace", skip_all, err)] - async fn attempt_jwt(&self, token: &str) -> Result<Map<String, Value>, Error> { - use base64::engine::general_purpose::{GeneralPurpose, URL_SAFE_NO_PAD}; - const BASE64_ENGINE: GeneralPurpose = URL_SAFE_NO_PAD; + #[instrument(level = "trace", skip_all, err)] + async fn attempt_jwt(&self, token: &str) -> Result<Map<String, Value>, Error> { + use base64::engine::general_purpose::{GeneralPurpose, URL_SAFE_NO_PAD}; + const BASE64_ENGINE: GeneralPurpose = URL_SAFE_NO_PAD; - if let Some(verifier) = &self.verifier { - verifier.verify::<Map<String, Value>>(token).await?; - } else { - return Err(Error::Format { message: "no JWKS endpoint configured".to_string() }); - } + if let Some(verifier) = &self.verifier { + verifier.verify::<Map<String, Value>>(token).await?; + } else { + return Err(Error::Format { message: "no JWKS endpoint configured".to_string() }); + } - // JWT is composed of three base64-encoded components - let components = token - .split('.') - .map(|component| BASE64_ENGINE.decode(component)) - .collect::<Result<Vec<Vec<u8>>, base64::DecodeError>>()?; - if components.len() != 3 { - return Err(Error::Format { message: format!("JWT has unexpected format: {token}") }); - }; + // JWT is composed of three base64-encoded components + let components = token + .split('.') + .map(|component| BASE64_ENGINE.decode(component)) + .collect::<Result<Vec<Vec<u8>>, base64::DecodeError>>()?; + if components.len() != 3 { + return Err(Error::Format { message: format!("JWT has unexpected format: {token}") }); + }; - if let Value::Object(claims) = serde_json::from_slice(components[1].as_slice())? { - Ok(claims) - } else { - Err(Error::Format { - message: format!("JWT claims have unexpected format: {:?}", components[1]), - }) - } - } + if let Value::Object(claims) = serde_json::from_slice(components[1].as_slice())? 
{ + Ok(claims) + } else { + Err(Error::Format { + message: format!("JWT claims have unexpected format: {:?}", components[1]), + }) + } + } - #[instrument(level = "debug", skip_all, err)] - pub async fn verify_token(&self, token: &str) -> Result<Map<String, Value>, Error> { - let mut claims = Map::new(); - let mut error = None; - match self.attempt_jwt(token).await { - Ok(claims_as_provided) => claims.extend(claims_as_provided), - Err(Error::Jwks { source }) => { - match source { - jwtk::Error::IoError(_) | jwtk::Error::Reqwest(_) => { - tracing::error!(fatal_error = ?source); - super::trigger_shutdown(); - } - _ => (), - } - return Err(Error::Jwks { source }); // abort on JWKS verifier failure - } - Err(err) => error = Some(err), // could tolerate error from what may be opaque token - }; - if let Some(userinfo_uri) = &self.userinfo_uri { - let mut cache = self.userinfo_cache.lock().await; - if let Some(claims_from_userinfo) = cache.cache_get(&token.to_string()) { - tracing::trace!("userinfo cache hit"); - error = None; - claims.extend(claims_from_userinfo.clone()); - } else { - tracing::trace!("userinfo cache miss"); - drop(cache); - let request = self - .client - .get(userinfo_uri.full_uri()) - .header("Authorization", format!("Bearer {token}")); - let response = request.send().await?; - cache = self.userinfo_cache.lock().await; - if response.status() == 200 { - let response_text = &response.text().await?; - if let Ok(claims_from_userinfo) = self.attempt_jwt(response_text).await { - error = None; - claims.extend(claims_from_userinfo.clone()); - cache.cache_set(token.to_string(), claims_from_userinfo); - } else if let Ok(Value::Object(claims_from_userinfo)) = - serde_json::from_str(response_text) - { - error = None; - claims.extend(claims_from_userinfo.clone()); - cache.cache_set(token.to_string(), claims_from_userinfo); - } else { - error = Some(Error::Format { - message: format!( - "UserInfo response has unexpected format: {response_text}" - ), - }); - tracing::error!(fatal_error = ?error.as_ref().unwrap()); - super::trigger_shutdown(); - } - } else { - if error.is_some() { - tracing::trace!("first error before UserInfo was {error:?}"); - } - error = Some(Error::UnexpectedResponse { - server: format!("{userinfo_uri:?}"), - status: response.status(), - }); - if response.status() != StatusCode::UNAUTHORIZED { - tracing::error!(fatal_error = ?error.as_ref().unwrap()); - super::trigger_shutdown(); - } - } - } - } - if let Some(error) = error { - Err(error) - } else { - Ok(claims) - } - } + #[instrument(level = "debug", skip_all, err)] + pub async fn verify_token(&self, token: &str) -> Result<Map<String, Value>, Error> { + let mut claims = Map::new(); + let mut error = None; + match self.attempt_jwt(token).await { + Ok(claims_as_provided) => claims.extend(claims_as_provided), + Err(Error::Jwks { source }) => { + match source { + jwtk::Error::IoError(_) | jwtk::Error::Reqwest(_) => { + tracing::error!(fatal_error = ?source); + super::trigger_shutdown(); + }, + _ => (), + } + return Err(Error::Jwks { source }); // abort on JWKS verifier failure + }, + Err(err) => error = Some(err), // could tolerate error from what may be opaque token + }; + if let Some(userinfo_uri) = &self.userinfo_uri { + let mut cache = self.userinfo_cache.lock().await; + if let Some(claims_from_userinfo) = cache.cache_get(&token.to_string()) { + tracing::trace!("userinfo cache hit"); + error = None; + claims.extend(claims_from_userinfo.clone()); + } else { + tracing::trace!("userinfo cache miss"); + drop(cache); + let request = self + .client 
.get(userinfo_uri.full_uri()) + .header("Authorization", format!("Bearer {token}")); + let response = request.send().await?; + cache = self.userinfo_cache.lock().await; + if response.status() == 200 { + let response_text = &response.text().await?; + if let Ok(claims_from_userinfo) = self.attempt_jwt(response_text).await { + error = None; + claims.extend(claims_from_userinfo.clone()); + cache.cache_set(token.to_string(), claims_from_userinfo); + } else if let Ok(Value::Object(claims_from_userinfo)) = + serde_json::from_str(response_text) + { + error = None; + claims.extend(claims_from_userinfo.clone()); + cache.cache_set(token.to_string(), claims_from_userinfo); + } else { + error = Some(Error::Format { + message: format!( + "UserInfo response has unexpected format: {response_text}" + ), + }); + tracing::error!(fatal_error = ?error.as_ref().unwrap()); + super::trigger_shutdown(); + } + } else { + if error.is_some() { + tracing::trace!("first error before UserInfo was {error:?}"); + } + error = Some(Error::UnexpectedResponse { + server: format!("{userinfo_uri:?}"), + status: response.status(), + }); + if response.status() != StatusCode::UNAUTHORIZED { + tracing::error!(fatal_error = ?error.as_ref().unwrap()); + super::trigger_shutdown(); + } + } + } + } + if let Some(error) = error { + Err(error) + } else { + Ok(claims) + } + } } diff --git a/crates/api/src/chronicle_graphql/cursor_project.rs b/crates/api/src/chronicle_graphql/cursor_project.rs index 46fd72ac7..2f773e014 100644 --- a/crates/api/src/chronicle_graphql/cursor_project.rs +++ b/crates/api/src/chronicle_graphql/cursor_project.rs @@ -1,31 +1,31 @@ use async_graphql::{ - connection::{Edge, EmptyFields}, - OutputType, + connection::{Edge, EmptyFields}, + OutputType, }; pub fn project_to_nodes<T, I>( - rx: I, - start: i64, - limit: i64, + rx: I, + start: i64, + limit: i64, ) -> async_graphql::connection::Connection<i32, T, EmptyFields, EmptyFields> - where - T: OutputType, - I: IntoIterator<Item = (T, i64)>, +where + T: OutputType, + I: IntoIterator<Item = (T, i64)>, { - let rx = Vec::from_iter(rx); - let mut gql = async_graphql::connection::Connection::new( - rx.first().map(|(_, _total)| start > 0).unwrap_or(false), - rx.first().map(|(_, total)| start + limit < *total).unwrap_or(false), - ); + let rx = Vec::from_iter(rx); + let mut gql = async_graphql::connection::Connection::new( + rx.first().map(|(_, _total)| start > 0).unwrap_or(false), + rx.first().map(|(_, total)| start + limit < *total).unwrap_or(false), + ); - gql.edges.append( - &mut rx - .into_iter() - .enumerate() - .map(|(pos, (agent, _count))| { - Edge::with_additional_fields((pos as i32) + (start as i32), agent, EmptyFields) - }) - .collect(), - ); - gql + gql.edges.append( + &mut rx + .into_iter() + .enumerate() + .map(|(pos, (agent, _count))| { + Edge::with_additional_fields((pos as i32) + (start as i32), agent, EmptyFields) + }) + .collect(), + ); + gql } diff --git a/crates/api/src/chronicle_graphql/entity.rs b/crates/api/src/chronicle_graphql/entity.rs index 177d30e70..44b1b4525 100644 --- a/crates/api/src/chronicle_graphql/entity.rs +++ b/crates/api/src/chronicle_graphql/entity.rs @@ -7,147 +7,147 @@ use common::prov::{operations::DerivationType, Role}; use crate::chronicle_graphql::DatabaseContext; async fn typed_derivation<'a>( - id: i32, - ctx: &Context<'a>, - typ: DerivationType, + id: i32, + ctx: &Context<'a>, + typ: DerivationType, ) -> async_graphql::Result<Vec<Entity>> { - use chronicle_persistence::schema::{ - derivation::{self, dsl}, - entity as entitydsl, - }; + use chronicle_persistence::schema::{ + derivation::{self, dsl}, + entity as 
entitydsl, + }; - let store = ctx.data::<DatabaseContext>()?; + let store = ctx.data::<DatabaseContext>()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = derivation::table - .filter(dsl::generated_entity_id.eq(id).and(dsl::typ.eq(typ))) - .inner_join(entitydsl::table.on(dsl::used_entity_id.eq(entitydsl::id))) - .select(Entity::as_select()) - .load::<Entity>(&mut connection)?; + let res = derivation::table + .filter(dsl::generated_entity_id.eq(id).and(dsl::typ.eq(typ))) + .inner_join(entitydsl::table.on(dsl::used_entity_id.eq(entitydsl::id))) + .select(Entity::as_select()) + .load::<Entity>(&mut connection)?; - Ok(res) + Ok(res) } pub async fn namespace<'a>( - namespace_id: i32, - ctx: &Context<'a>, + namespace_id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result<Namespace> { - use chronicle_persistence::schema::namespace::{self, dsl}; + use chronicle_persistence::schema::namespace::{self, dsl}; - let store = ctx.data::<DatabaseContext>()?; + let store = ctx.data::<DatabaseContext>()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - Ok(namespace::table - .filter(dsl::id.eq(namespace_id)) - .first::<Namespace>(&mut connection)?) + Ok(namespace::table + .filter(dsl::id.eq(namespace_id)) + .first::<Namespace>(&mut connection)?) } /// Return the agents to which an entity was attributed along with the roles in which it was /// attributed pub async fn was_attributed_to<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result<Vec<(Agent, Option<Role>)>> { - use chronicle_persistence::schema::{agent, attribution}; - - let store = ctx.data::<DatabaseContext>()?; - let mut connection = store.connection()?; - - let res = attribution::table - .filter(attribution::dsl::entity_id.eq(id)) - .inner_join(agent::table) - .order(agent::external_id) - .select((Agent::as_select(), attribution::role)) - .load::<(Agent, Role)>(&mut connection)? - .into_iter() - .map(|(agent, role)| { - let role = if role.0.is_empty() { None } else { Some(role) }; - (agent, role) - }) - .collect(); - - Ok(res) + use chronicle_persistence::schema::{agent, attribution}; + + let store = ctx.data::<DatabaseContext>()?; + let mut connection = store.connection()?; + + let res = attribution::table + .filter(attribution::dsl::entity_id.eq(id)) + .inner_join(agent::table) + .order(agent::external_id) + .select((Agent::as_select(), attribution::role)) + .load::<(Agent, Role)>(&mut connection)? 
+ .into_iter() + .map(|(agent, role)| { + let role = if role.0.is_empty() { None } else { Some(role) }; + (agent, role) + }) + .collect(); + + Ok(res) } pub async fn was_generated_by<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result<Vec<Activity>> { - use chronicle_persistence::schema::generation::{self, dsl}; + use chronicle_persistence::schema::generation::{self, dsl}; - let store = ctx.data::<DatabaseContext>()?; + let store = ctx.data::<DatabaseContext>()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = generation::table - .filter(dsl::generated_entity_id.eq(id)) - .inner_join(chronicle_persistence::schema::activity::table) - .select(Activity::as_select()) - .load::<Activity>(&mut connection)?; + let res = generation::table + .filter(dsl::generated_entity_id.eq(id)) + .inner_join(chronicle_persistence::schema::activity::table) + .select(Activity::as_select()) + .load::<Activity>(&mut connection)?; - Ok(res) + Ok(res) } pub async fn was_derived_from<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result<Vec<Entity>> { - use chronicle_persistence::schema::{ - derivation::{self, dsl}, - entity as entitydsl, - }; + use chronicle_persistence::schema::{ + derivation::{self, dsl}, + entity as entitydsl, + }; - let store = ctx.data::<DatabaseContext>()?; + let store = ctx.data::<DatabaseContext>()?; - let mut connection = store.connection()?; + let mut connection = store.connection()?; - let res = derivation::table - .filter(dsl::generated_entity_id.eq(id)) - .inner_join(entitydsl::table.on(dsl::used_entity_id.eq(entitydsl::id))) - .select(Entity::as_select()) - .load::<Entity>(&mut connection)?; + let res = derivation::table + .filter(dsl::generated_entity_id.eq(id)) + .inner_join(entitydsl::table.on(dsl::used_entity_id.eq(entitydsl::id))) + .select(Entity::as_select()) + .load::<Entity>(&mut connection)?; - Ok(res) + Ok(res) } pub async fn had_primary_source<'a>( - id: i32, - ctx: &Context<'a>, + id: i32, + ctx: &Context<'a>, ) -> async_graphql::Result<Vec<Entity>> { - typed_derivation(id, ctx, DerivationType::PrimarySource).await + typed_derivation(id, ctx, DerivationType::PrimarySource).await } pub async fn was_revision_of<'a>(id: i32, ctx: &Context<'a>) -> async_graphql::Result<Vec<Entity>> { - typed_derivation(id, ctx, DerivationType::Revision).await + typed_derivation(id, ctx, DerivationType::Revision).await } pub async fn was_quoted_from<'a>(id: i32, ctx: &Context<'a>) -> async_graphql::Result<Vec<Entity>> { - typed_derivation(id, ctx, DerivationType::Quotation).await + typed_derivation(id, ctx, DerivationType::Quotation).await } pub async fn load_attribute<'a>( - id: i32, - external_id: &str, - ctx: &Context<'a>, + id: i32, + external_id: &str, + ctx: &Context<'a>, ) -> async_graphql::Result<Option<serde_json::Value>> { - use chronicle_persistence::schema::entity_attribute; - - let store = ctx.data::<DatabaseContext>()?; - - let mut connection = store.connection()?; - - Ok(entity_attribute::table - .filter( - entity_attribute::entity_id - .eq(id) - .and(entity_attribute::typename.eq(external_id)), - ) - .select(entity_attribute::value) - .first::<String>(&mut connection) - .optional()? - .as_deref() - .map(serde_json::from_str) - .transpose()?) + use chronicle_persistence::schema::entity_attribute; + + let store = ctx.data::<DatabaseContext>()?; + + let mut connection = store.connection()?; + + Ok(entity_attribute::table + .filter( + entity_attribute::entity_id + .eq(id) + .and(entity_attribute::typename.eq(external_id)), + ) + .select(entity_attribute::value) + .first::<String>(&mut connection) + .optional()? + .as_deref() + .map(serde_json::from_str) + .transpose()?) 
} diff --git a/crates/api/src/chronicle_graphql/mod.rs b/crates/api/src/chronicle_graphql/mod.rs index c513e438e..11fc4ad78 100644 --- a/crates/api/src/chronicle_graphql/mod.rs +++ b/crates/api/src/chronicle_graphql/mod.rs @@ -7,8 +7,8 @@ use std::{ }; use async_graphql::{ - Context, - Enum, Error, ErrorExtensions, http::{ALL_WEBSOCKET_PROTOCOLS, GraphQLPlaygroundConfig, playground_source}, ObjectType, scalar, Schema, ServerError, SimpleObject, + http::{playground_source, GraphQLPlaygroundConfig, ALL_WEBSOCKET_PROTOCOLS}, + scalar, Context, Enum, Error, ErrorExtensions, ObjectType, Schema, ServerError, SimpleObject, Subscription, SubscriptionType, }; use async_graphql_poem::{ @@ -17,22 +17,22 @@ use async_graphql_poem::{ }; use diesel::{ - PgConnection, prelude::*, - Queryable, r2d2::{ConnectionManager, Pool, PooledConnection}, + r2d2::{ConnectionManager, Pool, PooledConnection}, + PgConnection, Queryable, }; use futures::Stream; use lazy_static::lazy_static; use poem::{ - Endpoint, get, - handler, + get, handler, http::{HeaderValue, StatusCode}, - IntoResponse, listener::{Listener, TcpListener}, - post, Route, Server, web::{ + post, + web::{ headers::authorization::{Bearer, Credentials}, Html, }, + Endpoint, IntoResponse, Route, Server, }; use serde::{Deserialize, Serialize}; use serde_json::json; @@ -46,15 +46,12 @@ use common::{ ledger::{SubmissionError, SubmissionStage}, opa::std::{ExecutorContext, OpaExecutorError}, prov::{ - ChronicleIri, ChronicleTransactionId, ExternalId, ExternalIdPart, json_ld::ToJson, + json_ld::ToJson, ChronicleIri, ChronicleTransactionId, ExternalId, ExternalIdPart, ProvModel, }, }; -use crate::dispatch::ApiDispatch; -use crate::error::ApiError; -use crate::Store; -use crate::StoreError; +use crate::{dispatch::ApiDispatch, error::ApiError, Store, StoreError}; use self::authorization::TokenChecker; @@ -81,9 +78,9 @@ pub type AuthorizationError = authorization::Error; /// * `tx_id` - transaction id for a submitted operation; returns `null` if `submission_result` /// is `SubmissionResult::AlreadyRecorded` pub struct Submission { - context: String, - submission_result: SubmissionResult, - tx_id: Option, + context: String, + submission_result: SubmissionResult, + tx_id: Option, } #[derive(Enum, PartialEq, Eq, Clone, Copy)] @@ -94,26 +91,26 @@ pub struct Submission { /// * `Submission` - operation has been submitted /// * `AlreadyRecorded` - operation will not result in data changes and has not been submitted pub enum SubmissionResult { - Submission, - AlreadyRecorded, + Submission, + AlreadyRecorded, } impl Submission { - pub fn from_submission(subject: &ChronicleIri, tx_id: &ChronicleTransactionId) -> Self { - Submission { - context: subject.to_string(), - submission_result: SubmissionResult::Submission, - tx_id: Some(tx_id.to_string()), - } - } - - pub fn from_already_recorded(subject: &ChronicleIri) -> Self { - Submission { - context: subject.to_string(), - submission_result: SubmissionResult::AlreadyRecorded, - tx_id: None, - } - } + pub fn from_submission(subject: &ChronicleIri, tx_id: &ChronicleTransactionId) -> Self { + Submission { + context: subject.to_string(), + submission_result: SubmissionResult::Submission, + tx_id: Some(tx_id.to_string()), + } + } + + pub fn from_already_recorded(subject: &ChronicleIri) -> Self { + Submission { + context: subject.to_string(), + submission_result: SubmissionResult::AlreadyRecorded, + tx_id: None, + } + } } /// # `TimelineOrder` @@ -121,95 +118,95 @@ impl Submission { /// Specify the order in which multiple results of 
query data are returned #[derive(Enum, Copy, Clone, Eq, PartialEq, Debug)] pub enum TimelineOrder { - NewestFirst, - OldestFirst, + NewestFirst, + OldestFirst, } #[derive(Error, Debug)] pub enum GraphQlError { - #[error("Database operation failed: {0}")] - Db( - #[from] - #[source] - diesel::result::Error, - ), - - #[error("Connection pool error: {0}")] - R2d2( - #[from] - #[source] - diesel::r2d2::Error, - ), - - #[error("Database connection failed: {0}")] - DbConnection( - #[from] - #[source] - diesel::ConnectionError, - ), - - #[error("API: {0}")] - Api( - #[from] - #[source] - crate::ApiError, - ), - - #[error("I/O: {0}")] - Io( - #[from] - #[source] - std::io::Error, - ), + #[error("Database operation failed: {0}")] + Db( + #[from] + #[source] + diesel::result::Error, + ), + + #[error("Connection pool error: {0}")] + R2d2( + #[from] + #[source] + diesel::r2d2::Error, + ), + + #[error("Database connection failed: {0}")] + DbConnection( + #[from] + #[source] + diesel::ConnectionError, + ), + + #[error("API: {0}")] + Api( + #[from] + #[source] + crate::ApiError, + ), + + #[error("I/O: {0}")] + Io( + #[from] + #[source] + std::io::Error, + ), } impl GraphQlError { - fn error_sources( - mut source: Option<&(dyn std::error::Error + 'static)>, - ) -> Option> { - /* Check if we have any sources to derive reasons from */ - if source.is_some() { - /* Add all the error sources to a list of reasons for the error */ - let mut reasons = Vec::new(); - while let Some(error) = source { - reasons.push(error.to_string()); - source = error.source(); - } - Some(reasons) - } else { - None - } - } + fn error_sources( + mut source: Option<&(dyn std::error::Error + 'static)>, + ) -> Option> { + /* Check if we have any sources to derive reasons from */ + if source.is_some() { + /* Add all the error sources to a list of reasons for the error */ + let mut reasons = Vec::new(); + while let Some(error) = source { + reasons.push(error.to_string()); + source = error.source(); + } + Some(reasons) + } else { + None + } + } } impl ErrorExtensions for GraphQlError { - // lets define our base extensions - fn extend(&self) -> Error { - Error::new(self.to_string()).extend_with(|_err, e| { - if let Some(reasons) = Self::error_sources(custom_error::Error::source(&self)) { - let mut i = 1; - for reason in reasons { - e.set(format!("reason {i}"), reason); - i += 1; - } - } - }) - } + // lets define our base extensions + fn extend(&self) -> Error { + Error::new(self.to_string()).extend_with(|_err, e| { + if let Some(reasons) = Self::error_sources(custom_error::Error::source(&self)) { + let mut i = 1; + for reason in reasons { + e.set(format!("reason {i}"), reason); + i += 1; + } + } + }) + } } pub struct Commit { - pub tx_id: String, + pub tx_id: String, } pub struct Rejection { - pub commit: Commit, - pub reason: String, + pub commit: Commit, + pub reason: String, } #[derive(Enum, PartialEq, Eq, Clone, Copy)] pub enum Stage { - Submit, - Commit, + Submit, + Commit, } #[derive(Serialize, Deserialize)] @@ -218,85 +215,85 @@ scalar!(Delta); #[derive(SimpleObject)] pub struct CommitIdentity { - identity: String, - signature: String, - verifying_key: String, + identity: String, + signature: String, + verifying_key: String, } impl From for CommitIdentity { - fn from(identity: SignedIdentity) -> Self { - CommitIdentity { - identity: identity.identity, - signature: identity.signature.map(|x| hex::encode(&*x)).unwrap_or_default(), - verifying_key: identity.verifying_key.map(hex::encode).unwrap_or_default(), - } - } + fn from(identity: 
SignedIdentity) -> Self { + CommitIdentity { + identity: identity.identity, + signature: identity.signature.map(|x| hex::encode(&*x)).unwrap_or_default(), + verifying_key: identity.verifying_key.map(hex::encode).unwrap_or_default(), + } + } } #[derive(SimpleObject)] pub struct CommitNotification { - pub stage: Stage, - pub tx_id: String, - pub error: Option, - pub delta: Option, - pub id: Option, + pub stage: Stage, + pub tx_id: String, + pub error: Option, + pub delta: Option, + pub id: Option, } impl CommitNotification { - pub fn from_submission(tx_id: &ChronicleTransactionId) -> Self { - CommitNotification { - stage: Stage::Submit, - tx_id: tx_id.to_string(), - error: None, - delta: None, - id: None, - } - } - - pub fn from_submission_failed(e: &SubmissionError) -> Self { - CommitNotification { - stage: Stage::Submit, - tx_id: e.tx_id().to_string(), - error: Some(e.to_string()), - delta: None, - id: None, - } - } - - pub fn from_contradiction( - tx_id: &ChronicleTransactionId, - contradiction: &str, - id: SignedIdentity, - ) -> Self { - CommitNotification { - stage: Stage::Commit, - tx_id: tx_id.to_string(), - error: Some(contradiction.to_string()), - delta: None, - id: Some(id.into()), - } - } - - pub async fn from_committed( - tx_id: &ChronicleTransactionId, - delta: Box, - id: SignedIdentity, - ) -> Result { - Ok(CommitNotification { - stage: Stage::Commit, - tx_id: tx_id.to_string(), - error: None, - delta: delta - .to_json() - .compact_stable_order() - .await - .ok() - .map(async_graphql::Value::from_json) - .transpose()? - .map(Delta), - id: Some(id.into()), - }) - } + pub fn from_submission(tx_id: &ChronicleTransactionId) -> Self { + CommitNotification { + stage: Stage::Submit, + tx_id: tx_id.to_string(), + error: None, + delta: None, + id: None, + } + } + + pub fn from_submission_failed(e: &SubmissionError) -> Self { + CommitNotification { + stage: Stage::Submit, + tx_id: e.tx_id().to_string(), + error: Some(e.to_string()), + delta: None, + id: None, + } + } + + pub fn from_contradiction( + tx_id: &ChronicleTransactionId, + contradiction: &str, + id: SignedIdentity, + ) -> Self { + CommitNotification { + stage: Stage::Commit, + tx_id: tx_id.to_string(), + error: Some(contradiction.to_string()), + delta: None, + id: Some(id.into()), + } + } + + pub async fn from_committed( + tx_id: &ChronicleTransactionId, + delta: Box, + id: SignedIdentity, + ) -> Result { + Ok(CommitNotification { + stage: Stage::Commit, + tx_id: tx_id.to_string(), + error: None, + delta: delta + .to_json() + .compact_stable_order() + .await + .ok() + .map(async_graphql::Value::from_json) + .transpose()? + .map(Delta), + id: Some(id.into()), + }) + } } pub struct Subscription; @@ -307,13 +304,13 @@ pub struct Subscription; /// /// [^note](https://graphql.org/blog/subscriptions-in-graphql-and-relay/) impl Subscription { - async fn commit_notifications<'a>( - &self, - ctx: &Context<'a>, - ) -> impl Stream { - let api = ctx.data_unchecked::().clone(); - let mut rx = api.notify_commit.subscribe(); - async_stream::stream! { + async fn commit_notifications<'a>( + &self, + ctx: &Context<'a>, + ) -> impl Stream { + let api = ctx.data_unchecked::().clone(); + let mut rx = api.notify_commit.subscribe(); + async_stream::stream! 
{ loop { match rx.recv().await { Ok(SubmissionStage::Submitted(Ok(submission))) => @@ -338,643 +335,643 @@ impl Subscription { } } } - } + } } #[handler] async fn gql_playground() -> impl IntoResponse { - Html(playground_source(GraphQLPlaygroundConfig::new("/").subscription_endpoint("/ws"))) + Html(playground_source(GraphQLPlaygroundConfig::new("/").subscription_endpoint("/ws"))) } #[derive(Debug, Clone)] pub struct ChronicleGraphQl - where - Query: ObjectType + 'static, - Mutation: ObjectType + 'static, +where + Query: ObjectType + 'static, + Mutation: ObjectType + 'static, { - query: Query, - mutation: Mutation, + query: Query, + mutation: Mutation, } #[derive(Clone)] pub struct JwksUri { - uri: Url, + uri: Url, } impl JwksUri { - pub fn new(uri: Url) -> Self { - Self { uri } - } - - // not ToString to prevent accidental disclosure of credentials - pub fn full_uri(&self) -> String { - self.uri.to_string() - } + pub fn new(uri: Url) -> Self { + Self { uri } + } + + // not ToString to prevent accidental disclosure of credentials + pub fn full_uri(&self) -> String { + self.uri.to_string() + } } impl core::fmt::Debug for JwksUri { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - write!( - fmt, - r#"JwksUri {{ uri: Url {{ scheme: {:?}, cannot_be_a_base: {:?}, username: {:?}, password: ***SECRET***, host: {:?}, port: {:?}, path: {:?}, query: {:?}, fragment: {:?} }} }}"#, - self.uri.scheme(), - self.uri.cannot_be_a_base(), - self.uri.username(), - self.uri.host(), - self.uri.port(), - self.uri.path(), - self.uri.query(), - self.uri.fragment(), - )?; - Ok(()) - } + fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { + write!( + fmt, + r#"JwksUri {{ uri: Url {{ scheme: {:?}, cannot_be_a_base: {:?}, username: {:?}, password: ***SECRET***, host: {:?}, port: {:?}, path: {:?}, query: {:?}, fragment: {:?} }} }}"#, + self.uri.scheme(), + self.uri.cannot_be_a_base(), + self.uri.username(), + self.uri.host(), + self.uri.port(), + self.uri.path(), + self.uri.query(), + self.uri.fragment(), + )?; + Ok(()) + } } #[derive(Clone)] pub struct UserInfoUri { - uri: Url, + uri: Url, } impl UserInfoUri { - pub fn new(uri: Url) -> Self { - Self { uri } - } - - // not ToString to prevent accidental disclosure of credentials - pub fn full_uri(&self) -> String { - self.uri.to_string() - } + pub fn new(uri: Url) -> Self { + Self { uri } + } + + // not ToString to prevent accidental disclosure of credentials + pub fn full_uri(&self) -> String { + self.uri.to_string() + } } impl core::fmt::Debug for UserInfoUri { - fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - write!( - fmt, - r#"UserInfoUri {{ uri: Url {{ scheme: {:?}, cannot_be_a_base: {:?}, username: {:?}, password: ***SECRET***, host: {:?}, port: {:?}, path: {:?}, query: {:?}, fragment: {:?} }} }}"#, - self.uri.scheme(), - self.uri.cannot_be_a_base(), - self.uri.username(), - self.uri.host(), - self.uri.port(), - self.uri.path(), - self.uri.query(), - self.uri.fragment(), - )?; - Ok(()) - } + fn fmt(&self, fmt: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { + write!( + fmt, + r#"UserInfoUri {{ uri: Url {{ scheme: {:?}, cannot_be_a_base: {:?}, username: {:?}, password: ***SECRET***, host: {:?}, port: {:?}, path: {:?}, query: {:?}, fragment: {:?} }} }}"#, + self.uri.scheme(), + self.uri.cannot_be_a_base(), + self.uri.username(), + self.uri.host(), + self.uri.port(), + self.uri.path(), + self.uri.query(), + self.uri.fragment(), + )?; + Ok(()) 
+ } } #[derive(Clone, Debug)] pub struct SecurityConf { - jwks_uri: Option, - userinfo_uri: Option, - id_claims: Option>, - jwt_must_claim: HashMap, - allow_anonymous: bool, - opa: ExecutorContext, + jwks_uri: Option, + userinfo_uri: Option, + id_claims: Option>, + jwt_must_claim: HashMap, + allow_anonymous: bool, + opa: ExecutorContext, } impl SecurityConf { - pub fn new( - jwks_uri: Option, - userinfo_uri: Option, - id_claims: Option>, - jwt_must_claim: HashMap, - allow_anonymous: bool, - opa: ExecutorContext, - ) -> Self { - Self { jwks_uri, userinfo_uri, id_claims, jwt_must_claim, allow_anonymous, opa } - } - - pub fn as_endpoint_conf(&self, cache_expiry_seconds: u32) -> EndpointSecurityConfiguration { - EndpointSecurityConfiguration::new( - TokenChecker::new( - self.jwks_uri.as_ref(), - self.userinfo_uri.as_ref(), - cache_expiry_seconds, - ), - self.jwt_must_claim.clone(), - self.allow_anonymous, - ) - } + pub fn new( + jwks_uri: Option, + userinfo_uri: Option, + id_claims: Option>, + jwt_must_claim: HashMap, + allow_anonymous: bool, + opa: ExecutorContext, + ) -> Self { + Self { jwks_uri, userinfo_uri, id_claims, jwt_must_claim, allow_anonymous, opa } + } + + pub fn as_endpoint_conf(&self, cache_expiry_seconds: u32) -> EndpointSecurityConfiguration { + EndpointSecurityConfiguration::new( + TokenChecker::new( + self.jwks_uri.as_ref(), + self.userinfo_uri.as_ref(), + cache_expiry_seconds, + ), + self.jwt_must_claim.clone(), + self.allow_anonymous, + ) + } } #[async_trait::async_trait] pub trait ChronicleApiServer { - async fn serve_api( - &self, - pool: Pool>, - api: ApiDispatch, - addresses: Vec, - security_conf: &SecurityConf, - serve_graphql: bool, - serve_data: bool, - ) -> Result<(), ApiError>; + async fn serve_api( + &self, + pool: Pool>, + api: ApiDispatch, + addresses: Vec, + security_conf: &SecurityConf, + serve_graphql: bool, + serve_data: bool, + ) -> Result<(), ApiError>; } impl ChronicleGraphQl - where - Query: ObjectType + Copy, - Mutation: ObjectType + Copy, +where + Query: ObjectType + Copy, + Mutation: ObjectType + Copy, { - pub fn new(query: Query, mutation: Mutation) -> Self { - Self { query, mutation } - } - - pub fn exportable_schema(&self) -> String - where - Query: ObjectType + Copy, - Mutation: ObjectType + Copy, - { - let schema = Schema::build(self.query, self.mutation, Subscription).finish(); - - schema.sdl() - } + pub fn new(query: Query, mutation: Mutation) -> Self { + Self { query, mutation } + } + + pub fn exportable_schema(&self) -> String + where + Query: ObjectType + Copy, + Mutation: ObjectType + Copy, + { + let schema = Schema::build(self.query, self.mutation, Subscription).finish(); + + schema.sdl() + } } fn check_required_claim(must_value: &str, actual_value: &serde_json::Value) -> bool { - match actual_value { - serde_json::Value::String(actual_value) => must_value == actual_value, - serde_json::Value::Array(actual_values) => actual_values - .iter() - .any(|actual_value| check_required_claim(must_value, actual_value)), - _ => false, - } + match actual_value { + serde_json::Value::String(actual_value) => must_value == actual_value, + serde_json::Value::Array(actual_values) => actual_values + .iter() + .any(|actual_value| check_required_claim(must_value, actual_value)), + _ => false, + } } #[instrument(level = "trace", ret(Debug))] fn check_required_claims( - must_claim: &HashMap, - actual_claims: &serde_json::Map, + must_claim: &HashMap, + actual_claims: &serde_json::Map, ) -> bool { - for (name, value) in must_claim { - if let Some(json) = 
actual_claims.get(name) { - if !check_required_claim(value, json) { - return false; - } - } else { - return false; - } - } - true + for (name, value) in must_claim { + if let Some(json) = actual_claims.get(name) { + if !check_required_claim(value, json) { + return false; + } + } else { + return false; + } + } + true } async fn check_claims( - secconf: &EndpointSecurityConfiguration, - req: &poem::Request, + secconf: &EndpointSecurityConfiguration, + req: &poem::Request, ) -> Result, poem::Error> { - if let Some(authorization) = req.header("Authorization") { - if let Ok(authorization) = HeaderValue::from_str(authorization) { - let bearer_token_maybe: Option = Credentials::decode(&authorization); - if let Some(bearer_token) = bearer_token_maybe { - if let Ok(claims) = secconf.checker.verify_token(bearer_token.token()).await { - if check_required_claims(&secconf.must_claim, &claims) { - return Ok(Some(JwtClaims(claims))); - } - } - } - } - tracing::trace!("rejected authorization from {}: {:?}", req.remote_addr(), authorization); - Err(poem::error::Error::from_string( - "Authorization header present but without a satisfactory bearer token", - StatusCode::UNAUTHORIZED, - )) - } else if secconf.allow_anonymous { - tracing::trace!("anonymous access from {}", req.remote_addr()); - Ok(None) - } else { - tracing::trace!("rejected anonymous access from {}", req.remote_addr()); - Err(poem::error::Error::from_string( - "required Authorization header not present", - StatusCode::UNAUTHORIZED, - )) - } + if let Some(authorization) = req.header("Authorization") { + if let Ok(authorization) = HeaderValue::from_str(authorization) { + let bearer_token_maybe: Option = Credentials::decode(&authorization); + if let Some(bearer_token) = bearer_token_maybe { + if let Ok(claims) = secconf.checker.verify_token(bearer_token.token()).await { + if check_required_claims(&secconf.must_claim, &claims) { + return Ok(Some(JwtClaims(claims))); + } + } + } + } + tracing::trace!("rejected authorization from {}: {:?}", req.remote_addr(), authorization); + Err(poem::error::Error::from_string( + "Authorization header present but without a satisfactory bearer token", + StatusCode::UNAUTHORIZED, + )) + } else if secconf.allow_anonymous { + tracing::trace!("anonymous access from {}", req.remote_addr()); + Ok(None) + } else { + tracing::trace!("rejected anonymous access from {}", req.remote_addr()); + Err(poem::error::Error::from_string( + "required Authorization header not present", + StatusCode::UNAUTHORIZED, + )) + } } async fn execute_opa_check( - opa_executor: &ExecutorContext, - claim_parser: &Option, - claims: Option<&JwtClaims>, - construct_data: impl FnOnce(&AuthId) -> OpaData, + opa_executor: &ExecutorContext, + claim_parser: &Option, + claims: Option<&JwtClaims>, + construct_data: impl FnOnce(&AuthId) -> OpaData, ) -> Result<(), OpaExecutorError> { - // If unable to get an external_id from the JwtClaims or no claims found, - // identity will be `Anonymous` - let identity = match (claims, claim_parser) { - (Some(claims), Some(parser)) => parser.identity(claims).unwrap_or(AuthId::anonymous()), - _ => AuthId::anonymous(), - }; - - // Create OPA context data for the user identity - let opa_data = construct_data(&identity); - - // Execute OPA check - match opa_executor.evaluate(&identity, &opa_data).await { - Err(error) => { - tracing::warn!( + // If unable to get an external_id from the JwtClaims or no claims found, + // identity will be `Anonymous` + let identity = match (claims, claim_parser) { + (Some(claims), Some(parser)) 
=> parser.identity(claims).unwrap_or(AuthId::anonymous()), + _ => AuthId::anonymous(), + }; + + // Create OPA context data for the user identity + let opa_data = construct_data(&identity); + + // Execute OPA check + match opa_executor.evaluate(&identity, &opa_data).await { + Err(error) => { + tracing::warn!( "{error}: attempt to violate policy rules by identity: {identity}, in context: {:#?}", opa_data ); - Err(error) - } - ok => ok, - } + Err(error) + }, + ok => ok, + } } #[derive(Clone)] pub struct EndpointSecurityConfiguration { - checker: TokenChecker, - pub must_claim: HashMap, - pub allow_anonymous: bool, + checker: TokenChecker, + pub must_claim: HashMap, + pub allow_anonymous: bool, } impl EndpointSecurityConfiguration { - pub fn new( - checker: TokenChecker, - must_claim: HashMap, - allow_anonymous: bool, - ) -> Self { - Self { checker, must_claim, allow_anonymous } - } - - async fn check_status(&self) -> Result<(), AuthorizationError> { - self.checker.check_status().await - } + pub fn new( + checker: TokenChecker, + must_claim: HashMap, + allow_anonymous: bool, + ) -> Self { + Self { checker, must_claim, allow_anonymous } + } + + async fn check_status(&self) -> Result<(), AuthorizationError> { + self.checker.check_status().await + } } struct QueryEndpoint { - secconf: EndpointSecurityConfiguration, - schema: Schema, + secconf: EndpointSecurityConfiguration, + schema: Schema, } impl QueryEndpoint - where - Q: ObjectType + 'static, - M: ObjectType + 'static, - S: SubscriptionType + 'static, +where + Q: ObjectType + 'static, + M: ObjectType + 'static, + S: SubscriptionType + 'static, { - #[instrument(level = "debug", skip_all, ret(Debug))] - async fn respond( - &self, - req: poem::Request, - prepare_req: impl FnOnce(GraphQLBatchRequest) -> async_graphql::BatchRequest, - ) -> poem::Result { - use poem::{FromRequest, IntoResponse}; - let (req, mut body) = req.split(); - let req = prepare_req(GraphQLBatchRequest::from_request(&req, &mut body).await?); - Ok(GraphQLBatchResponse(self.schema.execute_batch(req).await).into_response()) - } + #[instrument(level = "debug", skip_all, ret(Debug))] + async fn respond( + &self, + req: poem::Request, + prepare_req: impl FnOnce(GraphQLBatchRequest) -> async_graphql::BatchRequest, + ) -> poem::Result { + use poem::{FromRequest, IntoResponse}; + let (req, mut body) = req.split(); + let req = prepare_req(GraphQLBatchRequest::from_request(&req, &mut body).await?); + Ok(GraphQLBatchResponse(self.schema.execute_batch(req).await).into_response()) + } } impl Endpoint for QueryEndpoint - where - Q: ObjectType + 'static, - M: ObjectType + 'static, - S: SubscriptionType + 'static, +where + Q: ObjectType + 'static, + M: ObjectType + 'static, + S: SubscriptionType + 'static, { - type Output = poem::Response; - - async fn call(&self, req: poem::Request) -> poem::Result { - let checked_claims = check_claims(&self.secconf, &req).await?; - self.respond(req, |api_req| { - if let Some(claims) = checked_claims { - api_req.0.data(claims) - } else { - api_req.0 - } - }) - .await - } + type Output = poem::Response; + + async fn call(&self, req: poem::Request) -> poem::Result { + let checked_claims = check_claims(&self.secconf, &req).await?; + self.respond(req, |api_req| { + if let Some(claims) = checked_claims { + api_req.0.data(claims) + } else { + api_req.0 + } + }) + .await + } } struct SubscriptionEndpoint { - secconf: EndpointSecurityConfiguration, - schema: Schema, + secconf: EndpointSecurityConfiguration, + schema: Schema, } impl SubscriptionEndpoint - where - Q: 
ObjectType + 'static, - M: ObjectType + 'static, - S: SubscriptionType + 'static, +where + Q: ObjectType + 'static, + M: ObjectType + 'static, + S: SubscriptionType + 'static, { - #[instrument(level = "trace", skip(self, req), ret(Debug))] - async fn respond( - &self, - req: poem::Request, - data: async_graphql::Data, - ) -> poem::Result { - use poem::{FromRequest, IntoResponse}; - let (req, mut body) = req.split(); - let websocket = poem::web::websocket::WebSocket::from_request(&req, &mut body).await?; - let protocol = GraphQLProtocol::from_request(&req, &mut body).await?; - let schema = self.schema.clone(); - Ok(websocket - .protocols(ALL_WEBSOCKET_PROTOCOLS) - .on_upgrade(move |stream| { - GraphQLWebSocket::new(stream, schema, protocol).with_data(data).serve() - }) - .into_response()) - } + #[instrument(level = "trace", skip(self, req), ret(Debug))] + async fn respond( + &self, + req: poem::Request, + data: async_graphql::Data, + ) -> poem::Result { + use poem::{FromRequest, IntoResponse}; + let (req, mut body) = req.split(); + let websocket = poem::web::websocket::WebSocket::from_request(&req, &mut body).await?; + let protocol = GraphQLProtocol::from_request(&req, &mut body).await?; + let schema = self.schema.clone(); + Ok(websocket + .protocols(ALL_WEBSOCKET_PROTOCOLS) + .on_upgrade(move |stream| { + GraphQLWebSocket::new(stream, schema, protocol).with_data(data).serve() + }) + .into_response()) + } } impl Endpoint for SubscriptionEndpoint - where - Q: ObjectType + 'static, - M: ObjectType + 'static, - S: SubscriptionType + 'static, +where + Q: ObjectType + 'static, + M: ObjectType + 'static, + S: SubscriptionType + 'static, { - type Output = poem::Response; - - async fn call(&self, req: poem::Request) -> poem::Result { - let checked_claims = check_claims(&self.secconf, &req).await?; - self.respond( - req, - if let Some(claims) = checked_claims { - let mut data = async_graphql::Data::default(); - data.insert(claims); - data - } else { - async_graphql::Data::default() - }, - ) - .await - } + type Output = poem::Response; + + async fn call(&self, req: poem::Request) -> poem::Result { + let checked_claims = check_claims(&self.secconf, &req).await?; + self.respond( + req, + if let Some(claims) = checked_claims { + let mut data = async_graphql::Data::default(); + data.insert(claims); + data + } else { + async_graphql::Data::default() + }, + ) + .await + } } struct IriEndpoint { - secconf: Option, - store: chronicle_persistence::Store, - opa_executor: ExecutorContext, - claim_parser: Option, + secconf: Option, + store: chronicle_persistence::Store, + opa_executor: ExecutorContext, + claim_parser: Option, } impl IriEndpoint { - async fn response_for_query( - &self, - claims: Option<&JwtClaims>, - prov_type: &str, - id: &ID, - ns: &ExternalId, - retrieve: impl FnOnce( - PooledConnection>, - &ID, - &ExternalId, - ) -> Result, - ) -> poem::Result { - match execute_opa_check(&self.opa_executor, &self.claim_parser, claims, |identity| { - OpaData::operation( - identity, - &json!("ReadData"), - &json!({ + async fn response_for_query( + &self, + claims: Option<&JwtClaims>, + prov_type: &str, + id: &ID, + ns: &ExternalId, + retrieve: impl FnOnce( + PooledConnection>, + &ID, + &ExternalId, + ) -> Result, + ) -> poem::Result { + match execute_opa_check(&self.opa_executor, &self.claim_parser, claims, |identity| { + OpaData::operation( + identity, + &json!("ReadData"), + &json!({ "type": prov_type, "id": id.external_id_part(), "namespace": ns }), - ) - }) - .await - { - Ok(()) => match 
self.store.connection() { - Ok(connection) => match retrieve(connection, id, ns) { - Ok(data) => match data.to_json().compact().await { - Ok(mut json) => { - use serde_json::Value; - if let Value::Object(mut map) = json { - map.insert( - "@context".to_string(), - Value::String("/context".to_string()), - ); - json = Value::Object(map); - } - Ok(IntoResponse::into_response(poem::web::Json(json))) - } - Err(error) => { - tracing::error!("JSON failed compaction: {error}"); - Ok(poem::Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to compact JSON response")) - } - }, - Err(StoreError::Db(diesel::result::Error::NotFound)) | - Err(StoreError::RecordNotFound) => { - tracing::debug!("not found: {prov_type} {} in {ns}", id.external_id_part()); - Ok(poem::Response::builder() - .status(StatusCode::NOT_FOUND) - .body(format!("the specified {prov_type} does not exist"))) - } - Err(error) => { - tracing::error!("failed to retrieve from database: {error}"); - Ok(poem::Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to fetch from backend storage")) - } - }, - Err(error) => { - tracing::error!("failed to connect to database: {error}"); - Ok(poem::Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body("failed to access backend storage")) - } - }, - Err(_) => Ok(poem::Response::builder() - .status(StatusCode::FORBIDDEN) - .body("violation of policy rules")), - } - } - - async fn parse_ns_iri_from_uri_path( - &self, - req: poem::Request, - ) -> poem::Result> { - use poem::{FromRequest, Response, web::Path}; - - #[derive(Clone, Debug, Serialize, Deserialize)] - struct NamespacedIri { - ns: String, - iri: String, - } - - #[derive(Clone, Debug, Serialize, Deserialize)] - struct Iri { - iri: String, - } - - impl From for NamespacedIri { - fn from(value: Iri) -> Self { - NamespacedIri { ns: "default".to_string(), iri: value.iri } - } - } - - let (req, mut body) = req.split(); - - let ns_iri: poem::Result> = - poem::web::Path::from_request(&req, &mut body).await; - - let ns_iri: NamespacedIri = match ns_iri { - Ok(Path(nsi)) => nsi, - Err(_) => { - let path: Path = poem::web::Path::from_request(&req, &mut body).await?; - path.0.into() - } - }; - - match ChronicleIri::from_str(&ns_iri.iri) { - Ok(iri) => Ok(Ok((ns_iri.ns.into(), iri))), - Err(error) => - Ok(Err(Response::builder().status(StatusCode::NOT_FOUND).body(error.to_string()))), - } - } - - #[instrument(level = "trace", skip(self, req), ret(Debug))] - async fn respond( - &self, - req: poem::Request, - claims: Option<&JwtClaims>, - ) -> poem::Result { - match self.parse_ns_iri_from_uri_path(req).await? 
{ - Ok((ns, ChronicleIri::Activity(id))) => - self.response_for_query(claims, "activity", &id, &ns, |mut conn, id, ns| { - self.store.prov_model_for_activity_id(&mut conn, id, ns) - }) - .await, - Ok((ns, ChronicleIri::Agent(id))) => - self.response_for_query(claims, "agent", &id, &ns, |mut conn, id, ns| { - self.store.prov_model_for_agent_id(&mut conn, id, ns) - }) - .await, - Ok((ns, ChronicleIri::Entity(id))) => - self.response_for_query(claims, "entity", &id, &ns, |mut conn, id, ns| { - self.store.prov_model_for_entity_id(&mut conn, id, ns) - }) - .await, - Ok(_) => Ok(poem::Response::builder() - .status(StatusCode::NOT_FOUND) - .body("may query only: activity, agent, entity")), - Err(rsp) => Ok(rsp), - } - } + ) + }) + .await + { + Ok(()) => match self.store.connection() { + Ok(connection) => match retrieve(connection, id, ns) { + Ok(data) => match data.to_json().compact().await { + Ok(mut json) => { + use serde_json::Value; + if let Value::Object(mut map) = json { + map.insert( + "@context".to_string(), + Value::String("/context".to_string()), + ); + json = Value::Object(map); + } + Ok(IntoResponse::into_response(poem::web::Json(json))) + }, + Err(error) => { + tracing::error!("JSON failed compaction: {error}"); + Ok(poem::Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to compact JSON response")) + }, + }, + Err(StoreError::Db(diesel::result::Error::NotFound)) | + Err(StoreError::RecordNotFound) => { + tracing::debug!("not found: {prov_type} {} in {ns}", id.external_id_part()); + Ok(poem::Response::builder() + .status(StatusCode::NOT_FOUND) + .body(format!("the specified {prov_type} does not exist"))) + }, + Err(error) => { + tracing::error!("failed to retrieve from database: {error}"); + Ok(poem::Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to fetch from backend storage")) + }, + }, + Err(error) => { + tracing::error!("failed to connect to database: {error}"); + Ok(poem::Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body("failed to access backend storage")) + }, + }, + Err(_) => Ok(poem::Response::builder() + .status(StatusCode::FORBIDDEN) + .body("violation of policy rules")), + } + } + + async fn parse_ns_iri_from_uri_path( + &self, + req: poem::Request, + ) -> poem::Result> { + use poem::{web::Path, FromRequest, Response}; + + #[derive(Clone, Debug, Serialize, Deserialize)] + struct NamespacedIri { + ns: String, + iri: String, + } + + #[derive(Clone, Debug, Serialize, Deserialize)] + struct Iri { + iri: String, + } + + impl From for NamespacedIri { + fn from(value: Iri) -> Self { + NamespacedIri { ns: "default".to_string(), iri: value.iri } + } + } + + let (req, mut body) = req.split(); + + let ns_iri: poem::Result> = + poem::web::Path::from_request(&req, &mut body).await; + + let ns_iri: NamespacedIri = match ns_iri { + Ok(Path(nsi)) => nsi, + Err(_) => { + let path: Path = poem::web::Path::from_request(&req, &mut body).await?; + path.0.into() + }, + }; + + match ChronicleIri::from_str(&ns_iri.iri) { + Ok(iri) => Ok(Ok((ns_iri.ns.into(), iri))), + Err(error) => + Ok(Err(Response::builder().status(StatusCode::NOT_FOUND).body(error.to_string()))), + } + } + + #[instrument(level = "trace", skip(self, req), ret(Debug))] + async fn respond( + &self, + req: poem::Request, + claims: Option<&JwtClaims>, + ) -> poem::Result { + match self.parse_ns_iri_from_uri_path(req).await? 
{ + Ok((ns, ChronicleIri::Activity(id))) => + self.response_for_query(claims, "activity", &id, &ns, |mut conn, id, ns| { + self.store.prov_model_for_activity_id(&mut conn, id, ns) + }) + .await, + Ok((ns, ChronicleIri::Agent(id))) => + self.response_for_query(claims, "agent", &id, &ns, |mut conn, id, ns| { + self.store.prov_model_for_agent_id(&mut conn, id, ns) + }) + .await, + Ok((ns, ChronicleIri::Entity(id))) => + self.response_for_query(claims, "entity", &id, &ns, |mut conn, id, ns| { + self.store.prov_model_for_entity_id(&mut conn, id, ns) + }) + .await, + Ok(_) => Ok(poem::Response::builder() + .status(StatusCode::NOT_FOUND) + .body("may query only: activity, agent, entity")), + Err(rsp) => Ok(rsp), + } + } } impl Endpoint for IriEndpoint { - type Output = poem::Response; - - async fn call(&self, req: poem::Request) -> poem::Result { - let checked_claims = if let Some(secconf) = &self.secconf { - check_claims(secconf, &req).await? - } else { - None - }; - self.respond(req, checked_claims.as_ref()).await - } + type Output = poem::Response; + + async fn call(&self, req: poem::Request) -> poem::Result { + let checked_claims = if let Some(secconf) = &self.secconf { + check_claims(secconf, &req).await? + } else { + None + }; + self.respond(req, checked_claims.as_ref()).await + } } struct LdContextEndpoint; impl Endpoint for LdContextEndpoint { - type Output = poem::Response; + type Output = poem::Response; - async fn call(&self, _req: poem::Request) -> poem::Result { - let context: &serde_json::Value = &common::context::PROV; - Ok(IntoResponse::into_response(poem::web::Json(context))) - } + async fn call(&self, _req: poem::Request) -> poem::Result { + let context: &serde_json::Value = &common::context::PROV; + Ok(IntoResponse::into_response(poem::web::Json(context))) + } } #[derive(Clone, Debug)] pub struct AuthFromJwt { - id_claims: BTreeSet, - allow_anonymous: bool, + id_claims: BTreeSet, + allow_anonymous: bool, } impl AuthFromJwt { - #[instrument(level = "debug", ret(Debug))] - fn identity(&self, claims: &JwtClaims) -> Result { - AuthId::from_jwt_claims(claims, &self.id_claims) - } + #[instrument(level = "debug", ret(Debug))] + fn identity(&self, claims: &JwtClaims) -> Result { + AuthId::from_jwt_claims(claims, &self.id_claims) + } } #[async_trait::async_trait] impl async_graphql::extensions::Extension for AuthFromJwt { - async fn prepare_request( - &self, - ctx: &async_graphql::extensions::ExtensionContext<'_>, - mut request: async_graphql::Request, - next: async_graphql::extensions::NextPrepareRequest<'_>, - ) -> async_graphql::ServerResult { - if let Some(claims) = ctx.data_opt::() { - match self.identity(claims) { - Ok(chronicle_id) => request = request.data(chronicle_id), - Err(error) if self.allow_anonymous => { - debug!("Identity could not be determined: {:?}", error) - } - Err(error) => { - warn!( + async fn prepare_request( + &self, + ctx: &async_graphql::extensions::ExtensionContext<'_>, + mut request: async_graphql::Request, + next: async_graphql::extensions::NextPrepareRequest<'_>, + ) -> async_graphql::ServerResult { + if let Some(claims) = ctx.data_opt::() { + match self.identity(claims) { + Ok(chronicle_id) => request = request.data(chronicle_id), + Err(error) if self.allow_anonymous => { + debug!("Identity could not be determined: {:?}", error) + }, + Err(error) => { + warn!( "Rejecting request because required identity could not be determined: {:?}", error ); - return Err(ServerError::new("Authorization header present but identity could not be determined from bearer 
token", None)); - } - } - } - next.run(ctx, request).await - } + return Err(ServerError::new("Authorization header present but identity could not be determined from bearer token", None)); + }, + } + } + next.run(ctx, request).await + } } impl async_graphql::extensions::ExtensionFactory for AuthFromJwt { - fn create(&self) -> Arc { - Arc::new(AuthFromJwt { - id_claims: self.id_claims.clone(), - allow_anonymous: self.allow_anonymous, - }) - } + fn create(&self) -> Arc { + Arc::new(AuthFromJwt { + id_claims: self.id_claims.clone(), + allow_anonymous: self.allow_anonymous, + }) + } } #[derive(Clone, Debug)] pub struct OpaCheck { - pub claim_parser: Option, + pub claim_parser: Option, } #[async_trait::async_trait] impl async_graphql::extensions::Extension for OpaCheck { - #[instrument(level = "trace", skip_all, ret(Debug))] - async fn resolve( - &self, - ctx: &async_graphql::extensions::ExtensionContext<'_>, - info: async_graphql::extensions::ResolveInfo<'_>, - next: async_graphql::extensions::NextResolve<'_>, - ) -> async_graphql::ServerResult> { - use async_graphql::ServerError; - use serde_json::Value; - if let Some(opa_executor) = ctx.data_opt::() { - match execute_opa_check( - opa_executor, - &self.claim_parser, - ctx.data_opt::(), - |identity| { - OpaData::graphql( - identity, - &Value::String(info.parent_type.to_string()), - &Value::Array( - info.path_node.to_string_vec().into_iter().map(Value::String).collect(), - ), - ) - }, - ) - .await - { - Ok(()) => next.run(ctx, info).await, - Err(_) => Err(ServerError::new("violation of policy rules", None)), - } - } else { - Err(ServerError::new("cannot check policy rules", None)) - } - } + #[instrument(level = "trace", skip_all, ret(Debug))] + async fn resolve( + &self, + ctx: &async_graphql::extensions::ExtensionContext<'_>, + info: async_graphql::extensions::ResolveInfo<'_>, + next: async_graphql::extensions::NextResolve<'_>, + ) -> async_graphql::ServerResult> { + use async_graphql::ServerError; + use serde_json::Value; + if let Some(opa_executor) = ctx.data_opt::() { + match execute_opa_check( + opa_executor, + &self.claim_parser, + ctx.data_opt::(), + |identity| { + OpaData::graphql( + identity, + &Value::String(info.parent_type.to_string()), + &Value::Array( + info.path_node.to_string_vec().into_iter().map(Value::String).collect(), + ), + ) + }, + ) + .await + { + Ok(()) => next.run(ctx, info).await, + Err(_) => Err(ServerError::new("violation of policy rules", None)), + } + } else { + Err(ServerError::new("cannot check policy rules", None)) + } + } } #[async_trait::async_trait] impl async_graphql::extensions::ExtensionFactory for OpaCheck { - fn create(&self) -> Arc { - Arc::new(OpaCheck { claim_parser: self.claim_parser.clone() }) - } + fn create(&self) -> Arc { + Arc::new(OpaCheck { claim_parser: self.claim_parser.clone() }) + } } lazy_static! { @@ -982,161 +979,160 @@ lazy_static! 
{ } pub fn trigger_shutdown() { - SHUTDOWN_SIGNAL.add_permits(1); + SHUTDOWN_SIGNAL.add_permits(1); } async fn await_shutdown() { - let _permit = SHUTDOWN_SIGNAL.acquire().await.unwrap(); + let _permit = SHUTDOWN_SIGNAL.acquire().await.unwrap(); } - #[derive(Clone)] struct DatabaseContext { - pool: Pool>, + pool: Pool>, } impl DatabaseContext { - fn new(pool: &Pool>) -> Result { - Ok(DatabaseContext { pool: pool.clone() }) - } + fn new(pool: &Pool>) -> Result { + Ok(DatabaseContext { pool: pool.clone() }) + } - fn connection(&self) -> Result>, r2d2::Error> { - self.pool.get() - } + fn connection(&self) -> Result>, r2d2::Error> { + self.pool.get() + } } pub fn construct_schema( - query: Query, - mutation: Mutation, - subscription: Subscription, - claim_parser: Option, - pool: &Pool>, - api: &ApiDispatch, - opa: ExecutorContext, + query: Query, + mutation: Mutation, + subscription: Subscription, + claim_parser: Option, + pool: &Pool>, + api: &ApiDispatch, + opa: ExecutorContext, ) -> Result, StoreError> - where - Query: ObjectType + Copy + 'static, - Mutation: ObjectType + Copy + 'static, - Subscription: SubscriptionType + 'static, +where + Query: ObjectType + Copy + 'static, + Mutation: ObjectType + Copy + 'static, + Subscription: SubscriptionType + 'static, { - let mut schema = Schema::build(query, mutation, subscription) - .extension(OpaCheck { claim_parser: claim_parser.clone() }); - - if let Some(claim_parser) = &claim_parser { - schema = schema.extension(claim_parser.clone()); - } - - Ok(schema - .data(api.clone()) - .data(opa.clone()) - .data(AuthId::anonymous()) - .data(DatabaseContext::new(pool)?) - .finish()) + let mut schema = Schema::build(query, mutation, subscription) + .extension(OpaCheck { claim_parser: claim_parser.clone() }); + + if let Some(claim_parser) = &claim_parser { + schema = schema.extension(claim_parser.clone()); + } + + Ok(schema + .data(api.clone()) + .data(opa.clone()) + .data(AuthId::anonymous()) + .data(DatabaseContext::new(pool)?) 
+ .finish()) } #[async_trait::async_trait] impl ChronicleApiServer for ChronicleGraphQl - where - Query: ObjectType + Copy, - Mutation: ObjectType + Copy, +where + Query: ObjectType + Copy, + Mutation: ObjectType + Copy, { - async fn serve_api( - &self, - pool: Pool>, - api: ApiDispatch, - addresses: Vec, - sec: &SecurityConf, - serve_graphql: bool, - serve_data: bool, - ) -> Result<(), ApiError> { - tracing::info!("Serve graphql on {:?}", addresses); - let sec = sec.clone(); - let claim_parser = sec - .id_claims - .map(|id_claims| AuthFromJwt { id_claims, allow_anonymous: sec.allow_anonymous }); - - let schema = construct_schema( - self.query, - self.mutation, - Subscription, - claim_parser.clone(), - &pool, - &api, - sec.opa.clone(), - )?; - - let iri_endpoint = |secconf| IriEndpoint { - secconf, - store: chronicle_persistence::Store::new(pool.clone()).unwrap(), - opa_executor: sec.opa.clone(), - claim_parser: claim_parser.clone(), - }; - - let mut app = Route::new(); - - match (&sec.jwks_uri, &sec.userinfo_uri) { - (None, None) => { - tracing::warn!("API endpoint uses no authentication"); - - if serve_graphql { - app = app - .at("/", get(gql_playground).post(GraphQL::new(schema.clone()))) - .at("/ws", get(GraphQLSubscription::new(schema))) - }; - if serve_data { - app = app - .at("/context", get(LdContextEndpoint)) - .at("/data/:iri", get(iri_endpoint(None))) - .at("/data/:ns/:iri", get(iri_endpoint(None))) - }; - } - (jwks_uri, userinfo_uri) => { - const CACHE_EXPIRY_SECONDS: u32 = 100; - if let Some(uri) = jwks_uri { - tracing::debug!(oidc_jwks_endpoint = ?uri); - } - if let Some(uri) = userinfo_uri { - tracing::debug!(oidc_userinfo_endpoint = ?uri); - } - - let secconf = || { - EndpointSecurityConfiguration::new( - TokenChecker::new( - jwks_uri.as_ref(), - userinfo_uri.as_ref(), - CACHE_EXPIRY_SECONDS, - ), - sec.jwt_must_claim.clone(), - sec.allow_anonymous, - ) - }; - - secconf().check_status().await?; - - if serve_graphql { - app = app - .at("/", post(QueryEndpoint { secconf: secconf(), schema: schema.clone() })) - .at("/ws", get(SubscriptionEndpoint { secconf: secconf(), schema })) - }; - if serve_data { - app = app - .at("/context", get(LdContextEndpoint)) - .at("/data/:iri", get(iri_endpoint(Some(secconf())))) - .at("/data/:ns/:iri", get(iri_endpoint(Some(secconf())))) - }; - } - } - - let listener = addresses - .into_iter() - .map(|address| TcpListener::bind(address).boxed()) - .reduce(|listener_1, listener_2| listener_1.combine(listener_2).boxed()) - .unwrap(); - - Server::new(listener) - .run_with_graceful_shutdown(app, await_shutdown(), None) - .await?; - - Ok(()) - } + async fn serve_api( + &self, + pool: Pool>, + api: ApiDispatch, + addresses: Vec, + sec: &SecurityConf, + serve_graphql: bool, + serve_data: bool, + ) -> Result<(), ApiError> { + tracing::info!("Serve graphql on {:?}", addresses); + let sec = sec.clone(); + let claim_parser = sec + .id_claims + .map(|id_claims| AuthFromJwt { id_claims, allow_anonymous: sec.allow_anonymous }); + + let schema = construct_schema( + self.query, + self.mutation, + Subscription, + claim_parser.clone(), + &pool, + &api, + sec.opa.clone(), + )?; + + let iri_endpoint = |secconf| IriEndpoint { + secconf, + store: chronicle_persistence::Store::new(pool.clone()).unwrap(), + opa_executor: sec.opa.clone(), + claim_parser: claim_parser.clone(), + }; + + let mut app = Route::new(); + + match (&sec.jwks_uri, &sec.userinfo_uri) { + (None, None) => { + tracing::warn!("API endpoint uses no authentication"); + + if serve_graphql { + app = app + 
.at("/", get(gql_playground).post(GraphQL::new(schema.clone()))) + .at("/ws", get(GraphQLSubscription::new(schema))) + }; + if serve_data { + app = app + .at("/context", get(LdContextEndpoint)) + .at("/data/:iri", get(iri_endpoint(None))) + .at("/data/:ns/:iri", get(iri_endpoint(None))) + }; + }, + (jwks_uri, userinfo_uri) => { + const CACHE_EXPIRY_SECONDS: u32 = 100; + if let Some(uri) = jwks_uri { + tracing::debug!(oidc_jwks_endpoint = ?uri); + } + if let Some(uri) = userinfo_uri { + tracing::debug!(oidc_userinfo_endpoint = ?uri); + } + + let secconf = || { + EndpointSecurityConfiguration::new( + TokenChecker::new( + jwks_uri.as_ref(), + userinfo_uri.as_ref(), + CACHE_EXPIRY_SECONDS, + ), + sec.jwt_must_claim.clone(), + sec.allow_anonymous, + ) + }; + + secconf().check_status().await?; + + if serve_graphql { + app = app + .at("/", post(QueryEndpoint { secconf: secconf(), schema: schema.clone() })) + .at("/ws", get(SubscriptionEndpoint { secconf: secconf(), schema })) + }; + if serve_data { + app = app + .at("/context", get(LdContextEndpoint)) + .at("/data/:iri", get(iri_endpoint(Some(secconf())))) + .at("/data/:ns/:iri", get(iri_endpoint(Some(secconf())))) + }; + }, + } + + let listener = addresses + .into_iter() + .map(|address| TcpListener::bind(address).boxed()) + .reduce(|listener_1, listener_2| listener_1.combine(listener_2).boxed()) + .unwrap(); + + Server::new(listener) + .run_with_graceful_shutdown(app, await_shutdown(), None) + .await?; + + Ok(()) + } } diff --git a/crates/api/src/chronicle_graphql/mutation.rs b/crates/api/src/chronicle_graphql/mutation.rs index ab55d1f17..c0f6fa41a 100644 --- a/crates/api/src/chronicle_graphql/mutation.rs +++ b/crates/api/src/chronicle_graphql/mutation.rs @@ -5,386 +5,388 @@ use chrono::{DateTime, Utc}; use common::{ attributes::Attributes, identity::AuthId, - prov::{ActivityId, AgentId, EntityId, operations::DerivationType, Role}, + prov::{operations::DerivationType, ActivityId, AgentId, EntityId, Role}, }; -use crate::commands::{ActivityCommand, AgentCommand, ApiCommand, ApiResponse, EntityCommand}; -use crate::dispatch::ApiDispatch; +use crate::{ + commands::{ActivityCommand, AgentCommand, ApiCommand, ApiResponse, EntityCommand}, + dispatch::ApiDispatch, +}; use super::Submission; async fn transaction_context<'a>( - res: ApiResponse, - _ctx: &Context<'a>, + res: ApiResponse, + _ctx: &Context<'a>, ) -> async_graphql::Result { - match res { - ApiResponse::Submission { subject, tx_id, .. } => - Ok(Submission::from_submission(&subject, &tx_id)), - ApiResponse::AlreadyRecorded { subject, .. } => - Ok(Submission::from_already_recorded(&subject)), - _ => unreachable!(), - } + match res { + ApiResponse::Submission { subject, tx_id, .. } => + Ok(Submission::from_submission(&subject, &tx_id)), + ApiResponse::AlreadyRecorded { subject, .. 
} => + Ok(Submission::from_already_recorded(&subject)), + _ => unreachable!(), + } } async fn derivation<'a>( - ctx: &Context<'a>, - namespace: Option, - generated_entity: EntityId, - used_entity: EntityId, - derivation: DerivationType, + ctx: &Context<'a>, + namespace: Option, + generated_entity: EntityId, + used_entity: EntityId, + derivation: DerivationType, ) -> async_graphql::Result { - let api = ctx.data::()?; - - let identity = ctx.data::()?.to_owned(); - - let namespace = namespace.unwrap_or_else(|| "default".into()).into(); - - let res = api - .dispatch( - ApiCommand::Entity(EntityCommand::Derive { - id: generated_entity, - namespace, - activity: None, - used_entity, - derivation, - }), - identity, - ) - .await?; - - transaction_context(res, ctx).await + let api = ctx.data::()?; + + let identity = ctx.data::()?.to_owned(); + + let namespace = namespace.unwrap_or_else(|| "default".into()).into(); + + let res = api + .dispatch( + ApiCommand::Entity(EntityCommand::Derive { + id: generated_entity, + namespace, + activity: None, + used_entity, + derivation, + }), + identity, + ) + .await?; + + transaction_context(res, ctx).await } pub async fn agent<'a>( - ctx: &Context<'a>, - external_id: String, - namespace: Option, - attributes: Attributes, + ctx: &Context<'a>, + external_id: String, + namespace: Option, + attributes: Attributes, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()); - let res = api - .dispatch( - ApiCommand::Agent(AgentCommand::Create { - id: external_id.into(), - namespace: namespace.into(), - attributes, - }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Agent(AgentCommand::Create { + id: external_id.into(), + namespace: namespace.into(), + attributes, + }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn activity<'a>( - ctx: &Context<'a>, - external_id: String, - namespace: Option, - attributes: Attributes, + ctx: &Context<'a>, + external_id: String, + namespace: Option, + attributes: Attributes, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Create { - id: external_id.into(), - namespace: namespace.into(), - attributes, - }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Create { + id: external_id.into(), + namespace: namespace.into(), + attributes, + }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn entity<'a>( - ctx: &Context<'a>, - external_id: String, - namespace: Option, - attributes: Attributes, + ctx: &Context<'a>, + external_id: String, + namespace: Option, + attributes: Attributes, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()); + let namespace = namespace.unwrap_or_else(|| 
"default".to_owned()); - let res = api - .dispatch( - ApiCommand::Entity(EntityCommand::Create { - id: external_id.into(), - namespace: namespace.into(), - attributes, - }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Entity(EntityCommand::Create { + id: external_id.into(), + namespace: namespace.into(), + attributes, + }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn acted_on_behalf_of<'a>( - ctx: &Context<'a>, - namespace: Option, - responsible_id: AgentId, - delegate_id: AgentId, - activity_id: Option, - role: Option, + ctx: &Context<'a>, + namespace: Option, + responsible_id: AgentId, + delegate_id: AgentId, + activity_id: Option, + role: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; - - let identity = ctx.data::()?.to_owned(); - - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - - let res = api - .dispatch( - ApiCommand::Agent(AgentCommand::Delegate { - id: responsible_id, - delegate: delegate_id, - activity: activity_id, - namespace, - role, - }), - identity, - ) - .await?; - - transaction_context(res, ctx).await + let api = ctx.data::()?; + + let identity = ctx.data::()?.to_owned(); + + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + + let res = api + .dispatch( + ApiCommand::Agent(AgentCommand::Delegate { + id: responsible_id, + delegate: delegate_id, + activity: activity_id, + namespace, + role, + }), + identity, + ) + .await?; + + transaction_context(res, ctx).await } pub async fn was_derived_from<'a>( - ctx: &Context<'a>, - namespace: Option, - generated_entity: EntityId, - used_entity: EntityId, + ctx: &Context<'a>, + namespace: Option, + generated_entity: EntityId, + used_entity: EntityId, ) -> async_graphql::Result { - derivation(ctx, namespace, generated_entity, used_entity, DerivationType::None).await + derivation(ctx, namespace, generated_entity, used_entity, DerivationType::None).await } pub async fn was_revision_of<'a>( - ctx: &Context<'a>, - namespace: Option, - generated_entity: EntityId, - used_entity: EntityId, + ctx: &Context<'a>, + namespace: Option, + generated_entity: EntityId, + used_entity: EntityId, ) -> async_graphql::Result { - derivation(ctx, namespace, generated_entity, used_entity, DerivationType::Revision).await + derivation(ctx, namespace, generated_entity, used_entity, DerivationType::Revision).await } pub async fn had_primary_source<'a>( - ctx: &Context<'a>, - namespace: Option, - generated_entity: EntityId, - used_entity: EntityId, + ctx: &Context<'a>, + namespace: Option, + generated_entity: EntityId, + used_entity: EntityId, ) -> async_graphql::Result { - derivation(ctx, namespace, generated_entity, used_entity, DerivationType::PrimarySource).await + derivation(ctx, namespace, generated_entity, used_entity, DerivationType::PrimarySource).await } pub async fn was_quoted_from<'a>( - ctx: &Context<'a>, - namespace: Option, - generated_entity: EntityId, - used_entity: EntityId, + ctx: &Context<'a>, + namespace: Option, + generated_entity: EntityId, + used_entity: EntityId, ) -> async_graphql::Result { - derivation(ctx, namespace, generated_entity, used_entity, DerivationType::Quotation).await + derivation(ctx, namespace, generated_entity, used_entity, DerivationType::Quotation).await } pub async fn start_activity<'a>( - ctx: &Context<'a>, - id: ActivityId, - namespace: Option, - agent: Option, // deprecated, slated for removal in CHRON-185 - time: Option>, + ctx: &Context<'a>, + id: 
ActivityId, + namespace: Option, + agent: Option, // deprecated, slated for removal in CHRON-185 + time: Option>, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Start { id, namespace, time, agent }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Start { id, namespace, time, agent }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn end_activity<'a>( - ctx: &Context<'a>, - id: ActivityId, - namespace: Option, - agent: Option, // deprecated, slated for removal in CHRON-185 - time: Option>, + ctx: &Context<'a>, + id: ActivityId, + namespace: Option, + agent: Option, // deprecated, slated for removal in CHRON-185 + time: Option>, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::End { id, namespace, time, agent }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::End { id, namespace, time, agent }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn instant_activity<'a>( - ctx: &Context<'a>, - id: ActivityId, - namespace: Option, - agent: Option, // deprecated, slated for removal in CHRON-185 - time: Option>, + ctx: &Context<'a>, + id: ActivityId, + namespace: Option, + agent: Option, // deprecated, slated for removal in CHRON-185 + time: Option>, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Instant { id, namespace, time, agent }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Instant { id, namespace, time, agent }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn was_associated_with<'a>( - ctx: &Context<'a>, - namespace: Option, - responsible: AgentId, - activity: ActivityId, - role: Option, + ctx: &Context<'a>, + namespace: Option, + responsible: AgentId, + activity: ActivityId, + role: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Associate { - id: activity, - responsible, - role, - namespace, - }), - identity, - ) - .await?; + let res = api + .dispatch( + 
ApiCommand::Activity(ActivityCommand::Associate { + id: activity, + responsible, + role, + namespace, + }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn was_attributed_to<'a>( - ctx: &Context<'a>, - namespace: Option, - responsible: AgentId, - id: EntityId, - role: Option, + ctx: &Context<'a>, + namespace: Option, + responsible: AgentId, + id: EntityId, + role: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Entity(EntityCommand::Attribute { id, namespace, responsible, role }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Entity(EntityCommand::Attribute { id, namespace, responsible, role }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn used<'a>( - ctx: &Context<'a>, - activity: ActivityId, - entity: EntityId, - namespace: Option, + ctx: &Context<'a>, + activity: ActivityId, + entity: EntityId, + namespace: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Use { id: entity, namespace, activity }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Use { id: entity, namespace, activity }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn was_informed_by<'a>( - ctx: &Context<'a>, - activity: ActivityId, - informing_activity: ActivityId, - namespace: Option, + ctx: &Context<'a>, + activity: ActivityId, + informing_activity: ActivityId, + namespace: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::WasInformedBy { - id: activity, - namespace, - informing_activity, - }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::WasInformedBy { + id: activity, + namespace, + informing_activity, + }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } pub async fn was_generated_by<'a>( - ctx: &Context<'a>, - activity: ActivityId, - entity: EntityId, - namespace: Option, + ctx: &Context<'a>, + activity: ActivityId, + entity: EntityId, + namespace: Option, ) -> async_graphql::Result { - let api = ctx.data::()?; + let api = ctx.data::()?; - let identity = ctx.data::()?.to_owned(); + let identity = ctx.data::()?.to_owned(); - let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); + let namespace = namespace.unwrap_or_else(|| "default".to_owned()).into(); - let res = api - .dispatch( - 
ApiCommand::Activity(ActivityCommand::Generate { id: entity, namespace, activity }), - identity, - ) - .await?; + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Generate { id: entity, namespace, activity }), + identity, + ) + .await?; - transaction_context(res, ctx).await + transaction_context(res, ctx).await } diff --git a/crates/api/src/chronicle_graphql/query.rs b/crates/api/src/chronicle_graphql/query.rs index 342f739bf..0d6338541 100644 --- a/crates/api/src/chronicle_graphql/query.rs +++ b/crates/api/src/chronicle_graphql/query.rs @@ -1,17 +1,17 @@ use async_graphql::{ - connection::{Connection, EmptyFields, query}, - Context, ID, + connection::{query, Connection, EmptyFields}, + Context, ID, }; use chrono::{DateTime, NaiveDate, TimeZone, Utc}; use diesel::{debug_query, pg::Pg, prelude::*}; use tracing::{debug, instrument}; use chronicle_persistence::{ - cursor::Cursorize, - queryable::{Activity, Agent, Entity}, - schema::generation, + cursor::Cursorize, + queryable::{Activity, Agent, Entity}, + schema::generation, }; -use common::{prov::{ActivityId, AgentId, DomaintypeId, EntityId, ExternalIdPart}}; +use common::prov::{ActivityId, AgentId, DomaintypeId, EntityId, ExternalIdPart}; use crate::chronicle_graphql::DatabaseContext; @@ -20,301 +20,301 @@ use super::{cursor_project::project_to_nodes, GraphQlError, TimelineOrder}; #[allow(clippy::too_many_arguments)] #[instrument(skip(ctx))] pub async fn activity_timeline<'a>( - ctx: &Context<'a>, - activity_types: Option>, - for_agent: Option>, - for_entity: Option>, - from: Option>, - to: Option>, - order: Option, - namespace: Option, - after: Option, - before: Option, - first: Option, - last: Option, + ctx: &Context<'a>, + activity_types: Option>, + for_agent: Option>, + for_entity: Option>, + from: Option>, + to: Option>, + order: Option, + namespace: Option, + after: Option, + before: Option, + first: Option, + last: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{ - activity, agent, association, delegation, entity, namespace::dsl as nsdsl, usage, - wasinformedby, - }; - - let store = ctx.data::()?; - - let mut connection = store.connection()?; - let ns = namespace.unwrap_or_else(|| "default".into()); - - // Default from and to to the maximum possible time range - let from = from.or_else(|| { - Some( - Utc.from_utc_datetime( - &NaiveDate::from_ymd_opt(1582, 10, 16) - .expect("Invalid date") - .and_hms_opt(0, 0, 0) - .expect("Invalid time"), - ), - ) - }); - - let to = to.or_else(|| Some(Utc::now())); - - let mut sql_query = activity::table - .left_join(wasinformedby::table.on(wasinformedby::activity_id.eq(activity::id))) - .left_join(usage::table.on(usage::activity_id.eq(activity::id))) - .left_join(generation::table.on(generation::activity_id.eq(activity::id))) - .left_join(association::table.on(association::activity_id.eq(activity::id))) - .left_join( - delegation::table.on(delegation::activity_id.nullable().eq(activity::id.nullable())), - ) - .left_join( - entity::table.on(entity::id - .eq(usage::entity_id) - .or(entity::id.eq(generation::generated_entity_id))), - ) - .left_join( - agent::table.on(agent::id - .eq(association::agent_id) - .or(agent::id.eq(delegation::delegate_id)) - .or(agent::id.eq(delegation::responsible_id))), - ) - .inner_join(nsdsl::namespace.on(activity::namespace_id.eq(nsdsl::id))) - .filter(nsdsl::external_id.eq(&**ns)) - .filter(activity::started.ge(from.map(|x| x.naive_utc()))) - .filter(activity::ended.le(to.map(|x| x.naive_utc()))) - .distinct() - 
.select(Activity::as_select()) - .into_boxed(); - - if let Some(for_entity) = for_entity { - if !for_entity.is_empty() { - sql_query = sql_query.filter(entity::external_id.eq_any( - for_entity.iter().map(|x| x.external_id_part().clone()).collect::>(), - )) - } - } - - if let Some(for_agent) = for_agent { - if !for_agent.is_empty() { - sql_query = - sql_query.filter(agent::external_id.eq_any( - for_agent.iter().map(|x| x.external_id_part().clone()).collect::>(), - )) - } - } - - if let Some(activity_types) = activity_types { - if !activity_types.is_empty() { - sql_query = sql_query.filter(activity::domaintype.eq_any( - activity_types.iter().map(|x| x.external_id_part().clone()).collect::>(), - )); - } - } - - if order.unwrap_or(TimelineOrder::NewestFirst) == TimelineOrder::NewestFirst { - sql_query = sql_query.order_by(activity::started.desc()); - } else { - sql_query = sql_query.order_by(activity::started.asc()); - }; - - query(after, before, first, last, |after, before, first, last| async move { - debug!("Cursor query {}", debug_query::(&sql_query).to_string()); - let rx = sql_query.cursor(after, before, first, last); - - let start = rx.start; - let limit = rx.limit; - - let rx = rx.load::<(Activity, i64)>(&mut connection)?; - - Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) - }) - .await + use chronicle_persistence::schema::{ + activity, agent, association, delegation, entity, namespace::dsl as nsdsl, usage, + wasinformedby, + }; + + let store = ctx.data::()?; + + let mut connection = store.connection()?; + let ns = namespace.unwrap_or_else(|| "default".into()); + + // Default from and to to the maximum possible time range + let from = from.or_else(|| { + Some( + Utc.from_utc_datetime( + &NaiveDate::from_ymd_opt(1582, 10, 16) + .expect("Invalid date") + .and_hms_opt(0, 0, 0) + .expect("Invalid time"), + ), + ) + }); + + let to = to.or_else(|| Some(Utc::now())); + + let mut sql_query = activity::table + .left_join(wasinformedby::table.on(wasinformedby::activity_id.eq(activity::id))) + .left_join(usage::table.on(usage::activity_id.eq(activity::id))) + .left_join(generation::table.on(generation::activity_id.eq(activity::id))) + .left_join(association::table.on(association::activity_id.eq(activity::id))) + .left_join( + delegation::table.on(delegation::activity_id.nullable().eq(activity::id.nullable())), + ) + .left_join( + entity::table.on(entity::id + .eq(usage::entity_id) + .or(entity::id.eq(generation::generated_entity_id))), + ) + .left_join( + agent::table.on(agent::id + .eq(association::agent_id) + .or(agent::id.eq(delegation::delegate_id)) + .or(agent::id.eq(delegation::responsible_id))), + ) + .inner_join(nsdsl::namespace.on(activity::namespace_id.eq(nsdsl::id))) + .filter(nsdsl::external_id.eq(&**ns)) + .filter(activity::started.ge(from.map(|x| x.naive_utc()))) + .filter(activity::ended.le(to.map(|x| x.naive_utc()))) + .distinct() + .select(Activity::as_select()) + .into_boxed(); + + if let Some(for_entity) = for_entity { + if !for_entity.is_empty() { + sql_query = sql_query.filter(entity::external_id.eq_any( + for_entity.iter().map(|x| x.external_id_part().clone()).collect::>(), + )) + } + } + + if let Some(for_agent) = for_agent { + if !for_agent.is_empty() { + sql_query = + sql_query.filter(agent::external_id.eq_any( + for_agent.iter().map(|x| x.external_id_part().clone()).collect::>(), + )) + } + } + + if let Some(activity_types) = activity_types { + if !activity_types.is_empty() { + sql_query = sql_query.filter(activity::domaintype.eq_any( + 
activity_types.iter().map(|x| x.external_id_part().clone()).collect::>(), + )); + } + } + + if order.unwrap_or(TimelineOrder::NewestFirst) == TimelineOrder::NewestFirst { + sql_query = sql_query.order_by(activity::started.desc()); + } else { + sql_query = sql_query.order_by(activity::started.asc()); + }; + + query(after, before, first, last, |after, before, first, last| async move { + debug!("Cursor query {}", debug_query::(&sql_query).to_string()); + let rx = sql_query.cursor(after, before, first, last); + + let start = rx.start; + let limit = rx.limit; + + let rx = rx.load::<(Activity, i64)>(&mut connection)?; + + Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) + }) + .await } #[allow(clippy::too_many_arguments)] pub async fn entities_by_type<'a>( - ctx: &Context<'a>, - typ: Option, - namespace: Option, - after: Option, - before: Option, - first: Option, - last: Option, + ctx: &Context<'a>, + typ: Option, + namespace: Option, + after: Option, + before: Option, + first: Option, + last: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{entity, namespace::dsl as nsdsl}; + use chronicle_persistence::schema::{entity, namespace::dsl as nsdsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; - let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + let ns = namespace.unwrap_or_else(|| "default".into()); - let sql_query = entity::table - .inner_join(nsdsl::namespace) - .filter( - nsdsl::external_id - .eq(&**ns) - .and(entity::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned()))), - ) - .select(Entity::as_select()) - .order_by(entity::external_id.asc()); + let sql_query = entity::table + .inner_join(nsdsl::namespace) + .filter( + nsdsl::external_id + .eq(&**ns) + .and(entity::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned()))), + ) + .select(Entity::as_select()) + .order_by(entity::external_id.asc()); - query(after, before, first, last, |after, before, first, last| async move { - debug!("Cursor query {}", debug_query::(&sql_query).to_string()); - let rx = sql_query.cursor(after, before, first, last); + query(after, before, first, last, |after, before, first, last| async move { + debug!("Cursor query {}", debug_query::(&sql_query).to_string()); + let rx = sql_query.cursor(after, before, first, last); - let start = rx.start; - let limit = rx.limit; + let start = rx.start; + let limit = rx.limit; - let rx = rx.load::<(Entity, i64)>(&mut connection)?; + let rx = rx.load::<(Entity, i64)>(&mut connection)?; - Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) - }) - .await + Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) + }) + .await } #[allow(clippy::too_many_arguments)] pub async fn activities_by_type<'a>( - ctx: &Context<'a>, - typ: Option, - namespace: Option, - after: Option, - before: Option, - first: Option, - last: Option, + ctx: &Context<'a>, + typ: Option, + namespace: Option, + after: Option, + before: Option, + first: Option, + last: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{activity, namespace::dsl as nsdsl}; + use chronicle_persistence::schema::{activity, namespace::dsl as nsdsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; - let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + let ns = namespace.unwrap_or_else(|| "default".into()); - let sql_query = - 
activity::table - .inner_join(nsdsl::namespace) - .filter(nsdsl::external_id.eq(&**ns).and( - activity::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned())), - )) - .select(Activity::as_select()) - .order_by(activity::external_id.asc()); + let sql_query = + activity::table + .inner_join(nsdsl::namespace) + .filter(nsdsl::external_id.eq(&**ns).and( + activity::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned())), + )) + .select(Activity::as_select()) + .order_by(activity::external_id.asc()); - query(after, before, first, last, |after, before, first, last| async move { - debug!("Cursor query {}", debug_query::(&sql_query).to_string()); - let rx = sql_query.cursor(after, before, first, last); + query(after, before, first, last, |after, before, first, last| async move { + debug!("Cursor query {}", debug_query::(&sql_query).to_string()); + let rx = sql_query.cursor(after, before, first, last); - let start = rx.start; - let limit = rx.limit; + let start = rx.start; + let limit = rx.limit; - let rx = rx.load::<(Activity, i64)>(&mut connection)?; + let rx = rx.load::<(Activity, i64)>(&mut connection)?; - Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) - }) - .await + Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) + }) + .await } #[allow(clippy::too_many_arguments)] pub async fn agents_by_type<'a>( - ctx: &Context<'a>, - typ: Option, - namespace: Option, - after: Option, - before: Option, - first: Option, - last: Option, + ctx: &Context<'a>, + typ: Option, + namespace: Option, + after: Option, + before: Option, + first: Option, + last: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{agent, namespace::dsl as nsdsl}; + use chronicle_persistence::schema::{agent, namespace::dsl as nsdsl}; - let store = ctx.data::()?; + let store = ctx.data::()?; - let mut connection = store.connection()?; - let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + let ns = namespace.unwrap_or_else(|| "default".into()); - let sql_query = agent::table - .inner_join(nsdsl::namespace) - .filter( - nsdsl::external_id - .eq(&**ns) - .and(agent::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned()))), - ) - .select(Agent::as_select()) - .order_by(agent::external_id.asc()); + let sql_query = agent::table + .inner_join(nsdsl::namespace) + .filter( + nsdsl::external_id + .eq(&**ns) + .and(agent::domaintype.eq(typ.as_ref().map(|x| x.external_id_part().to_owned()))), + ) + .select(Agent::as_select()) + .order_by(agent::external_id.asc()); - query(after, before, first, last, |after, before, first, last| async move { - debug!("Cursor query {}", debug_query::(&sql_query).to_string()); - let rx = sql_query.cursor(after, before, first, last); + query(after, before, first, last, |after, before, first, last| async move { + debug!("Cursor query {}", debug_query::(&sql_query).to_string()); + let rx = sql_query.cursor(after, before, first, last); - let start = rx.start; - let limit = rx.limit; + let start = rx.start; + let limit = rx.limit; - let rx = rx.load::<(Agent, i64)>(&mut connection)?; + let rx = rx.load::<(Agent, i64)>(&mut connection)?; - Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) - }) - .await + Ok::<_, GraphQlError>(project_to_nodes(rx, start, limit)) + }) + .await } pub async fn agent_by_id<'a>( - ctx: &Context<'a>, - id: AgentId, - namespace: Option, + ctx: &Context<'a>, + id: AgentId, + namespace: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{ - 
agent::{self, dsl}, - namespace::dsl as nsdsl, - }; - - let store = ctx.data::()?; - - let ns = namespace.unwrap_or_else(|| "default".into()); - let mut connection = store.connection()?; - - Ok(agent::table - .inner_join(nsdsl::namespace) - .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) - .select(Agent::as_select()) - .first::(&mut connection) - .optional()?) + use chronicle_persistence::schema::{ + agent::{self, dsl}, + namespace::dsl as nsdsl, + }; + + let store = ctx.data::()?; + + let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + + Ok(agent::table + .inner_join(nsdsl::namespace) + .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) + .select(Agent::as_select()) + .first::(&mut connection) + .optional()?) } pub async fn activity_by_id<'a>( - ctx: &Context<'a>, - id: ActivityId, - namespace: Option, + ctx: &Context<'a>, + id: ActivityId, + namespace: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{ - activity::{self, dsl}, - namespace::dsl as nsdsl, - }; - - let store = ctx.data::()?; - - let ns = namespace.unwrap_or_else(|| "default".into()); - let mut connection = store.connection()?; - - Ok(activity::table - .inner_join(nsdsl::namespace) - .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) - .select(Activity::as_select()) - .first::(&mut connection) - .optional()?) + use chronicle_persistence::schema::{ + activity::{self, dsl}, + namespace::dsl as nsdsl, + }; + + let store = ctx.data::()?; + + let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + + Ok(activity::table + .inner_join(nsdsl::namespace) + .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) + .select(Activity::as_select()) + .first::(&mut connection) + .optional()?) } pub async fn entity_by_id<'a>( - ctx: &Context<'a>, - id: EntityId, - namespace: Option, + ctx: &Context<'a>, + id: EntityId, + namespace: Option, ) -> async_graphql::Result> { - use chronicle_persistence::schema::{ - entity::{self, dsl}, - namespace::dsl as nsdsl, - }; - - let store = ctx.data::()?; - let ns = namespace.unwrap_or_else(|| "default".into()); - let mut connection = store.connection()?; - - Ok(entity::table - .inner_join(nsdsl::namespace) - .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) - .select(Entity::as_select()) - .first::(&mut connection) - .optional()?) + use chronicle_persistence::schema::{ + entity::{self, dsl}, + namespace::dsl as nsdsl, + }; + + let store = ctx.data::()?; + let ns = namespace.unwrap_or_else(|| "default".into()); + let mut connection = store.connection()?; + + Ok(entity::table + .inner_join(nsdsl::namespace) + .filter(dsl::external_id.eq(id.external_id_part()).and(nsdsl::external_id.eq(&ns))) + .select(Entity::as_select()) + .first::(&mut connection) + .optional()?) 
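// A minimal, self-contained sketch (table and type names assumed, not taken
// from this PR) of the Diesel lookup pattern used by agent_by_id,
// activity_by_id, and entity_by_id above: `.first(...)` followed by
// `.optional()` turns a missing row into Ok(None) instead of surfacing
// diesel::result::Error::NotFound.
use diesel::prelude::*;

diesel::table! {
    agent (id) {
        id -> Integer,
        external_id -> Text,
    }
}

#[derive(Queryable, Selectable)]
#[diesel(table_name = agent)]
struct AgentRow {
    id: i32,
    external_id: String,
}

fn agent_by_external_id(
    conn: &mut PgConnection,
    external_id: &str,
) -> QueryResult<Option<AgentRow>> {
    agent::table
        .filter(agent::external_id.eq(external_id))
        .select(AgentRow::as_select())
        .first::<AgentRow>(conn)
        .optional() // Err(NotFound) becomes Ok(None); other errors still propagate
}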
} diff --git a/crates/api/src/commands.rs b/crates/api/src/commands.rs index 624aa1a7d..34ccc25c4 100644 --- a/crates/api/src/commands.rs +++ b/crates/api/src/commands.rs @@ -8,301 +8,301 @@ use futures::AsyncRead; use serde::{Deserialize, Serialize}; use common::{ - attributes::Attributes, - prov::{ - ActivityId, - AgentId, ChronicleIri, ChronicleTransactionId, EntityId, ExternalId, NamespaceId, - operations::{ChronicleOperation, DerivationType}, ProvModel, Role, - }, + attributes::Attributes, + prov::{ + operations::{ChronicleOperation, DerivationType}, + ActivityId, AgentId, ChronicleIri, ChronicleTransactionId, EntityId, ExternalId, + NamespaceId, ProvModel, Role, + }, }; #[derive(Debug, Clone, Serialize, Deserialize)] pub enum NamespaceCommand { - Create { id: ExternalId }, + Create { id: ExternalId }, } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum AgentCommand { - Create { - id: ExternalId, - namespace: ExternalId, - attributes: Attributes, - }, - UseInContext { - id: AgentId, - namespace: ExternalId, - }, - Delegate { - id: AgentId, - delegate: AgentId, - activity: Option, - namespace: ExternalId, - role: Option, - }, + Create { + id: ExternalId, + namespace: ExternalId, + attributes: Attributes, + }, + UseInContext { + id: AgentId, + namespace: ExternalId, + }, + Delegate { + id: AgentId, + delegate: AgentId, + activity: Option, + namespace: ExternalId, + role: Option, + }, } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum ActivityCommand { - Create { - id: ExternalId, - namespace: ExternalId, - attributes: Attributes, - }, - Instant { - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - }, - Start { - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - }, - End { - id: ActivityId, - namespace: ExternalId, - time: Option>, - agent: Option, - }, - Use { - id: EntityId, - namespace: ExternalId, - activity: ActivityId, - }, - Generate { - id: EntityId, - namespace: ExternalId, - activity: ActivityId, - }, - WasInformedBy { - id: ActivityId, - namespace: ExternalId, - informing_activity: ActivityId, - }, - Associate { - id: ActivityId, - namespace: ExternalId, - responsible: AgentId, - role: Option, - }, + Create { + id: ExternalId, + namespace: ExternalId, + attributes: Attributes, + }, + Instant { + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + }, + Start { + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + }, + End { + id: ActivityId, + namespace: ExternalId, + time: Option>, + agent: Option, + }, + Use { + id: EntityId, + namespace: ExternalId, + activity: ActivityId, + }, + Generate { + id: EntityId, + namespace: ExternalId, + activity: ActivityId, + }, + WasInformedBy { + id: ActivityId, + namespace: ExternalId, + informing_activity: ActivityId, + }, + Associate { + id: ActivityId, + namespace: ExternalId, + responsible: AgentId, + role: Option, + }, } impl ActivityCommand { - pub fn create( - external_id: impl AsRef, - namespace: impl AsRef, - attributes: Attributes, - ) -> Self { - Self::Create { - id: external_id.as_ref().into(), - namespace: namespace.as_ref().into(), - attributes, - } - } + pub fn create( + external_id: impl AsRef, + namespace: impl AsRef, + attributes: Attributes, + ) -> Self { + Self::Create { + id: external_id.as_ref().into(), + namespace: namespace.as_ref().into(), + attributes, + } + } - pub fn start( - id: ActivityId, - namespace: impl AsRef, - time: Option>, - agent: Option, - ) -> Self { - Self::Start { id, namespace: 
namespace.as_ref().into(), time, agent }
-    }
+    pub fn start(
+        id: ActivityId,
+        namespace: impl AsRef,
+        time: Option>,
+        agent: Option,
+    ) -> Self {
+        Self::Start { id, namespace: namespace.as_ref().into(), time, agent }
+    }

-    pub fn end(
-        id: ActivityId,
-        namespace: impl AsRef,
-        time: Option>,
-        agent: Option,
-    ) -> Self {
-        Self::End { id, namespace: namespace.as_ref().into(), time, agent }
-    }
+    pub fn end(
+        id: ActivityId,
+        namespace: impl AsRef,
+        time: Option>,
+        agent: Option,
+    ) -> Self {
+        Self::End { id, namespace: namespace.as_ref().into(), time, agent }
+    }

-    pub fn instant(
-        id: ActivityId,
-        namespace: impl AsRef,
-        time: Option>,
-        agent: Option,
-    ) -> Self {
-        Self::End { id, namespace: namespace.as_ref().into(), time, agent }
-    }
+    pub fn instant(
+        id: ActivityId,
+        namespace: impl AsRef,
+        time: Option>,
+        agent: Option,
+    ) -> Self {
+        Self::Instant { id, namespace: namespace.as_ref().into(), time, agent }
+    }

-    pub fn r#use(id: EntityId, namespace: impl AsRef, activity: ActivityId) -> Self {
-        Self::Use { id, namespace: namespace.as_ref().into(), activity }
-    }
+    pub fn r#use(id: EntityId, namespace: impl AsRef, activity: ActivityId) -> Self {
+        Self::Use { id, namespace: namespace.as_ref().into(), activity }
+    }

-    pub fn was_informed_by(
-        id: ActivityId,
-        namespace: impl AsRef,
-        informing_activity: ActivityId,
-    ) -> Self {
-        Self::WasInformedBy { id, namespace: namespace.as_ref().into(), informing_activity }
-    }
+    pub fn was_informed_by(
+        id: ActivityId,
+        namespace: impl AsRef,
+        informing_activity: ActivityId,
+    ) -> Self {
+        Self::WasInformedBy { id, namespace: namespace.as_ref().into(), informing_activity }
+    }

-    pub fn generate(id: EntityId, namespace: impl AsRef, activity: ActivityId) -> Self {
-        Self::Generate { id, namespace: namespace.as_ref().into(), activity }
-    }
+    pub fn generate(id: EntityId, namespace: impl AsRef, activity: ActivityId) -> Self {
+        Self::Generate { id, namespace: namespace.as_ref().into(), activity }
+    }
 }

 #[derive(Clone)]
 pub enum PathOrFile {
-    Path(PathBuf),
-    File(Arc>>),
+    Path(PathBuf),
+    File(Arc>>),
 }

 impl core::fmt::Debug for PathOrFile {
-    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-        match self {
-            PathOrFile::Path(path) => f.debug_struct("Path").field("path", path).finish(),
-            PathOrFile::File(_) => f
-                .debug_struct("File")
-                .field("file", &"Non serialisable variant, used in process")
-                .finish(),
-        }
-    }
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            PathOrFile::Path(path) => f.debug_struct("Path").field("path", path).finish(),
+            PathOrFile::File(_) => f
+                .debug_struct("File")
+                .field("file", &"Non serialisable variant, used in process")
+                .finish(),
+        }
+    }
 }

 impl Serialize for PathOrFile {
-    fn serialize(&self, serializer: S) -> Result
-    where
-        S: serde::Serializer,
-    {
-        match self {
-            PathOrFile::Path(path) => path.serialize(serializer),
-            _ => {
-                unreachable!()
-            }
-        }
-    }
+    fn serialize(&self, serializer: S) -> Result
+    where
+        S: serde::Serializer,
+    {
+        match self {
+            PathOrFile::Path(path) => path.serialize(serializer),
+            _ => {
+                unreachable!()
+            },
+        }
+    }
 }

 impl<'de> Deserialize<'de> for PathOrFile {
-    fn deserialize(deserializer: D) -> Result
-    where
-        D: serde::Deserializer<'de>,
-    {
-        Ok(PathOrFile::Path(PathBuf::deserialize(deserializer)?))
-    }
+    fn deserialize(deserializer: D) -> Result
+    where
+        D: serde::Deserializer<'de>,
+    {
+        Ok(PathOrFile::Path(PathBuf::deserialize(deserializer)?))
+    }
 }

 #[derive(Debug, Clone,
Serialize, Deserialize)] pub enum EntityCommand { - Create { - id: ExternalId, - namespace: ExternalId, - attributes: Attributes, - }, - Attribute { - id: EntityId, - namespace: ExternalId, - responsible: AgentId, - role: Option, - }, - Derive { - id: EntityId, - namespace: ExternalId, - derivation: DerivationType, - activity: Option, - used_entity: EntityId, - }, + Create { + id: ExternalId, + namespace: ExternalId, + attributes: Attributes, + }, + Attribute { + id: EntityId, + namespace: ExternalId, + responsible: AgentId, + role: Option, + }, + Derive { + id: EntityId, + namespace: ExternalId, + derivation: DerivationType, + activity: Option, + used_entity: EntityId, + }, } impl EntityCommand { - pub fn create( - external_id: impl AsRef, - namespace: impl AsRef, - attributes: Attributes, - ) -> Self { - Self::Create { - id: external_id.as_ref().into(), - namespace: namespace.as_ref().into(), - attributes, - } - } + pub fn create( + external_id: impl AsRef, + namespace: impl AsRef, + attributes: Attributes, + ) -> Self { + Self::Create { + id: external_id.as_ref().into(), + namespace: namespace.as_ref().into(), + attributes, + } + } - pub fn detach( - id: EntityId, - namespace: impl AsRef, - derivation: DerivationType, - activity: Option, - used_entity: EntityId, - ) -> Self { - Self::Derive { id, namespace: namespace.as_ref().into(), derivation, activity, used_entity } - } + pub fn detach( + id: EntityId, + namespace: impl AsRef, + derivation: DerivationType, + activity: Option, + used_entity: EntityId, + ) -> Self { + Self::Derive { id, namespace: namespace.as_ref().into(), derivation, activity, used_entity } + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct QueryCommand { - pub namespace: String, + pub namespace: String, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct DepthChargeCommand { - pub namespace: NamespaceId, + pub namespace: NamespaceId, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ImportCommand { - pub operations: Vec, + pub operations: Vec, } #[derive(Debug, Clone, Serialize, Deserialize)] pub enum ApiCommand { - NameSpace(NamespaceCommand), - Agent(AgentCommand), - Activity(ActivityCommand), - Entity(EntityCommand), - Query(QueryCommand), - DepthCharge(DepthChargeCommand), - Import(ImportCommand), + NameSpace(NamespaceCommand), + Agent(AgentCommand), + Activity(ActivityCommand), + Entity(EntityCommand), + Query(QueryCommand), + DepthCharge(DepthChargeCommand), + Import(ImportCommand), } #[derive(Debug)] pub enum ApiResponse { - /// The api has successfully executed the operation, but has no useful output - Unit, - /// The operation will not result in any data changes - AlreadyRecorded { subject: ChronicleIri, prov: Box }, - /// The aggregate operation will not result in any data changes - AlreadyRecordedAll, - /// The api has validated the command and submitted a transaction to a ledger - Submission { subject: ChronicleIri, prov: Box, tx_id: ChronicleTransactionId }, - /// The api has successfully executed the query - QueryReply { prov: Box }, - /// The api has submitted the import transactions to a ledger - ImportSubmitted { prov: Box, tx_id: ChronicleTransactionId }, - /// The api has submitted the depth charge transaction to a ledger - DepthChargeSubmitted { tx_id: ChronicleTransactionId }, + /// The api has successfully executed the operation, but has no useful output + Unit, + /// The operation will not result in any data changes + AlreadyRecorded { subject: ChronicleIri, prov: Box }, + /// The aggregate operation will not 
result in any data changes + AlreadyRecordedAll, + /// The api has validated the command and submitted a transaction to a ledger + Submission { subject: ChronicleIri, prov: Box, tx_id: ChronicleTransactionId }, + /// The api has successfully executed the query + QueryReply { prov: Box }, + /// The api has submitted the import transactions to a ledger + ImportSubmitted { prov: Box, tx_id: ChronicleTransactionId }, + /// The api has submitted the depth charge transaction to a ledger + DepthChargeSubmitted { tx_id: ChronicleTransactionId }, } impl ApiResponse { - pub fn submission( - subject: impl Into, - prov: ProvModel, - tx_id: ChronicleTransactionId, - ) -> Self { - ApiResponse::Submission { subject: subject.into(), prov: Box::new(prov), tx_id } - } + pub fn submission( + subject: impl Into, + prov: ProvModel, + tx_id: ChronicleTransactionId, + ) -> Self { + ApiResponse::Submission { subject: subject.into(), prov: Box::new(prov), tx_id } + } - pub fn unit() -> Self { - ApiResponse::Unit - } + pub fn unit() -> Self { + ApiResponse::Unit + } - pub fn query_reply(prov: ProvModel) -> Self { - ApiResponse::QueryReply { prov: Box::new(prov) } - } + pub fn query_reply(prov: ProvModel) -> Self { + ApiResponse::QueryReply { prov: Box::new(prov) } + } - pub fn already_recorded(subject: impl Into, prov: ProvModel) -> Self { - ApiResponse::AlreadyRecorded { subject: subject.into(), prov: Box::new(prov) } - } + pub fn already_recorded(subject: impl Into, prov: ProvModel) -> Self { + ApiResponse::AlreadyRecorded { subject: subject.into(), prov: Box::new(prov) } + } - pub fn depth_charge_submission(tx_id: ChronicleTransactionId) -> Self { - ApiResponse::DepthChargeSubmitted { tx_id } - } + pub fn depth_charge_submission(tx_id: ChronicleTransactionId) -> Self { + ApiResponse::DepthChargeSubmitted { tx_id } + } - pub fn import_submitted(prov: ProvModel, tx_id: ChronicleTransactionId) -> Self { - ApiResponse::ImportSubmitted { prov: Box::new(prov), tx_id } - } + pub fn import_submitted(prov: ProvModel, tx_id: ChronicleTransactionId) -> Self { + ApiResponse::ImportSubmitted { prov: Box::new(prov), tx_id } + } } diff --git a/crates/api/src/dispatch.rs b/crates/api/src/dispatch.rs index 900a8cb81..c4c1605fe 100644 --- a/crates/api/src/dispatch.rs +++ b/crates/api/src/dispatch.rs @@ -1,84 +1,86 @@ -use tokio::sync::mpsc; -use tokio::sync::mpsc::Sender; +use tokio::sync::{mpsc, mpsc::Sender}; use tracing::{error, instrument, trace}; use uuid::Uuid; -use common::identity::AuthId; -use common::ledger::SubmissionStage; -use common::prov::NamespaceId; -use common::prov::operations::ChronicleOperation; +use common::{ + identity::AuthId, + ledger::SubmissionStage, + prov::{operations::ChronicleOperation, NamespaceId}, +}; -use crate::ApiError; -use crate::commands::{ApiCommand, ApiResponse, DepthChargeCommand, ImportCommand}; +use crate::{ + commands::{ApiCommand, ApiResponse, DepthChargeCommand, ImportCommand}, + ApiError, +}; pub type ApiSendWithReply = ((ApiCommand, AuthId), Sender>); #[derive(Debug, Clone)] /// A clonable api handle pub struct ApiDispatch { - pub(crate) tx: Sender, - pub notify_commit: tokio::sync::broadcast::Sender, + pub(crate) tx: Sender, + pub notify_commit: tokio::sync::broadcast::Sender, } impl ApiDispatch { - #[instrument] - pub async fn dispatch( - &self, - command: ApiCommand, - identity: AuthId, - ) -> Result { - let (reply_tx, mut reply_rx) = mpsc::channel(1); - trace!(?command, "Dispatch command to api"); - self.tx.clone().send(((command, identity), reply_tx)).await?; + #[instrument] 
+ pub async fn dispatch( + &self, + command: ApiCommand, + identity: AuthId, + ) -> Result { + let (reply_tx, mut reply_rx) = mpsc::channel(1); + trace!(?command, "Dispatch command to api"); + self.tx.clone().send(((command, identity), reply_tx)).await?; - let reply = reply_rx.recv().await; + let reply = reply_rx.recv().await; - if let Some(Err(ref error)) = reply { - error!(?error, "Api dispatch"); - } + if let Some(Err(ref error)) = reply { + error!(?error, "Api dispatch"); + } - reply.ok_or(ApiError::ApiShutdownRx {})? - } + reply.ok_or(ApiError::ApiShutdownRx {})? + } - #[instrument] - pub async fn handle_import_command( - &self, - identity: AuthId, - operations: Vec, - ) -> Result { - self.import_operations(identity, operations).await - } + #[instrument] + pub async fn handle_import_command( + &self, + identity: AuthId, + operations: Vec, + ) -> Result { + self.import_operations(identity, operations).await + } - #[instrument] - async fn import_operations( - &self, - identity: AuthId, - operations: Vec, - ) -> Result { - self.dispatch(ApiCommand::Import(ImportCommand { operations }), identity.clone()) - .await - } + #[instrument] + async fn import_operations( + &self, + identity: AuthId, + operations: Vec, + ) -> Result { + self.dispatch(ApiCommand::Import(ImportCommand { operations }), identity.clone()) + .await + } - #[instrument] - pub async fn handle_depth_charge( - &self, - namespace: &str, - uuid: &Uuid, - ) -> Result { - self.dispatch_depth_charge( - AuthId::Chronicle, - NamespaceId::from_external_id(namespace, *uuid), - ) - .await - } + #[instrument] + pub async fn handle_depth_charge( + &self, + namespace: &str, + uuid: &Uuid, + ) -> Result { + self.dispatch_depth_charge( + AuthId::Chronicle, + NamespaceId::from_external_id(namespace, *uuid), + ) + .await + } - #[instrument] - async fn dispatch_depth_charge( - &self, - identity: AuthId, - namespace: NamespaceId, - ) -> Result { - self.dispatch(ApiCommand::DepthCharge(DepthChargeCommand { namespace }), identity.clone()) - .await - } + #[instrument] + async fn dispatch_depth_charge( + &self, + identity: AuthId, + namespace: NamespaceId, + ) -> Result { + self.dispatch(ApiCommand::DepthCharge(DepthChargeCommand { namespace }), identity.clone()) + .await + } } diff --git a/crates/api/src/error.rs b/crates/api/src/error.rs index 8dd5f8e7a..95051eaae 100644 --- a/crates/api/src/error.rs +++ b/crates/api/src/error.rs @@ -1,153 +1,152 @@ -use std::convert::Infallible; -use std::net::AddrParseError; +use std::{convert::Infallible, net::AddrParseError}; use thiserror::Error; -use tokio::sync::mpsc::error::SendError; -use tokio::task::JoinError; +use tokio::{sync::mpsc::error::SendError, task::JoinError}; use user_error::UFE; use chronicle_signing::SecretError; -use common::identity::IdentityError; -use common::ledger::SubmissionError; -use common::prov::{Contradiction, ProcessorError}; +use common::{ + identity::IdentityError, + ledger::SubmissionError, + prov::{Contradiction, ProcessorError}, +}; use protocol_substrate::SubxtClientError; -use crate::chronicle_graphql; -use crate::dispatch::ApiSendWithReply; +use crate::{chronicle_graphql, dispatch::ApiSendWithReply}; #[derive(Error, Debug)] pub enum ApiError { - #[error("Storage: {0:?}")] - Store( - #[from] - #[source] - chronicle_persistence::StoreError, - ), - - #[error("Storage: {0:?}")] - ArrowService(#[source] anyhow::Error), - - #[error("Transaction failed: {0}")] - Transaction( - #[from] - #[source] - diesel::result::Error, - ), - - #[error("Invalid IRI: {0}")] - Iri( - #[from] 
- #[source] - iref::Error, - ), - - #[error("JSON-LD processing: {0}")] - JsonLD(String), - - #[error("Signing: {0}")] - Signing( - #[from] - #[source] - SecretError, - ), - - #[error("No agent is currently in use, please call agent use or supply an agent in your call")] - NoCurrentAgent, - - #[error("Api shut down before reply")] - ApiShutdownRx, - - #[error("Api shut down before send: {0}")] - ApiShutdownTx( - #[from] - #[source] - SendError, - ), - - #[error("Invalid socket address: {0}")] - AddressParse( - #[from] - #[source] - AddrParseError, - ), - - #[error("Connection pool: {0}")] - ConnectionPool( - #[from] - #[source] - r2d2::Error, - ), - - #[error("IO error: {0}")] - InputOutput( - #[from] - #[source] - std::io::Error, - ), - - #[error("Blocking thread pool: {0}")] - Join( - #[from] - #[source] - JoinError, - ), - - #[error("No appropriate activity to end")] - NotCurrentActivity, - - #[error("Processor: {0}")] - ProcessorError( - #[from] - #[source] - ProcessorError, - ), - - #[error("Identity: {0}")] - IdentityError( - #[from] - #[source] - IdentityError, - ), - - #[error("Authentication endpoint error: {0}")] - AuthenticationEndpoint( - #[from] - #[source] - chronicle_graphql::AuthorizationError, - ), - - #[error("Substrate : {0}")] - ClientError( - #[from] - #[source] - SubxtClientError, - ), - - #[error("Submission : {0}")] - Submission( - #[from] - #[source] - SubmissionError, - ), - - #[error("Contradiction: {0}")] - Contradiction(Contradiction), - - #[error("Embedded substrate: {0}")] - EmbeddedSubstrate(anyhow::Error), + #[error("Storage: {0:?}")] + Store( + #[from] + #[source] + chronicle_persistence::StoreError, + ), + + #[error("Storage: {0:?}")] + ArrowService(#[source] anyhow::Error), + + #[error("Transaction failed: {0}")] + Transaction( + #[from] + #[source] + diesel::result::Error, + ), + + #[error("Invalid IRI: {0}")] + Iri( + #[from] + #[source] + iref::Error, + ), + + #[error("JSON-LD processing: {0}")] + JsonLD(String), + + #[error("Signing: {0}")] + Signing( + #[from] + #[source] + SecretError, + ), + + #[error("No agent is currently in use, please call agent use or supply an agent in your call")] + NoCurrentAgent, + + #[error("Api shut down before reply")] + ApiShutdownRx, + + #[error("Api shut down before send: {0}")] + ApiShutdownTx( + #[from] + #[source] + SendError, + ), + + #[error("Invalid socket address: {0}")] + AddressParse( + #[from] + #[source] + AddrParseError, + ), + + #[error("Connection pool: {0}")] + ConnectionPool( + #[from] + #[source] + r2d2::Error, + ), + + #[error("IO error: {0}")] + InputOutput( + #[from] + #[source] + std::io::Error, + ), + + #[error("Blocking thread pool: {0}")] + Join( + #[from] + #[source] + JoinError, + ), + + #[error("No appropriate activity to end")] + NotCurrentActivity, + + #[error("Processor: {0}")] + ProcessorError( + #[from] + #[source] + ProcessorError, + ), + + #[error("Identity: {0}")] + IdentityError( + #[from] + #[source] + IdentityError, + ), + + #[error("Authentication endpoint error: {0}")] + AuthenticationEndpoint( + #[from] + #[source] + chronicle_graphql::AuthorizationError, + ), + + #[error("Substrate : {0}")] + ClientError( + #[from] + #[source] + SubxtClientError, + ), + + #[error("Submission : {0}")] + Submission( + #[from] + #[source] + SubmissionError, + ), + + #[error("Contradiction: {0}")] + Contradiction(Contradiction), + + #[error("Embedded substrate: {0}")] + EmbeddedSubstrate(anyhow::Error), } /// Ugly but we need this until ! 
is stable, see impl From for ApiError { - fn from(_: Infallible) -> Self { - unreachable!() - } + fn from(_: Infallible) -> Self { + unreachable!() + } } impl From for ApiError { - fn from(x: Contradiction) -> Self { - Self::Contradiction(x) - } + fn from(x: Contradiction) -> Self { + Self::Contradiction(x) + } } impl UFE for ApiError {} diff --git a/crates/api/src/lib.rs b/crates/api/src/lib.rs index 57196f343..922aef36f 100644 --- a/crates/api/src/lib.rs +++ b/crates/api/src/lib.rs @@ -1,52 +1,43 @@ #![cfg_attr(feature = "strict", deny(warnings))] - - - - - pub use api::{Api, UuidGen}; -pub use chronicle_persistence::Store; -pub use chronicle_persistence::StoreError; +pub use chronicle_persistence::{Store, StoreError}; use chronicle_signing::ChronicleKnownKeyNamesSigner; -use common::{ - identity::{AuthId, IdentityError, SignedIdentity}, -}; +use common::identity::{AuthId, IdentityError, SignedIdentity}; pub use dispatch::ApiDispatch; pub use error::ApiError; - pub mod chronicle_graphql; pub mod commands; -pub mod import; -mod error; mod api; mod dispatch; +mod error; +pub mod import; pub trait ChronicleSigned { - /// Get the user identity's [`SignedIdentity`] - fn signed_identity( - &self, - store: &S, - ) -> Result; + /// Get the user identity's [`SignedIdentity`] + fn signed_identity( + &self, + store: &S, + ) -> Result; } impl ChronicleSigned for AuthId { - fn signed_identity( - &self, - store: &S, - ) -> Result { - let signable = self.to_string(); - let signature = futures::executor::block_on(store.chronicle_sign(signable.as_bytes())) - .map_err(|e| IdentityError::Signing(e.into()))?; - let public_key = futures::executor::block_on(store.chronicle_verifying()) - .map_err(|e| IdentityError::Signing(e.into()))?; - - Ok(SignedIdentity { - identity: signable, - signature: signature.into(), - verifying_key: Some(public_key.to_bytes().to_vec()), - }) - } + fn signed_identity( + &self, + store: &S, + ) -> Result { + let signable = self.to_string(); + let signature = futures::executor::block_on(store.chronicle_sign(signable.as_bytes())) + .map_err(|e| IdentityError::Signing(e.into()))?; + let public_key = futures::executor::block_on(store.chronicle_verifying()) + .map_err(|e| IdentityError::Signing(e.into()))?; + + Ok(SignedIdentity { + identity: signable, + signature: signature.into(), + verifying_key: Some(public_key.to_bytes().to_vec()), + }) + } } diff --git a/crates/chronicle-arrow/src/lib.rs b/crates/chronicle-arrow/src/lib.rs index 8eb9f2c06..22d48ec24 100644 --- a/crates/chronicle-arrow/src/lib.rs +++ b/crates/chronicle-arrow/src/lib.rs @@ -1,34 +1,30 @@ -use std::{sync::Arc, vec::Vec}; -use std::net::SocketAddr; +use std::{net::SocketAddr, sync::Arc, vec::Vec}; use arrow_flight::{ - Action, ActionType, Criteria, Empty, flight_service_server::FlightService, FlightData, - FlightDescriptor, FlightInfo, HandshakeRequest, HandshakeResponse, PutResult, SchemaResult, - Ticket, + decode::FlightRecordBatchStream, flight_service_server::FlightService, Action, ActionType, + Criteria, Empty, FlightData, FlightDescriptor, FlightEndpoint, FlightInfo, HandshakeRequest, + HandshakeResponse, IpcMessage, PutResult, SchemaAsIpc, SchemaResult, Ticket, }; -use arrow_flight::{FlightEndpoint, IpcMessage, SchemaAsIpc}; -use arrow_flight::decode::FlightRecordBatchStream; use arrow_schema::ArrowError; -use diesel::{PgConnection, r2d2::ConnectionManager}; +use diesel::{r2d2::ConnectionManager, PgConnection}; use futures::{ future::join_all, - FutureExt, - stream::{self, BoxStream}, StreamExt, + stream::{self, 
BoxStream}, + FutureExt, StreamExt, }; use lazy_static::lazy_static; use r2d2::Pool; use serde::Serialize; use thiserror::Error; -use tokio::sync::broadcast; -use tokio::task::spawn_blocking; -use tonic::{Request, Response, Status, Streaming, transport::Server}; +use tokio::{sync::broadcast, task::spawn_blocking}; +use tonic::{transport::Server, Request, Response, Status, Streaming}; use tracing::{info, instrument}; -use api::ApiDispatch; -use api::ApiError; -use api::chronicle_graphql::EndpointSecurityConfiguration; -use common::{domain::TypeName, prov::ExternalIdPart}; -use common::prov::{DomaintypeId, ParseIriError}; +use api::{chronicle_graphql::EndpointSecurityConfiguration, ApiDispatch, ApiError}; +use common::{ + domain::TypeName, + prov::{DomaintypeId, ExternalIdPart, ParseIriError}, +}; use meta::{DomainTypeMeta, Term}; use query::{ activity_count_by_type, agent_count_by_type, entity_count_by_type, EntityAndReferences, @@ -39,8 +35,8 @@ use crate::{ operations::{batch_to_flight_data, process_record_batch}, peekablestream::PeekableFlightDataStream, query::{ - ActivityAndReferences, AgentAndReferences, load_activities_by_type, load_agents_by_type, - load_entities_by_type, + load_activities_by_type, load_agents_by_type, load_entities_by_type, ActivityAndReferences, + AgentAndReferences, }, }; @@ -51,583 +47,583 @@ mod query; #[derive(Error, Debug)] pub enum ChronicleArrowError { - #[error("Arrow error: {0}")] - ArrowSchemaError( - #[from] - #[source] - ArrowError, - ), - #[error("Missing schema for the requested entity or activity")] - MissingSchemaError, - - #[error("Schema field not found: {0}")] - SchemaFieldNotFound(String), - - #[error("Missing column: {0}")] - MissingColumn(String), - - #[error("Column type mismatch for: {0}")] - ColumnTypeMismatch(String), - - #[error("Invalid value: {0}")] - InvalidValue(String), - - #[error("Invalid descriptor path")] - InvalidDescriptorPath, - - #[error("Metadata not found")] - MetadataNotFound, - - #[error("API error: {0}")] - ApiError( - #[from] - #[source] - ApiError, - ), - #[error("Parse IRI : {0}")] - IriError( - #[from] - #[source] - ParseIriError, - ), - - #[error("Database connection pool error: {0}")] - PoolError( - #[from] - #[source] - r2d2::Error, - ), - - #[error("Diesel error: {0}")] - DieselError( - #[from] - #[source] - diesel::result::Error, - ), - - #[error("Serde JSON error: {0}")] - SerdeJsonError( - #[from] - #[source] - serde_json::Error, - ), - - #[error("Join error: {0}")] - JoinError( - #[from] - #[source] - tokio::task::JoinError, - ), - - #[error("UUID parse error: {0}")] - UuidParseError( - #[from] - #[source] - uuid::Error, - ), + #[error("Arrow error: {0}")] + ArrowSchemaError( + #[from] + #[source] + ArrowError, + ), + #[error("Missing schema for the requested entity or activity")] + MissingSchemaError, + + #[error("Schema field not found: {0}")] + SchemaFieldNotFound(String), + + #[error("Missing column: {0}")] + MissingColumn(String), + + #[error("Column type mismatch for: {0}")] + ColumnTypeMismatch(String), + + #[error("Invalid value: {0}")] + InvalidValue(String), + + #[error("Invalid descriptor path")] + InvalidDescriptorPath, + + #[error("Metadata not found")] + MetadataNotFound, + + #[error("API error: {0}")] + ApiError( + #[from] + #[source] + ApiError, + ), + #[error("Parse IRI : {0}")] + IriError( + #[from] + #[source] + ParseIriError, + ), + + #[error("Database connection pool error: {0}")] + PoolError( + #[from] + #[source] + r2d2::Error, + ), + + #[error("Diesel error: {0}")] + DieselError( + 
#[from] + #[source] + diesel::result::Error, + ), + + #[error("Serde JSON error: {0}")] + SerdeJsonError( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Join error: {0}")] + JoinError( + #[from] + #[source] + tokio::task::JoinError, + ), + + #[error("UUID parse error: {0}")] + UuidParseError( + #[from] + #[source] + uuid::Error, + ), } #[instrument(skip(pool, term, domaintype))] pub async fn calculate_count_by_metadata_term( - pool: &Pool>, - term: &Term, - domaintype: Option, + pool: &Pool>, + term: &Term, + domaintype: Option, ) -> Result { - let pool = pool.clone(); - match term { - Term::Entity => - spawn_blocking(move || { - entity_count_by_type( - &pool, - domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), - ) - }) - .await, - Term::Agent => - spawn_blocking(move || { - agent_count_by_type( - &pool, - domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), - ) - }) - .await, - Term::Activity => - spawn_blocking(move || { - activity_count_by_type( - &pool, - domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), - ) - }) - .await, - _ => Ok(Ok(0)), - } - .map_err(|e| Status::from_error(e.into())) - .and_then(|res| res.map_err(|e| Status::from_error(e.into()))) + let pool = pool.clone(); + match term { + Term::Entity => + spawn_blocking(move || { + entity_count_by_type( + &pool, + domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), + ) + }) + .await, + Term::Agent => + spawn_blocking(move || { + agent_count_by_type( + &pool, + domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), + ) + }) + .await, + Term::Activity => + spawn_blocking(move || { + activity_count_by_type( + &pool, + domaintype.map(|x| x.to_string()).iter().map(|s| s.as_str()).collect(), + ) + }) + .await, + _ => Ok(Ok(0)), + } + .map_err(|e| Status::from_error(e.into())) + .and_then(|res| res.map_err(|e| Status::from_error(e.into()))) } async fn create_flight_info_for_type( - pool: Arc>>, - domain_items: Vec, - term: Term, - record_batch_size: usize, + pool: Arc>>, + domain_items: Vec, + term: Term, + record_batch_size: usize, ) -> BoxStream<'static, Result> { - stream::iter(domain_items.into_iter().map(|item| Ok::<_, tonic::Status>(item))) - .then(move |item| { - let pool = pool.clone(); - async move { - let item = item?; // Handle the Result from the iterator - let descriptor_path = vec![term.to_string(), item.as_type_name()]; - let metadata = - get_domain_type_meta_from_cache(&descriptor_path).ok_or_else(|| { - Status::from_error(Box::new(ChronicleArrowError::MissingSchemaError)) - })?; - - let count = calculate_count_by_metadata_term( - &pool, - &term, - Some(item.as_type_name().to_string()), - ) - .await?; - - let tickets = (0..count) - .step_by(record_batch_size as _) - .map(|start| { - let end = std::cmp::min(start as usize + record_batch_size, count as usize); - - let ticket_metadata = ChronicleTicket::new( - term, - metadata.typ.as_ref().map(|x| x.as_domain_type_id()), - start as _, - (end - start as usize) as _, - ); - Ticket::try_from(ticket_metadata) - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e)))) - }) - .collect::, _>>()?; - - let mut flight_info = FlightInfo::new(); - - for ticket in tickets { - flight_info = - flight_info.with_endpoint(FlightEndpoint::new().with_ticket(ticket)); - } - - Ok(flight_info - .with_descriptor(FlightDescriptor::new_path(descriptor_path)) - .try_with_schema(&metadata.schema) - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? 
- .with_total_records(count)) - } - }) - .boxed() + stream::iter(domain_items.into_iter().map(|item| Ok::<_, tonic::Status>(item))) + .then(move |item| { + let pool = pool.clone(); + async move { + let item = item?; // Handle the Result from the iterator + let descriptor_path = vec![term.to_string(), item.as_type_name()]; + let metadata = + get_domain_type_meta_from_cache(&descriptor_path).ok_or_else(|| { + Status::from_error(Box::new(ChronicleArrowError::MissingSchemaError)) + })?; + + let count = calculate_count_by_metadata_term( + &pool, + &term, + Some(item.as_type_name().to_string()), + ) + .await?; + + let tickets = (0..count) + .step_by(record_batch_size as _) + .map(|start| { + let end = std::cmp::min(start as usize + record_batch_size, count as usize); + + let ticket_metadata = ChronicleTicket::new( + term, + metadata.typ.as_ref().map(|x| x.as_domain_type_id()), + start as _, + (end - start as usize) as _, + ); + Ticket::try_from(ticket_metadata) + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e)))) + }) + .collect::, _>>()?; + + let mut flight_info = FlightInfo::new(); + + for ticket in tickets { + flight_info = + flight_info.with_endpoint(FlightEndpoint::new().with_ticket(ticket)); + } + + Ok(flight_info + .with_descriptor(FlightDescriptor::new_path(descriptor_path)) + .try_with_schema(&metadata.schema) + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? + .with_total_records(count)) + } + }) + .boxed() } #[derive(Clone)] pub struct FlightServiceImpl { - domain: common::domain::ChronicleDomainDef, - pool: r2d2::Pool>, - api: ApiDispatch, - record_batch_size: usize, - security: EndpointSecurityConfiguration, + domain: common::domain::ChronicleDomainDef, + pool: r2d2::Pool>, + api: ApiDispatch, + record_batch_size: usize, + security: EndpointSecurityConfiguration, } impl FlightServiceImpl { - pub fn new( - domain: &common::domain::ChronicleDomainDef, - pool: &r2d2::Pool>, - api: &ApiDispatch, - security: EndpointSecurityConfiguration, - record_batch_size: usize, - ) -> Self { - Self { - domain: domain.clone(), - pool: pool.clone(), - api: api.clone(), - security, - record_batch_size, - } - } + pub fn new( + domain: &common::domain::ChronicleDomainDef, + pool: &r2d2::Pool>, + api: &ApiDispatch, + security: EndpointSecurityConfiguration, + record_batch_size: usize, + ) -> Self { + Self { + domain: domain.clone(), + pool: pool.clone(), + api: api.clone(), + security, + record_batch_size, + } + } } #[derive(Debug, Serialize, serde::Deserialize)] struct ChronicleTicket { - term: Term, - descriptor_path: Vec, - typ: Option, - start: u64, - count: u64, + term: Term, + descriptor_path: Vec, + typ: Option, + start: u64, + count: u64, } impl ChronicleTicket { - pub fn new(term: Term, typ: Option, start: u64, count: u64) -> Self { - Self { - term, - descriptor_path: vec![ - term.to_string(), - typ.as_ref() - .map(|x| x.external_id_part().to_string()) - .unwrap_or_else(|| format!("Prov{}", term)), - ], - typ, - start, - count, - } - } - - pub fn descriptor_path(&self) -> &Vec { - &self.descriptor_path - } + pub fn new(term: Term, typ: Option, start: u64, count: u64) -> Self { + Self { + term, + descriptor_path: vec![ + term.to_string(), + typ.as_ref() + .map(|x| x.external_id_part().to_string()) + .unwrap_or_else(|| format!("Prov{}", term)), + ], + typ, + start, + count, + } + } + + pub fn descriptor_path(&self) -> &Vec { + &self.descriptor_path + } } impl TryFrom for Ticket { - type Error = serde_json::Error; + type Error = serde_json::Error; - fn 
try_from(ticket: ChronicleTicket) -> Result { - let ticket_bytes = serde_json::to_vec(&ticket)?; - Ok(Ticket { ticket: ticket_bytes.into() }) - } + fn try_from(ticket: ChronicleTicket) -> Result { + let ticket_bytes = serde_json::to_vec(&ticket)?; + Ok(Ticket { ticket: ticket_bytes.into() }) + } } impl TryFrom for ChronicleTicket { - type Error = serde_json::Error; + type Error = serde_json::Error; - fn try_from(ticket: Ticket) -> Result { - let ticket_data = ticket.ticket.to_vec(); - serde_json::from_slice(&ticket_data) - } + fn try_from(ticket: Ticket) -> Result { + let ticket_data = ticket.ticket.to_vec(); + serde_json::from_slice(&ticket_data) + } } fn parse_flight_descriptor_path(descriptor: &FlightDescriptor) -> Result<(Term, String), Status> { - let path = &descriptor.path; - if path.is_empty() { - return Err(Status::invalid_argument("FlightDescriptor path is empty")); - } + let path = &descriptor.path; + if path.is_empty() { + return Err(Status::invalid_argument("FlightDescriptor path is empty")); + } - let term = path[0] - .parse::() - .map_err(|_| Status::invalid_argument("First element of the path must be a valid Term"))?; + let term = path[0] + .parse::() + .map_err(|_| Status::invalid_argument("First element of the path must be a valid Term"))?; - Ok((term, path[1].to_string())) + Ok((term, path[1].to_string())) } #[tonic::async_trait] impl FlightService for FlightServiceImpl { - type DoActionStream = BoxStream<'static, Result>; - type DoExchangeStream = BoxStream<'static, Result>; - type DoGetStream = BoxStream<'static, Result>; - type DoPutStream = BoxStream<'static, Result>; - type HandshakeStream = BoxStream<'static, Result>; - type ListActionsStream = BoxStream<'static, Result>; - type ListFlightsStream = BoxStream<'static, Result>; - - async fn handshake( - &self, - _request: Request>, - ) -> Result, Status> { - Ok(Response::new(Box::pin(futures::stream::empty()) as Self::HandshakeStream)) - } - - #[instrument(skip(self, _request))] - async fn list_flights( - &self, - _request: Request, - ) -> Result, Status> { - let entity_flights_stream = create_flight_info_for_type( - Arc::new(self.pool.clone()), - self.domain.entities.to_vec(), - Term::Entity, - self.record_batch_size, - ) - .await; - let activities_flights_stream = create_flight_info_for_type( - Arc::new(self.pool.clone()), - self.domain.activities.to_vec(), - Term::Activity, - self.record_batch_size, - ) - .await; - let agents_flights_stream = create_flight_info_for_type( - Arc::new(self.pool.clone()), - self.domain.agents.to_vec(), - Term::Agent, - self.record_batch_size, - ) - .await; - - let combined_stream = futures::stream::select_all(vec![ - entity_flights_stream, - activities_flights_stream, - agents_flights_stream, - ]) - .boxed(); - - Ok(Response::new(combined_stream as Self::ListFlightsStream)) - } - - #[instrument(skip(self, request))] - async fn get_flight_info( - &self, - request: Request, - ) -> Result, Status> { - let descriptor = request.into_inner(); - - let (term, type_name) = parse_flight_descriptor_path(&descriptor)?; - - let mut flight_info_stream = match term { - Term::Entity => { - let definition = self - .domain - .entities - .iter() - .find(|&item| item.as_type_name() == type_name) - .ok_or_else(|| { - Status::not_found(format!( - "Definition not found for term: {:?}, type_name: {}", - term, type_name - )) - })?; - create_flight_info_for_type( - Arc::new(self.pool.clone()), - vec![definition.clone()], - term, - self.record_batch_size, - ) - .boxed() - } - Term::Activity => { - let 
definition = self - .domain - .activities - .iter() - .find(|&item| item.as_type_name() == type_name) - .ok_or_else(|| { - Status::not_found(format!( - "Definition not found for term: {:?}, type_name: {}", - term, type_name - )) - })?; - create_flight_info_for_type( - Arc::new(self.pool.clone()), - vec![definition.clone()], - term, - self.record_batch_size, - ) - .boxed() - } - Term::Agent => { - let definition = self - .domain - .agents - .iter() - .find(|&item| item.as_type_name() == type_name) - .ok_or_else(|| { - Status::not_found(format!( - "Definition not found for term: {:?}, type_name: {}", - term, type_name - )) - })?; - create_flight_info_for_type( - Arc::new(self.pool.clone()), - vec![definition.clone()], - term, - self.record_batch_size, - ) - .boxed() - } - _ => - return Err(Status::not_found(format!( - "Definition not found for term: {:?}, type_name: {}", - term, type_name - ))), - } - .await; - - let flight_info = flight_info_stream - .next() - .await - .ok_or(Status::not_found("No flight info for descriptor"))? - .map_err(|e| Status::from_error(e.into()))?; - - Ok(Response::new(flight_info)) - } - - #[instrument(skip(self, request))] - async fn get_schema( - &self, - request: Request, - ) -> Result, Status> { - let descriptor = request.into_inner(); - - let schema = get_domain_type_meta_from_cache(&descriptor.path) - .ok_or_else(|| ChronicleArrowError::MissingSchemaError) - .map_err(|e| Status::internal(format!("Failed to get cached schema: {}", e)))?; - - let options = arrow_ipc::writer::IpcWriteOptions::default(); - let ipc_message_result = SchemaAsIpc::new(&schema.schema, &options).try_into(); - match ipc_message_result { - Ok(IpcMessage(schema)) => Ok(Response::new(SchemaResult { schema })), - Err(e) => - Err(Status::internal(format!("Failed to convert schema to IPC message: {}", e))), - } - } - - #[instrument(skip(self))] - async fn do_get( - &self, - request: Request, - ) -> Result, Status> { - let ticket = request.into_inner(); - let ticket: ChronicleTicket = ticket - .try_into() - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))?; - - let meta = get_domain_type_meta_from_cache(&ticket.descriptor_path) - .ok_or(Status::from_error(Box::new(ChronicleArrowError::InvalidDescriptorPath)))?; - - tracing::debug!(ticket = ?ticket); - - let terms_result = match ticket.term { - Term::Entity => { - let pool = self.pool.clone(); - let meta_clone = meta.clone(); - let result = tokio::task::spawn_blocking(move || { - load_entities_by_type( - &pool, - &ticket.typ, - &meta_clone.attributes, - ticket.start, - ticket.count, - ) - }) - .await - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? - .map_err(|e| Status::from_error(Box::new(e)))?; - - let (entities, _returned_records, _total_records) = result; - - EntityAndReferences::to_record_batch(entities, &meta).map_err(|e| { - Status::internal(format!("Failed to convert to record batch: {}", e)) - })? - } - Term::Activity => { - let pool = self.pool.clone(); - let result = tokio::task::spawn_blocking(move || { - load_activities_by_type(&pool, &ticket.typ, ticket.start, ticket.count) - }) - .await - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? - .map_err(|e| Status::from_error(Box::new(e)))?; - - let (activities, _returned_records, _total_records) = result; - - ActivityAndReferences::to_record_batch(activities, &meta).map_err(|e| { - Status::internal(format!("Failed to convert to record batch: {}", e)) - })? 
- } - Term::Agent => { - let pool = self.pool.clone(); - let result = tokio::task::spawn_blocking(move || { - load_agents_by_type(&pool, &ticket.typ, ticket.start, ticket.count) - }) - .await - .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? - .map_err(|e| Status::from_error(Box::new(e)))?; - - let (agents, _returned_records, _total_records) = result; - - AgentAndReferences::to_record_batch(agents, &meta).map_err(|e| { - Status::internal(format!("Failed to convert to record batch: {}", e)) - })? - } - Term::Namespace => { - tracing::error!("Attempted to put namespaces, which is not supported."); - return Err(Status::internal("Cannot put namespaces")); - } - }; - - let flight_data_result = batch_to_flight_data( - &FlightDescriptor::new_path(ticket.descriptor_path), - &meta, - terms_result, - ); - - match flight_data_result { - Ok(flight_data) => { - let stream = futures::stream::iter(flight_data.into_iter().map(Ok)).boxed(); - Ok(Response::new(stream)) - } - Err(e) => Err(Status::internal(e.to_string())), - } - } - - #[instrument(skip(self, request))] - async fn do_put( - &self, - request: Request>, - ) -> Result, Status> { - let mut stream = request.map(PeekableFlightDataStream::new).into_inner(); - let first_item = stream.peek().await; - - let flight_descriptor = match &first_item { - Some(Ok(flight_data)) => match flight_data.flight_descriptor.clone() { - Some(descriptor) => descriptor, - None => return Err(Status::invalid_argument("Flight data has no descriptor")), - }, - Some(Err(e)) => - return Err(Status::internal(format!("Failed to get first item from stream: {}", e))), - None => { - return Err(Status::invalid_argument("Stream is empty")); - } - }; - - let filtered_stream = stream.filter_map(|item| async move { - match item { - Ok(flight_data) => { - tracing::trace!("Processing flight data item {:?}", flight_data); - Some(Ok(flight_data)) - } - Err(e) => { - tracing::error!(error = %e, "Error processing stream item."); - None - } - } - }); - - let mut decoder = FlightRecordBatchStream::new_from_flight_data(filtered_stream); - while let Some(batch) = decoder.next().await { - let batch = batch?; - tracing::debug!("Processing batch of: {:?}", batch.num_rows()); - process_record_batch(&flight_descriptor.path, batch, &self.api) - .await - .map_err(|e| Status::from_error(e.into()))?; - } - Ok(Response::new(Box::pin(stream::empty()) as Self::DoPutStream)) - } - - #[tracing::instrument(skip(self, _request))] - async fn do_action( - &self, - _request: Request, - ) -> Result, Status> { - tracing::info!("No actions available, returning empty stream."); - Ok(Response::new(Box::pin(stream::empty()))) - } - - #[tracing::instrument(skip(self, _request))] - async fn list_actions( - &self, - _request: Request, - ) -> Result, Status> { - tracing::info!("No actions available."); - Ok(Response::new(Box::pin(stream::empty()))) - } - - async fn do_exchange( - &self, - _request: Request>, - ) -> Result, Status> { - Err(Status::unimplemented("Implement do_exchange")) - } + type DoActionStream = BoxStream<'static, Result>; + type DoExchangeStream = BoxStream<'static, Result>; + type DoGetStream = BoxStream<'static, Result>; + type DoPutStream = BoxStream<'static, Result>; + type HandshakeStream = BoxStream<'static, Result>; + type ListActionsStream = BoxStream<'static, Result>; + type ListFlightsStream = BoxStream<'static, Result>; + + async fn handshake( + &self, + _request: Request>, + ) -> Result, Status> { + Ok(Response::new(Box::pin(futures::stream::empty()) as 
Self::HandshakeStream)) + } + + #[instrument(skip(self, _request))] + async fn list_flights( + &self, + _request: Request, + ) -> Result, Status> { + let entity_flights_stream = create_flight_info_for_type( + Arc::new(self.pool.clone()), + self.domain.entities.to_vec(), + Term::Entity, + self.record_batch_size, + ) + .await; + let activities_flights_stream = create_flight_info_for_type( + Arc::new(self.pool.clone()), + self.domain.activities.to_vec(), + Term::Activity, + self.record_batch_size, + ) + .await; + let agents_flights_stream = create_flight_info_for_type( + Arc::new(self.pool.clone()), + self.domain.agents.to_vec(), + Term::Agent, + self.record_batch_size, + ) + .await; + + let combined_stream = futures::stream::select_all(vec![ + entity_flights_stream, + activities_flights_stream, + agents_flights_stream, + ]) + .boxed(); + + Ok(Response::new(combined_stream as Self::ListFlightsStream)) + } + + #[instrument(skip(self, request))] + async fn get_flight_info( + &self, + request: Request, + ) -> Result, Status> { + let descriptor = request.into_inner(); + + let (term, type_name) = parse_flight_descriptor_path(&descriptor)?; + + let mut flight_info_stream = match term { + Term::Entity => { + let definition = self + .domain + .entities + .iter() + .find(|&item| item.as_type_name() == type_name) + .ok_or_else(|| { + Status::not_found(format!( + "Definition not found for term: {:?}, type_name: {}", + term, type_name + )) + })?; + create_flight_info_for_type( + Arc::new(self.pool.clone()), + vec![definition.clone()], + term, + self.record_batch_size, + ) + .boxed() + }, + Term::Activity => { + let definition = self + .domain + .activities + .iter() + .find(|&item| item.as_type_name() == type_name) + .ok_or_else(|| { + Status::not_found(format!( + "Definition not found for term: {:?}, type_name: {}", + term, type_name + )) + })?; + create_flight_info_for_type( + Arc::new(self.pool.clone()), + vec![definition.clone()], + term, + self.record_batch_size, + ) + .boxed() + }, + Term::Agent => { + let definition = self + .domain + .agents + .iter() + .find(|&item| item.as_type_name() == type_name) + .ok_or_else(|| { + Status::not_found(format!( + "Definition not found for term: {:?}, type_name: {}", + term, type_name + )) + })?; + create_flight_info_for_type( + Arc::new(self.pool.clone()), + vec![definition.clone()], + term, + self.record_batch_size, + ) + .boxed() + }, + _ => + return Err(Status::not_found(format!( + "Definition not found for term: {:?}, type_name: {}", + term, type_name + ))), + } + .await; + + let flight_info = flight_info_stream + .next() + .await + .ok_or(Status::not_found("No flight info for descriptor"))? 
+ .map_err(|e| Status::from_error(e.into()))?; + + Ok(Response::new(flight_info)) + } + + #[instrument(skip(self, request))] + async fn get_schema( + &self, + request: Request, + ) -> Result, Status> { + let descriptor = request.into_inner(); + + let schema = get_domain_type_meta_from_cache(&descriptor.path) + .ok_or_else(|| ChronicleArrowError::MissingSchemaError) + .map_err(|e| Status::internal(format!("Failed to get cached schema: {}", e)))?; + + let options = arrow_ipc::writer::IpcWriteOptions::default(); + let ipc_message_result = SchemaAsIpc::new(&schema.schema, &options).try_into(); + match ipc_message_result { + Ok(IpcMessage(schema)) => Ok(Response::new(SchemaResult { schema })), + Err(e) => + Err(Status::internal(format!("Failed to convert schema to IPC message: {}", e))), + } + } + + #[instrument(skip(self))] + async fn do_get( + &self, + request: Request, + ) -> Result, Status> { + let ticket = request.into_inner(); + let ticket: ChronicleTicket = ticket + .try_into() + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))?; + + let meta = get_domain_type_meta_from_cache(&ticket.descriptor_path) + .ok_or(Status::from_error(Box::new(ChronicleArrowError::InvalidDescriptorPath)))?; + + tracing::debug!(ticket = ?ticket); + + let terms_result = match ticket.term { + Term::Entity => { + let pool = self.pool.clone(); + let meta_clone = meta.clone(); + let result = tokio::task::spawn_blocking(move || { + load_entities_by_type( + &pool, + &ticket.typ, + &meta_clone.attributes, + ticket.start, + ticket.count, + ) + }) + .await + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? + .map_err(|e| Status::from_error(Box::new(e)))?; + + let (entities, _returned_records, _total_records) = result; + + EntityAndReferences::to_record_batch(entities, &meta).map_err(|e| { + Status::internal(format!("Failed to convert to record batch: {}", e)) + })? + }, + Term::Activity => { + let pool = self.pool.clone(); + let result = tokio::task::spawn_blocking(move || { + load_activities_by_type(&pool, &ticket.typ, ticket.start, ticket.count) + }) + .await + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? + .map_err(|e| Status::from_error(Box::new(e)))?; + + let (activities, _returned_records, _total_records) = result; + + ActivityAndReferences::to_record_batch(activities, &meta).map_err(|e| { + Status::internal(format!("Failed to convert to record batch: {}", e)) + })? + }, + Term::Agent => { + let pool = self.pool.clone(); + let result = tokio::task::spawn_blocking(move || { + load_agents_by_type(&pool, &ticket.typ, ticket.start, ticket.count) + }) + .await + .map_err(|e| Status::from_error(Box::new(ChronicleArrowError::from(e))))? + .map_err(|e| Status::from_error(Box::new(e)))?; + + let (agents, _returned_records, _total_records) = result; + + AgentAndReferences::to_record_batch(agents, &meta).map_err(|e| { + Status::internal(format!("Failed to convert to record batch: {}", e)) + })? 
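+ // Each `Term` arm above follows the same pattern: the synchronous Diesel
+ // query runs on the blocking thread pool via `spawn_blocking` (r2d2
+ // connections are not async), windowed by the ticket's `start`/`count`,
+ // and the returned rows are converted into a single Arrow `RecordBatch`
+ // against the cached domain schema.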
+ }, + Term::Namespace => { + tracing::error!("Attempted to put namespaces, which is not supported."); + return Err(Status::internal("Cannot put namespaces")); + }, + }; + + let flight_data_result = batch_to_flight_data( + &FlightDescriptor::new_path(ticket.descriptor_path), + &meta, + terms_result, + ); + + match flight_data_result { + Ok(flight_data) => { + let stream = futures::stream::iter(flight_data.into_iter().map(Ok)).boxed(); + Ok(Response::new(stream)) + }, + Err(e) => Err(Status::internal(e.to_string())), + } + } + + #[instrument(skip(self, request))] + async fn do_put( + &self, + request: Request>, + ) -> Result, Status> { + let mut stream = request.map(PeekableFlightDataStream::new).into_inner(); + let first_item = stream.peek().await; + + let flight_descriptor = match &first_item { + Some(Ok(flight_data)) => match flight_data.flight_descriptor.clone() { + Some(descriptor) => descriptor, + None => return Err(Status::invalid_argument("Flight data has no descriptor")), + }, + Some(Err(e)) => + return Err(Status::internal(format!("Failed to get first item from stream: {}", e))), + None => { + return Err(Status::invalid_argument("Stream is empty")); + }, + }; + + let filtered_stream = stream.filter_map(|item| async move { + match item { + Ok(flight_data) => { + tracing::trace!("Processing flight data item {:?}", flight_data); + Some(Ok(flight_data)) + }, + Err(e) => { + tracing::error!(error = %e, "Error processing stream item."); + None + }, + } + }); + + let mut decoder = FlightRecordBatchStream::new_from_flight_data(filtered_stream); + while let Some(batch) = decoder.next().await { + let batch = batch?; + tracing::debug!("Processing batch of: {:?}", batch.num_rows()); + process_record_batch(&flight_descriptor.path, batch, &self.api) + .await + .map_err(|e| Status::from_error(e.into()))?; + } + Ok(Response::new(Box::pin(stream::empty()) as Self::DoPutStream)) + } + + #[tracing::instrument(skip(self, _request))] + async fn do_action( + &self, + _request: Request, + ) -> Result, Status> { + tracing::info!("No actions available, returning empty stream."); + Ok(Response::new(Box::pin(stream::empty()))) + } + + #[tracing::instrument(skip(self, _request))] + async fn list_actions( + &self, + _request: Request, + ) -> Result, Status> { + tracing::info!("No actions available."); + Ok(Response::new(Box::pin(stream::empty()))) + } + + async fn do_exchange( + &self, + _request: Request>, + ) -> Result, Status> { + Err(Status::unimplemented("Implement do_exchange")) + } } lazy_static! { @@ -637,44 +633,44 @@ lazy_static! { /// Triggers a shutdown signal across the application. pub fn trigger_shutdown() { - let _ = SHUTDOWN_CHANNEL.0.send(()); + let _ = SHUTDOWN_CHANNEL.0.send(()); } /// Returns a receiver for the shutdown signal. 
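/// Each call subscribes to the broadcast channel independently, so the future
/// resolves once `trigger_shutdown` has sent the signal.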
pub async fn await_shutdown() { - SHUTDOWN_CHANNEL.0.subscribe().recv().await.ok(); + SHUTDOWN_CHANNEL.0.subscribe().recv().await.ok(); } #[instrument(skip(pool, api, security))] pub async fn run_flight_service( - domain: &common::domain::ChronicleDomainDef, - pool: &Pool>, - api: &ApiDispatch, - security: EndpointSecurityConfiguration, - addrs: &Vec, - record_batch_size: usize, + domain: &common::domain::ChronicleDomainDef, + pool: &Pool>, + api: &ApiDispatch, + security: EndpointSecurityConfiguration, + addrs: &Vec, + record_batch_size: usize, ) -> Result<(), tonic::transport::Error> { - meta::cache_domain_schemas(domain); - let mut services = vec![]; - for addr in addrs { - let flight_service = - FlightServiceImpl::new(domain, pool, api, security.clone(), record_batch_size); + meta::cache_domain_schemas(domain); + let mut services = vec![]; + for addr in addrs { + let flight_service = + FlightServiceImpl::new(domain, pool, api, security.clone(), record_batch_size); - info!("Starting flight service at {}", addr); + info!("Starting flight service at {}", addr); - let server = Server::builder() - .add_service(arrow_flight::flight_service_server::FlightServiceServer::new( - flight_service, - )) - .serve_with_shutdown(*addr, await_shutdown()); + let server = Server::builder() + .add_service(arrow_flight::flight_service_server::FlightServiceServer::new( + flight_service, + )) + .serve_with_shutdown(*addr, await_shutdown()); - services.push(server); - } + services.push(server); + } - let results: Result, _> = join_all(services.into_iter()).await.into_iter().collect(); - results?; + let results: Result, _> = join_all(services.into_iter()).await.into_iter().collect(); + results?; - Ok(()) + Ok(()) } #[cfg(test)] @@ -683,7 +679,7 @@ mod tests { use arrow_array::RecordBatch; use arrow_flight::{ - Criteria, decode::FlightRecordBatchStream, flight_service_client::FlightServiceClient, + decode::FlightRecordBatchStream, flight_service_client::FlightServiceClient, Criteria, FlightData, FlightDescriptor, FlightInfo, SchemaAsIpc, }; use arrow_ipc::writer::{self, IpcWriteOptions}; @@ -691,7 +687,7 @@ mod tests { use chrono::{TimeZone, Utc}; use futures::{pin_mut, stream, StreamExt}; use portpicker::pick_unused_port; - use tonic::{Request, Status, transport::Channel}; + use tonic::{transport::Channel, Request, Status}; use uuid::Uuid; use api::{ @@ -703,11 +699,11 @@ mod tests { attributes::{Attribute, Attributes}, domain::{ChronicleDomainDef, PrimitiveType}, identity::AuthId, - prov::{NamespaceId, operations::ChronicleOperation}, + prov::{operations::ChronicleOperation, NamespaceId}, }; use crate::{ - meta::{cache_domain_schemas, DomainTypeMeta, get_domain_type_meta_from_cache}, + meta::{cache_domain_schemas, get_domain_type_meta_from_cache, DomainTypeMeta}, query::{ ActedOnBehalfOfRef, ActivityAndReferences, ActivityAssociationRef, AgentAndReferences, AgentAttributionRef, AgentInteraction, DerivationRef, EntityAndReferences, @@ -716,43 +712,43 @@ mod tests { }; async fn setup_test_environment<'a>( - domain: &ChronicleDomainDef, - ) -> Result< - (FlightServiceClient, TestDispatch<'a>), - Box, - > { - chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty); - let api = test_api().await; - let port = pick_unused_port().expect("No ports free"); - let addr = SocketAddr::from(([127, 0, 0, 1], port)); - let pool = api.temporary_database().connection_pool().unwrap(); - let dispatch = api.api_dispatch().clone(); - let domain = domain.clone(); - tokio::spawn(async move { - 
super::run_flight_service( - &domain, - &pool, - &dispatch, - EndpointSecurityConfiguration::new( - TokenChecker::new(None, None, 30), - HashMap::default(), - true, - ), - &vec![addr], - 10, - ) - .await - .unwrap(); - }); - - tokio::time::sleep(Duration::from_secs(5)).await; - - let client = FlightServiceClient::connect(format!("http://{}", addr)).await?; - Ok((client, api)) - } - - fn create_test_domain_def() -> ChronicleDomainDef { - let yaml = r#" + domain: &ChronicleDomainDef, + ) -> Result< + (FlightServiceClient, TestDispatch<'a>), + Box, + > { + chronicle_telemetry::telemetry(chronicle_telemetry::ConsoleLogging::Pretty); + let api = test_api().await; + let port = pick_unused_port().expect("No ports free"); + let addr = SocketAddr::from(([127, 0, 0, 1], port)); + let pool = api.temporary_database().connection_pool().unwrap(); + let dispatch = api.api_dispatch().clone(); + let domain = domain.clone(); + tokio::spawn(async move { + super::run_flight_service( + &domain, + &pool, + &dispatch, + EndpointSecurityConfiguration::new( + TokenChecker::new(None, None, 30), + HashMap::default(), + true, + ), + &vec![addr], + 10, + ) + .await + .unwrap(); + }); + + tokio::time::sleep(Duration::from_secs(5)).await; + + let client = FlightServiceClient::connect(format!("http://{}", addr)).await?; + Ok((client, api)) + } + + fn create_test_domain_def() -> ChronicleDomainDef { + let yaml = r#" name: Manufacturing attributes: BatchID: @@ -788,356 +784,354 @@ roles: - MANUFACTURER "#; - ChronicleDomainDef::from_input_string(yaml).unwrap() - } - - fn create_attributes( - typ: Option<&(dyn common::domain::TypeName + Send + Sync)>, - attributes: &[(String, PrimitiveType)], - ) -> Attributes { - Attributes::new( - typ.map(|x| x.as_domain_type_id()), - attributes - .iter() - .map(|(name, typ)| { - let value = match typ { - PrimitiveType::String => - serde_json::Value::String(format!("{}-value", name)), - PrimitiveType::Int => - serde_json::Value::Number(serde_json::Number::from(42)), - PrimitiveType::Bool => serde_json::Value::Bool(true), - PrimitiveType::JSON => - serde_json::Value::String(format!("{{\"{}\": \"example\"}}", name)), - }; - Attribute::new(name, value) - }) - .collect(), - ) - } - - fn create_test_entity( - attributes: Vec<(String, PrimitiveType)>, - meta: &DomainTypeMeta, - count: u32, - ) -> RecordBatch { - let mut entities = Vec::new(); - for i in 0..count { - let entity = EntityAndReferences { - id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), - namespace_name: "default".to_string(), - namespace_uuid: Uuid::default().into_bytes(), - attributes: create_attributes(meta.typ.as_deref(), &attributes), - was_generated_by: vec![format!("activity-{}", i), format!("activity-{}", i + 1)], - was_attributed_to: vec![ - EntityAttributionRef { - agent: format!("agent-{}", i), - role: Some("CERTIFIER".to_string()), - }, - EntityAttributionRef { - agent: format!("agent-{}", i + 1), - role: Some("MANUFACTURER".to_string()), - }, - ], - was_derived_from: vec![ - DerivationRef { - source: format!("entity-d-{}", i), - activity: format!("activity-d-{}", i), - }, - DerivationRef { - source: format!("entity-d-{}", i), - activity: format!("activity-d-{}", i), - }, - ], - was_quoted_from: vec![ - DerivationRef { - source: format!("entity-q-{}", i), - activity: format!("activity-q-{}", i), - }, - DerivationRef { - source: format!("entity-q-{}", i), - activity: format!("activity-q-{}", i), - }, - ], - was_revision_of: vec![ - DerivationRef { - source: format!("entity-r-{}", i), - 
activity: format!("activity-r-{}", i), - }, - DerivationRef { - source: format!("entity-r-{}", i), - activity: format!("activity-r-{}", i), - }, - ], - had_primary_source: vec![ - DerivationRef { - source: format!("entity-ps-{}", i), - activity: format!("activity-ps-{}", i), - }, - DerivationRef { - source: format!("entity-ps-{}", i), - activity: format!("activity-ps-{}", i), - }, - ], - }; - entities.push(entity); - } - - EntityAndReferences::to_record_batch(entities.into_iter(), meta) - .expect("Failed to convert entities to record batch") - } - - fn create_test_activity( - attributes: Vec<(String, PrimitiveType)>, - meta: &DomainTypeMeta, - count: u32, - ) -> RecordBatch { - let mut activities = Vec::new(); - for i in 0..count { - let activity = ActivityAndReferences { - id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), - namespace_name: "default".to_string(), - namespace_uuid: Uuid::default().into_bytes(), - attributes: create_attributes(meta.typ.as_deref(), &attributes), - started: Some(Utc.with_ymd_and_hms(2022, 1, 1, 0, 0, 0).unwrap()), - ended: Some(Utc.with_ymd_and_hms(2022, 1, 2, 0, 0, 0).unwrap()), - generated: vec![format!("entity-{}", i), format!("entity-{}", i + 1)], - was_informed_by: vec![format!("activity-{}", i), format!("activity-{}", i + 1)], - was_associated_with: vec![ActivityAssociationRef { - responsible: AgentInteraction { - agent: format!("agent-{}", i), - role: Some("ROLE_TYPE".to_string()), - }, - delegated: vec![AgentInteraction { - agent: format!("delegated-agent-{}", i), - role: Some("DELEGATED_ROLE".to_string()), - }], - }], - used: vec![format!("entity-{}", i), format!("entity-{}", i + 1)], - }; - activities.push(activity); - } - - ActivityAndReferences::to_record_batch(activities.into_iter(), meta) - .expect("Failed to convert activities to record batch") - } - - fn create_test_agent( - attributes: Vec<(String, PrimitiveType)>, - meta: &DomainTypeMeta, - count: u32, - ) -> RecordBatch { - let mut agents = Vec::new(); - for i in 0..count { - let agent = AgentAndReferences { - id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), - namespace_name: "default".to_string(), - namespace_uuid: Uuid::default().into_bytes(), - attributes: create_attributes(meta.typ.as_deref(), &attributes), - acted_on_behalf_of: vec![ActedOnBehalfOfRef { - agent: format!("agent-{}", i), - role: Some("DELEGATED_CERTIFIER".to_string()), - activity: format!("activity-{}", i), - }], - was_attributed_to: vec![AgentAttributionRef { - entity: format!("entity-{}", i), - role: Some("UNSPECIFIED_INTERACTION".to_string()), - }], - }; - agents.push(agent); - } - - AgentAndReferences::to_record_batch(agents.into_iter(), meta) - .expect("Failed to convert agents to record batch") - } - - pub fn batches_to_flight_data( - descriptor: &FlightDescriptor, - meta: &DomainTypeMeta, - batches: Vec, - ) -> Result, ArrowError> { - let options = IpcWriteOptions::default(); - let schema_flight_data: FlightData = - std::convert::Into::::into(SchemaAsIpc::new(&meta.schema, &options)) - .with_descriptor(descriptor.clone()); - let mut dictionaries = vec![]; - let mut flight_data = vec![]; - - let data_gen = writer::IpcDataGenerator::default(); - let mut dictionary_tracker = writer::DictionaryTracker::new(false); - - for batch in batches.iter() { - let (encoded_dictionaries, encoded_batch) = - data_gen.encoded_batch(batch, &mut dictionary_tracker, &options)?; - - dictionaries.extend(encoded_dictionaries.into_iter().map(Into::into)); - let next: FlightData = 
encoded_batch.into(); - flight_data.push(next); - } - - let mut stream = vec![schema_flight_data]; - stream.extend(dictionaries); - stream.extend(flight_data); - let flight_data: Vec<_> = stream.into_iter().collect(); - Ok(flight_data) - } - - async fn create_test_flight_data( - count: u32, - ) -> Result>, Box> { - let entity_meta = get_domain_type_meta_from_cache(&vec![ - "Entity".to_string(), - "CertificateEntity".to_owned(), - ]) - .expect("Failed to get entity meta"); - let entity_batch = create_test_entity( - vec![("certIDAttribute".to_string(), PrimitiveType::String)], - &entity_meta, - count, - ); - let entity_flight_data = batches_to_flight_data( - &FlightDescriptor::new_path(vec!["Entity".to_string(), "CertificateEntity".to_owned()]), - &entity_meta, - vec![entity_batch], - )?; - - let activity_meta = get_domain_type_meta_from_cache(&vec![ - "Activity".to_string(), - "ItemManufacturedActivity".to_owned(), - ]) - .expect("Failed to get activity meta"); - let activity_batch = create_test_activity( - vec![("batchIDAttribute".to_string(), PrimitiveType::String)], - &activity_meta, - count, - ); - let activity_flight_data = batches_to_flight_data( - &FlightDescriptor::new_path(vec![ - "Activity".to_string(), - "ItemManufacturedActivity".to_owned(), - ]), - &activity_meta, - vec![activity_batch], - )?; - - let agent_meta = get_domain_type_meta_from_cache(&vec![ - "Agent".to_string(), - "ContractorAgent".to_owned(), - ]) - .expect("Failed to get agent meta"); - let agent_batch = create_test_agent( - vec![ - ("companyNameAttribute".to_string(), PrimitiveType::String), - ("locationAttribute".to_string(), PrimitiveType::String), - ], - &agent_meta, - count, - ); - let agent_flight_data = batches_to_flight_data( - &FlightDescriptor::new_path(vec!["Agent".to_string(), "ContractorAgent".to_owned()]), - &agent_meta, - vec![agent_batch], - )?; - - let combined_flight_data = - vec![entity_flight_data, agent_flight_data, activity_flight_data]; - - Ok(combined_flight_data) - } - - async fn put_test_data( - count: u32, - client: &mut FlightServiceClient, - api: &mut TestDispatch<'_>, - ) -> Result<(), Box> { - let create_namespace_operation = ChronicleOperation::create_namespace( - NamespaceId::from_external_id("default", Uuid::default()), - ); - api.dispatch( - ApiCommand::Import(ImportCommand { operations: vec![create_namespace_operation] }), - AuthId::anonymous(), - ) - .await - .map_err(|e| Status::from_error(e.into()))?; - - for flight_data in create_test_flight_data(count).await? 
{ - client.do_put(stream::iter(flight_data)).await?; - } - - Ok(()) - } - - async fn stable_sorted_flight_info( - client: &mut FlightServiceClient, - ) -> Result, Box> { - let list_flights_response = client.list_flights(Request::new(Criteria::default())).await?; - - let flights = list_flights_response.into_inner().collect::>().await; - let mut valid_flights: Vec = - flights.into_iter().filter_map(Result::ok).collect(); - - valid_flights.sort_by(|a, b| { - a.flight_descriptor - .as_ref() - .map(|a| a.path.clone()) - .cmp(&b.flight_descriptor.as_ref().map(|b| b.path.clone())) - }); - Ok(valid_flights) - } - - async fn load_flights( - flights: &[FlightInfo], - client: &mut FlightServiceClient, - ) -> Result>, Box> { - let mut all_flight_data_results = Vec::new(); - for flight_info in flights { - for endpoint in &flight_info.endpoint { - if let Some(ticket) = &endpoint.ticket { - let request = Request::new(ticket.clone()); - let mut stream = client.do_get(request).await?.into_inner(); - let mut flight_data_results = Vec::new(); - while let Some(flight_data) = stream.message().await? { - flight_data_results.push(flight_data); - } - all_flight_data_results.push(flight_data_results); - } - } - } - Ok(all_flight_data_results) - } - - async fn decode_flight_data( - flight_data: Vec, - ) -> Result, Box> { - let decoder = FlightRecordBatchStream::new_from_flight_data(stream::iter( - flight_data.into_iter().map(Ok), - )); - let mut record_batches = Vec::new(); - pin_mut!(decoder); - while let Some(batch) = decoder.next().await.transpose()? { - record_batches.push(batch); - } - Ok(record_batches) - } - - #[tokio::test] - //Test using a reasonably large data set, over the endpoint paging boundary size so we can - // observe it - async fn flight_service_info() { - chronicle_telemetry::full_telemetry( - false, - None, - chronicle_telemetry::ConsoleLogging::Pretty, - ); - let domain = create_test_domain_def(); - let (mut client, mut api) = setup_test_environment(&domain).await.unwrap(); - cache_domain_schemas(&domain); - put_test_data(22, &mut client, &mut api).await.unwrap(); - - tokio::time::sleep(Duration::from_secs(10)).await; - - let flights = stable_sorted_flight_info(&mut client).await.unwrap(); - - insta::assert_debug_snapshot!(flights, @r###" + ChronicleDomainDef::from_input_string(yaml).unwrap() + } + + fn create_attributes( + typ: Option<&(dyn common::domain::TypeName + Send + Sync)>, + attributes: &[(String, PrimitiveType)], + ) -> Attributes { + Attributes::new( + typ.map(|x| x.as_domain_type_id()), + attributes + .iter() + .map(|(name, typ)| { + let value = match typ { + PrimitiveType::String => + serde_json::Value::String(format!("{}-value", name)), + PrimitiveType::Int => + serde_json::Value::Number(serde_json::Number::from(42)), + PrimitiveType::Bool => serde_json::Value::Bool(true), + PrimitiveType::JSON => + serde_json::Value::String(format!("{{\"{}\": \"example\"}}", name)), + }; + Attribute::new(name, value) + }) + .collect(), + ) + } + + fn create_test_entity( + attributes: Vec<(String, PrimitiveType)>, + meta: &DomainTypeMeta, + count: u32, + ) -> RecordBatch { + let mut entities = Vec::new(); + for i in 0..count { + let entity = EntityAndReferences { + id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), + namespace_name: "default".to_string(), + namespace_uuid: Uuid::default().into_bytes(), + attributes: create_attributes(meta.typ.as_deref(), &attributes), + was_generated_by: vec![format!("activity-{}", i), format!("activity-{}", i + 1)], + 
was_attributed_to: vec![ + EntityAttributionRef { + agent: format!("agent-{}", i), + role: Some("CERTIFIER".to_string()), + }, + EntityAttributionRef { + agent: format!("agent-{}", i + 1), + role: Some("MANUFACTURER".to_string()), + }, + ], + was_derived_from: vec![ + DerivationRef { + source: format!("entity-d-{}", i), + activity: format!("activity-d-{}", i), + }, + DerivationRef { + source: format!("entity-d-{}", i), + activity: format!("activity-d-{}", i), + }, + ], + was_quoted_from: vec![ + DerivationRef { + source: format!("entity-q-{}", i), + activity: format!("activity-q-{}", i), + }, + DerivationRef { + source: format!("entity-q-{}", i), + activity: format!("activity-q-{}", i), + }, + ], + was_revision_of: vec![ + DerivationRef { + source: format!("entity-r-{}", i), + activity: format!("activity-r-{}", i), + }, + DerivationRef { + source: format!("entity-r-{}", i), + activity: format!("activity-r-{}", i), + }, + ], + had_primary_source: vec![ + DerivationRef { + source: format!("entity-ps-{}", i), + activity: format!("activity-ps-{}", i), + }, + DerivationRef { + source: format!("entity-ps-{}", i), + activity: format!("activity-ps-{}", i), + }, + ], + }; + entities.push(entity); + } + + EntityAndReferences::to_record_batch(entities.into_iter(), meta) + .expect("Failed to convert entities to record batch") + } + + fn create_test_activity( + attributes: Vec<(String, PrimitiveType)>, + meta: &DomainTypeMeta, + count: u32, + ) -> RecordBatch { + let mut activities = Vec::new(); + for i in 0..count { + let activity = ActivityAndReferences { + id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), + namespace_name: "default".to_string(), + namespace_uuid: Uuid::default().into_bytes(), + attributes: create_attributes(meta.typ.as_deref(), &attributes), + started: Some(Utc.with_ymd_and_hms(2022, 1, 1, 0, 0, 0).unwrap()), + ended: Some(Utc.with_ymd_and_hms(2022, 1, 2, 0, 0, 0).unwrap()), + generated: vec![format!("entity-{}", i), format!("entity-{}", i + 1)], + was_informed_by: vec![format!("activity-{}", i), format!("activity-{}", i + 1)], + was_associated_with: vec![ActivityAssociationRef { + responsible: AgentInteraction { + agent: format!("agent-{}", i), + role: Some("ROLE_TYPE".to_string()), + }, + delegated: vec![AgentInteraction { + agent: format!("delegated-agent-{}", i), + role: Some("DELEGATED_ROLE".to_string()), + }], + }], + used: vec![format!("entity-{}", i), format!("entity-{}", i + 1)], + }; + activities.push(activity); + } + + ActivityAndReferences::to_record_batch(activities.into_iter(), meta) + .expect("Failed to convert activities to record batch") + } + + fn create_test_agent( + attributes: Vec<(String, PrimitiveType)>, + meta: &DomainTypeMeta, + count: u32, + ) -> RecordBatch { + let mut agents = Vec::new(); + for i in 0..count { + let agent = AgentAndReferences { + id: format!("{}-{}", meta.typ.as_ref().map(|x| x.as_type_name()).unwrap(), i), + namespace_name: "default".to_string(), + namespace_uuid: Uuid::default().into_bytes(), + attributes: create_attributes(meta.typ.as_deref(), &attributes), + acted_on_behalf_of: vec![ActedOnBehalfOfRef { + agent: format!("agent-{}", i), + role: Some("DELEGATED_CERTIFIER".to_string()), + activity: format!("activity-{}", i), + }], + was_attributed_to: vec![AgentAttributionRef { + entity: format!("entity-{}", i), + role: Some("UNSPECIFIED_INTERACTION".to_string()), + }], + }; + agents.push(agent); + } + + AgentAndReferences::to_record_batch(agents.into_iter(), meta) + .expect("Failed to convert agents to 
record batch") + } + + pub fn batches_to_flight_data( + descriptor: &FlightDescriptor, + meta: &DomainTypeMeta, + batches: Vec, + ) -> Result, ArrowError> { + let options = IpcWriteOptions::default(); + let schema_flight_data: FlightData = + std::convert::Into::::into(SchemaAsIpc::new(&meta.schema, &options)) + .with_descriptor(descriptor.clone()); + let mut dictionaries = vec![]; + let mut flight_data = vec![]; + + let data_gen = writer::IpcDataGenerator::default(); + let mut dictionary_tracker = writer::DictionaryTracker::new(false); + + for batch in batches.iter() { + let (encoded_dictionaries, encoded_batch) = + data_gen.encoded_batch(batch, &mut dictionary_tracker, &options)?; + + dictionaries.extend(encoded_dictionaries.into_iter().map(Into::into)); + let next: FlightData = encoded_batch.into(); + flight_data.push(next); + } + + let mut stream = vec![schema_flight_data]; + stream.extend(dictionaries); + stream.extend(flight_data); + let flight_data: Vec<_> = stream.into_iter().collect(); + Ok(flight_data) + } + + async fn create_test_flight_data( + count: u32, + ) -> Result>, Box> { + let entity_meta = get_domain_type_meta_from_cache(&vec![ + "Entity".to_string(), + "CertificateEntity".to_owned(), + ]) + .expect("Failed to get entity meta"); + let entity_batch = create_test_entity( + vec![("certIDAttribute".to_string(), PrimitiveType::String)], + &entity_meta, + count, + ); + let entity_flight_data = batches_to_flight_data( + &FlightDescriptor::new_path(vec!["Entity".to_string(), "CertificateEntity".to_owned()]), + &entity_meta, + vec![entity_batch], + )?; + + let activity_meta = get_domain_type_meta_from_cache(&vec![ + "Activity".to_string(), + "ItemManufacturedActivity".to_owned(), + ]) + .expect("Failed to get activity meta"); + let activity_batch = create_test_activity( + vec![("batchIDAttribute".to_string(), PrimitiveType::String)], + &activity_meta, + count, + ); + let activity_flight_data = batches_to_flight_data( + &FlightDescriptor::new_path(vec![ + "Activity".to_string(), + "ItemManufacturedActivity".to_owned(), + ]), + &activity_meta, + vec![activity_batch], + )?; + + let agent_meta = get_domain_type_meta_from_cache(&vec![ + "Agent".to_string(), + "ContractorAgent".to_owned(), + ]) + .expect("Failed to get agent meta"); + let agent_batch = create_test_agent( + vec![ + ("companyNameAttribute".to_string(), PrimitiveType::String), + ("locationAttribute".to_string(), PrimitiveType::String), + ], + &agent_meta, + count, + ); + let agent_flight_data = batches_to_flight_data( + &FlightDescriptor::new_path(vec!["Agent".to_string(), "ContractorAgent".to_owned()]), + &agent_meta, + vec![agent_batch], + )?; + + let combined_flight_data = + vec![entity_flight_data, agent_flight_data, activity_flight_data]; + + Ok(combined_flight_data) + } + + async fn put_test_data( + count: u32, + client: &mut FlightServiceClient, + api: &mut TestDispatch<'_>, + ) -> Result<(), Box> { + let create_namespace_operation = ChronicleOperation::create_namespace( + NamespaceId::from_external_id("default", Uuid::default()), + ); + api.dispatch( + ApiCommand::Import(ImportCommand { operations: vec![create_namespace_operation] }), + AuthId::anonymous(), + ) + .await + .map_err(|e| Status::from_error(e.into()))?; + + for flight_data in create_test_flight_data(count).await? 
{ + client.do_put(stream::iter(flight_data)).await?; + } + + Ok(()) + } + + async fn stable_sorted_flight_info( + client: &mut FlightServiceClient, + ) -> Result, Box> { + let list_flights_response = client.list_flights(Request::new(Criteria::default())).await?; + + let flights = list_flights_response.into_inner().collect::>().await; + let mut valid_flights: Vec = + flights.into_iter().filter_map(Result::ok).collect(); + + valid_flights.sort_by(|a, b| { + a.flight_descriptor + .as_ref() + .map(|a| a.path.clone()) + .cmp(&b.flight_descriptor.as_ref().map(|b| b.path.clone())) + }); + Ok(valid_flights) + } + + async fn load_flights( + flights: &[FlightInfo], + client: &mut FlightServiceClient, + ) -> Result>, Box> { + let mut all_flight_data_results = Vec::new(); + for flight_info in flights { + for endpoint in &flight_info.endpoint { + if let Some(ticket) = &endpoint.ticket { + let request = Request::new(ticket.clone()); + let mut stream = client.do_get(request).await?.into_inner(); + let mut flight_data_results = Vec::new(); + while let Some(flight_data) = stream.message().await? { + flight_data_results.push(flight_data); + } + all_flight_data_results.push(flight_data_results); + } + } + } + Ok(all_flight_data_results) + } + + async fn decode_flight_data( + flight_data: Vec, + ) -> Result, Box> { + let decoder = FlightRecordBatchStream::new_from_flight_data(stream::iter( + flight_data.into_iter().map(Ok), + )); + let mut record_batches = Vec::new(); + pin_mut!(decoder); + while let Some(batch) = decoder.next().await.transpose()? { + record_batches.push(batch); + } + Ok(record_batches) + } + + #[tokio::test] + //Test using a reasonably large data set, over the endpoint paging boundary size so we can + // observe it + async fn flight_service_info() { + chronicle_telemetry::telemetry( + chronicle_telemetry::ConsoleLogging::Pretty, + ); + let domain = create_test_domain_def(); + let (mut client, mut api) = setup_test_environment(&domain).await.unwrap(); + cache_domain_schemas(&domain); + put_test_data(22, &mut client, &mut api).await.unwrap(); + + tokio::time::sleep(Duration::from_secs(10)).await; + + let flights = stable_sorted_flight_info(&mut client).await.unwrap(); + + insta::assert_debug_snapshot!(flights, @r###" [ FlightInfo { schema: b"\xff\xff\xff\xff8\x04\0\0\x10\0\0\0\0\0\n\0\x0c\0\n\0\t\0\x04\0\n\0\0\0\x10\0\0\0\0\x01\x04\0\x08\0\x08\0\0\0\x04\0\x08\0\0\0\x04\0\0\0\t\0\0\0\xb4\x03\0\0p\x03\0\0H\x03\0\0\x04\x03\0\0\xb8\x02\0\0`\x02\0\0\x04\x02\0\0\xa4\x01\0\0\x04\0\0\0\x80\xfc\xff\xff\x18\0\0\0\x0c\0\0\0\0\0\0\x0ct\x01\0\0\x01\0\0\0\x08\0\0\0t\xfc\xff\xffD\xfd\xff\xff\x1c\0\0\0\x0c\0\0\0\0\0\x01\rH\x01\0\0\x02\0\0\0\xbc\0\0\0\x08\0\0\0\x98\xfc\xff\xff\xc4\xfc\xff\xff\x18\0\0\0\x0c\0\0\0\0\0\0\x0c\x90\0\0\0\x01\0\0\0\x08\0\0\0\xb8\xfc\xff\xff\x88\xfd\xff\xff\x1c\0\0\0\x0c\0\0\0\0\0\x01\rd\0\0\0\x02\0\0\04\0\0\0\x08\0\0\0\xdc\xfc\xff\xff\xac\xfd\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\x01\x05\x0c\0\0\0\0\0\0\0\xf8\xfc\xff\xff\x04\0\0\0role\0\0\0\00\xfd\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0 
\xfd\xff\xff\x05\0\0\0agent\0\0\0\x04\0\0\0item\0\0\0\0\t\0\0\0delegated\0\0\0t\xfd\xff\xff\x1c\0\0\0\x0c\0\0\0\0\0\0\rd\0\0\0\x02\0\0\04\0\0\0\x08\0\0\0l\xfd\xff\xff<\xfe\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\x01\x05\x0c\0\0\0\0\0\0\0\x88\xfd\xff\xff\x04\0\0\0role\0\0\0\0\xc0\xfd\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0\xb0\xfd\xff\xff\x05\0\0\0agent\0\0\0\x0b\0\0\0responsible\0\x04\0\0\0item\0\0\0\0\x13\0\0\0was_associated_with\0\x1c\xfe\xff\xff\x18\0\0\0\x0c\0\0\0\0\0\0\x0c8\0\0\0\x01\0\0\0\x08\0\0\0\x10\xfe\xff\xff<\xfe\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0,\xfe\xff\xff\x04\0\0\0item\0\0\0\0\x0f\0\0\0was_informed_by\0x\xfe\xff\xff\x18\0\0\0\x0c\0\0\0\0\0\0\x0c8\0\0\0\x01\0\0\0\x08\0\0\0l\xfe\xff\xff\x98\xfe\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0\x88\xfe\xff\xff\x04\0\0\0item\0\0\0\0\t\0\0\0generated\0\0\0\xd0\xfe\xff\xff\x18\0\0\0\x0c\0\0\0\0\0\0\x0c8\0\0\0\x01\0\0\0\x08\0\0\0\xc4\xfe\xff\xff\xf0\xfe\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0\xe0\xfe\xff\xff\x04\0\0\0item\0\0\0\0\x04\0\0\0used\0\0\0\0\xc8\xff\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\x01\n\x1c\0\0\0\0\0\0\0\xb8\xff\xff\xff\x08\0\0\0\0\0\x03\0\x03\0\0\0UTC\0\x05\0\0\0ended\0\0\0\x10\0\x14\0\x10\0\x0e\0\x0f\0\x04\0\0\0\x08\0\x10\0\0\0\x1c\0\0\0\x0c\0\0\0\0\0\x01\n$\0\0\0\0\0\0\0\x08\0\x0c\0\n\0\x04\0\x08\0\0\0\x08\0\0\0\0\0\x03\0\x03\0\0\0UTC\0\x07\0\0\0started\0\xac\xff\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0\x9c\xff\xff\xff\x02\0\0\0id\0\0\xd0\xff\xff\xff\x14\0\0\0\x0c\0\0\0\0\0\0\x05\x0c\0\0\0\0\0\0\0\xc0\xff\xff\xff\x0e\0\0\0namespace_uuid\0\0\x10\0\x14\0\x10\0\0\0\x0f\0\x04\0\0\0\x08\0\x10\0\0\0\x18\0\0\0\x0c\0\0\0\0\0\0\x05\x10\0\0\0\0\0\0\0\x04\0\x04\0\x04\0\0\0\x0e\0\0\0namespace_name\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0", @@ -1301,38 +1295,38 @@ roles: }, ] "###); - } - - #[tokio::test] - async fn get_and_put_are_isomorphic() { - chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty); - let domain = create_test_domain_def(); - let (mut client, mut api) = setup_test_environment(&domain).await.unwrap(); - cache_domain_schemas(&domain); - put_test_data(8, &mut client, &mut api).await.unwrap(); - - tokio::time::sleep(Duration::from_secs(2)).await; - - let flights = stable_sorted_flight_info(&mut client).await.unwrap(); - let flight_data = load_flights(&flights, &mut client).await.unwrap(); - - let mut decoded_flight_data = vec![]; - - for flight_data in flight_data.into_iter() { - decoded_flight_data - .push(decode_flight_data(flight_data).await.expect("Failed to decode flight data")); - } - - let json_arrays = decoded_flight_data - .into_iter() - .map(|batch| { - let batch_refs: Vec<&RecordBatch> = batch.iter().collect(); - arrow::json::writer::record_batches_to_json_rows(&batch_refs) - .expect("Failed to convert record batches to JSON") - }) - .collect::>(); - - insta::assert_debug_snapshot!(json_arrays, @r###" + } + + #[tokio::test] + async fn get_and_put_are_isomorphic() { + chronicle_telemetry::telemetry( chronicle_telemetry::ConsoleLogging::Pretty); + let domain = create_test_domain_def(); + let (mut client, mut api) = setup_test_environment(&domain).await.unwrap(); + cache_domain_schemas(&domain); + put_test_data(8, &mut client, &mut api).await.unwrap(); + + tokio::time::sleep(Duration::from_secs(2)).await; + + let flights = stable_sorted_flight_info(&mut client).await.unwrap(); + let flight_data = load_flights(&flights, &mut client).await.unwrap(); + + let mut decoded_flight_data = 
vec![]; + + for flight_data in flight_data.into_iter() { + decoded_flight_data + .push(decode_flight_data(flight_data).await.expect("Failed to decode flight data")); + } + + let json_arrays = decoded_flight_data + .into_iter() + .map(|batch| { + let batch_refs: Vec<&RecordBatch> = batch.iter().collect(); + arrow::json::writer::record_batches_to_json_rows(&batch_refs) + .expect("Failed to convert record batches to JSON") + }) + .collect::>(); + + insta::assert_debug_snapshot!(json_arrays, @r###" [ [ { @@ -2034,5 +2028,5 @@ roles: ], ] "###); - } + } } diff --git a/crates/chronicle-arrow/src/meta.rs b/crates/chronicle-arrow/src/meta.rs index ce32679e3..3aa54794a 100644 --- a/crates/chronicle-arrow/src/meta.rs +++ b/crates/chronicle-arrow/src/meta.rs @@ -1,276 +1,276 @@ use std::{ - collections::HashMap, - sync::{Arc, Mutex}, + collections::HashMap, + fmt, + str::FromStr, + sync::{Arc, Mutex}, }; -use std::fmt; -use std::str::FromStr; use arrow_schema::{Schema, SchemaBuilder}; use common::domain::{ - ActivityDef, AgentDef, ChronicleDomainDef, EntityDef, PrimitiveType, TypeName, + ActivityDef, AgentDef, ChronicleDomainDef, EntityDef, PrimitiveType, TypeName, }; fn field_for_domain_primitive(prim: &PrimitiveType) -> Option { - match prim { - PrimitiveType::String => Some(arrow_schema::DataType::Utf8), - PrimitiveType::Int => Some(arrow_schema::DataType::Int64), - PrimitiveType::Bool => Some(arrow_schema::DataType::Boolean), - PrimitiveType::JSON => Some(arrow_schema::DataType::Binary), - } + match prim { + PrimitiveType::String => Some(arrow_schema::DataType::Utf8), + PrimitiveType::Int => Some(arrow_schema::DataType::Int64), + PrimitiveType::Bool => Some(arrow_schema::DataType::Boolean), + PrimitiveType::JSON => Some(arrow_schema::DataType::Binary), + } } #[tracing::instrument] fn schema_for_namespace() -> Schema { - let mut builder = SchemaBuilder::new(); + let mut builder = SchemaBuilder::new(); - builder.push(arrow_schema::Field::new("name", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("uuid", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("name", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("uuid", arrow_schema::DataType::Utf8, false)); - builder.finish() + builder.finish() } pub fn attribution_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), - ] - .into(), - ) + arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), + ] + .into(), + ) } pub fn derivation_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("source", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("activity", arrow_schema::DataType::Utf8, false), - ] - .into(), - ) + arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("source", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("activity", arrow_schema::DataType::Utf8, false), + ] + .into(), + ) } pub fn qualified_agent_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), - ] - .into(), - ) + 
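
The decode_flight_data helper above drives the arrow-flight decoder by hand with pin_mut! and next(); the same round trip can be written more compactly with TryStreamExt. A minimal sketch, assuming the Vec<FlightData> begins with the schema message, as the do_get stream returns it:

use arrow_array::RecordBatch;
use arrow_flight::{decode::FlightRecordBatchStream, error::FlightError, FlightData};
use futures::{stream, StreamExt, TryStreamExt};

// Sketch only: feed raw FlightData messages (schema first, then record
// batches) into the decoder and collect every decoded RecordBatch.
async fn decode_all(flight_data: Vec<FlightData>) -> Result<Vec<RecordBatch>, FlightError> {
	FlightRecordBatchStream::new_from_flight_data(stream::iter(flight_data).map(Ok))
		.try_collect()
		.await
}
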
arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), + ] + .into(), + ) } pub fn association_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("responsible", qualified_agent_struct(), false), - arrow_schema::Field::new( - "delegated", - arrow_schema::DataType::new_list( - qualified_agent_struct(), - true, // Set the List type as non-nullable - ), - false, - ), - ] - .into(), - ) + arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("responsible", qualified_agent_struct(), false), + arrow_schema::Field::new( + "delegated", + arrow_schema::DataType::new_list( + qualified_agent_struct(), + true, // Set the List type as non-nullable + ), + false, + ), + ] + .into(), + ) } pub fn agent_delegation_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("activity", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), - ] - .into(), - ) + arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("agent", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("activity", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), + ] + .into(), + ) } pub fn agent_attribution_struct() -> arrow_schema::DataType { - arrow_schema::DataType::Struct( - vec![ - arrow_schema::Field::new("entity", arrow_schema::DataType::Utf8, false), - arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), - ] - .into(), - ) + arrow_schema::DataType::Struct( + vec![ + arrow_schema::Field::new("entity", arrow_schema::DataType::Utf8, false), + arrow_schema::Field::new("role", arrow_schema::DataType::Utf8, true), + ] + .into(), + ) } pub fn schema_for_entity(entity: &EntityDef) -> Schema { - let mut builder = SchemaBuilder::new(); - - builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); - - for attribute in &entity.attributes { - if let Some(data_type) = field_for_domain_primitive(&attribute.primitive_type) { - builder.push(arrow_schema::Field::new( - &attribute.preserve_inflection(), - data_type, - true, - )); - } - } - - builder.push(arrow_schema::Field::new( - "was_generated_by", - arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_attributed_to", - arrow_schema::DataType::new_list(attribution_struct(), false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_derived_from", - arrow_schema::DataType::new_list(derivation_struct(), false), - false, - )); - - builder.push(arrow_schema::Field::new( - "had_primary_source", - arrow_schema::DataType::new_list(derivation_struct(), false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_quoted_from", - arrow_schema::DataType::new_list(derivation_struct(), false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_revision_of", - arrow_schema::DataType::new_list(derivation_struct(), false), - false, - )); - - builder.finish() + let mut builder = SchemaBuilder::new(); + + 
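
For reference, the fixed column layout that schema_for_entity builds around the domain attributes, summarised as a sketch (the attribute column names and Arrow types depend on the entity definition):

// namespace_name      Utf8, non-nullable
// namespace_uuid      Utf8, non-nullable
// id                  Utf8, non-nullable
// <each attribute>    Utf8 | Int64 | Boolean | Binary, nullable
// was_generated_by    List<Utf8>
// was_attributed_to   List<Struct{agent, role}>
// was_derived_from, had_primary_source, was_quoted_from, was_revision_of
//                     List<Struct{source, activity}>
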
builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); + + for attribute in &entity.attributes { + if let Some(data_type) = field_for_domain_primitive(&attribute.primitive_type) { + builder.push(arrow_schema::Field::new( + &attribute.preserve_inflection(), + data_type, + true, + )); + } + } + + builder.push(arrow_schema::Field::new( + "was_generated_by", + arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_attributed_to", + arrow_schema::DataType::new_list(attribution_struct(), false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_derived_from", + arrow_schema::DataType::new_list(derivation_struct(), false), + false, + )); + + builder.push(arrow_schema::Field::new( + "had_primary_source", + arrow_schema::DataType::new_list(derivation_struct(), false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_quoted_from", + arrow_schema::DataType::new_list(derivation_struct(), false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_revision_of", + arrow_schema::DataType::new_list(derivation_struct(), false), + false, + )); + + builder.finish() } pub fn schema_for_activity(activity: &ActivityDef) -> Schema { - let mut builder = SchemaBuilder::new(); - - builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); - - for attribute in &activity.attributes { - if let Some(typ) = field_for_domain_primitive(&attribute.primitive_type) { - builder.push(arrow_schema::Field::new(&attribute.preserve_inflection(), typ, true)); - } - } - - builder.push(arrow_schema::Field::new( - "started", - arrow_schema::DataType::Timestamp(arrow_schema::TimeUnit::Nanosecond, Some("UTC".into())), - true, - )); - - builder.push(arrow_schema::Field::new( - "ended", - arrow_schema::DataType::Timestamp(arrow_schema::TimeUnit::Nanosecond, Some("UTC".into())), - true, - )); - - builder.push(arrow_schema::Field::new( - "used", - arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), - false, - )); - - builder.push(arrow_schema::Field::new( - "generated", - arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_informed_by", - arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_associated_with", - arrow_schema::DataType::new_list(association_struct(), true), - false, - )); - - builder.finish() + let mut builder = SchemaBuilder::new(); + + builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); + + for attribute in &activity.attributes { + if let Some(typ) = field_for_domain_primitive(&attribute.primitive_type) { + builder.push(arrow_schema::Field::new(&attribute.preserve_inflection(), typ, true)); + } + } + + builder.push(arrow_schema::Field::new( + "started", + 
arrow_schema::DataType::Timestamp(arrow_schema::TimeUnit::Nanosecond, Some("UTC".into())), + true, + )); + + builder.push(arrow_schema::Field::new( + "ended", + arrow_schema::DataType::Timestamp(arrow_schema::TimeUnit::Nanosecond, Some("UTC".into())), + true, + )); + + builder.push(arrow_schema::Field::new( + "used", + arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), + false, + )); + + builder.push(arrow_schema::Field::new( + "generated", + arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_informed_by", + arrow_schema::DataType::new_list(arrow_schema::DataType::Utf8, false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_associated_with", + arrow_schema::DataType::new_list(association_struct(), true), + false, + )); + + builder.finish() } pub fn schema_for_agent(agent: &AgentDef) -> Schema { - let mut builder = SchemaBuilder::new(); - builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); - builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); - - builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); - for attribute in &agent.attributes { - if let Some(typ) = field_for_domain_primitive(&attribute.primitive_type) { - builder.push(arrow_schema::Field::new(&attribute.preserve_inflection(), typ, true)); - } - } - - builder.push(arrow_schema::Field::new( - "acted_on_behalf_of", - arrow_schema::DataType::new_list(agent_delegation_struct(), false), - false, - )); - - builder.push(arrow_schema::Field::new( - "was_attributed_to", - arrow_schema::DataType::new_list(agent_attribution_struct(), false), - false, - )); - - builder.finish() + let mut builder = SchemaBuilder::new(); + builder.push(arrow_schema::Field::new("namespace_name", arrow_schema::DataType::Utf8, false)); + builder.push(arrow_schema::Field::new("namespace_uuid", arrow_schema::DataType::Utf8, false)); + + builder.push(arrow_schema::Field::new("id", arrow_schema::DataType::Utf8, false)); + for attribute in &agent.attributes { + if let Some(typ) = field_for_domain_primitive(&attribute.primitive_type) { + builder.push(arrow_schema::Field::new(&attribute.preserve_inflection(), typ, true)); + } + } + + builder.push(arrow_schema::Field::new( + "acted_on_behalf_of", + arrow_schema::DataType::new_list(agent_delegation_struct(), false), + false, + )); + + builder.push(arrow_schema::Field::new( + "was_attributed_to", + arrow_schema::DataType::new_list(agent_attribution_struct(), false), + false, + )); + + builder.finish() } #[derive(Debug, Clone, Copy, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub(crate) enum Term { - Namespace, - Entity, - Activity, - Agent, + Namespace, + Entity, + Activity, + Agent, } impl FromStr for Term { - type Err = (); - - fn from_str(s: &str) -> Result { - match s { - "Namespace" => Ok(Term::Namespace), - "Entity" => Ok(Term::Entity), - "Activity" => Ok(Term::Activity), - "Agent" => Ok(Term::Agent), - _ => Err(()), - } - } + type Err = (); + + fn from_str(s: &str) -> Result { + match s { + "Namespace" => Ok(Term::Namespace), + "Entity" => Ok(Term::Entity), + "Activity" => Ok(Term::Activity), + "Agent" => Ok(Term::Agent), + _ => Err(()), + } + } } impl fmt::Display for Term { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - match self { - Term::Namespace => write!(f, "Namespace"), - Term::Entity => write!(f, "Entity"), - Term::Activity => write!(f, "Activity"), 
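
Display and FromStr for Term are mutual inverses, and the schema cache below keys entries by vec![term.to_string(), type_name]. A small sketch of that contract, assuming a hypothetical "Order" entity type has already been registered by cache_domain_schemas:

use std::str::FromStr;

// Round trip: the string form used in cache keys parses back to the enum.
assert_eq!(Term::from_str(&Term::Entity.to_string()), Ok(Term::Entity));

// Flight descriptor paths reuse the same two-element key shape.
let meta = get_domain_type_meta_from_cache(&vec!["Entity".to_string(), "Order".to_string()]);
assert!(meta.is_some());
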
- Term::Agent => write!(f, "Agent"), - } - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Term::Namespace => write!(f, "Namespace"), + Term::Entity => write!(f, "Entity"), + Term::Activity => write!(f, "Activity"), + Term::Agent => write!(f, "Agent"), + } + } } pub(crate) struct DomainTypeMeta { - pub schema: Arc, - pub term: Term, - pub typ: Option>, - pub attributes: Vec<(String, PrimitiveType)>, + pub schema: Arc, + pub term: Term, + pub typ: Option>, + pub attributes: Vec<(String, PrimitiveType)>, } lazy_static::lazy_static! { @@ -279,95 +279,95 @@ lazy_static::lazy_static! { } pub fn get_domain_type_meta_from_cache( - descriptor_path: &Vec, + descriptor_path: &Vec, ) -> Option> { - let cache = SCHEMA_CACHE.lock().unwrap(); - cache.get(descriptor_path).cloned() + let cache = SCHEMA_CACHE.lock().unwrap(); + cache.get(descriptor_path).cloned() } #[tracing::instrument(skip(domain_type, type_name, schema), fields( term, schema = ? schema, type_name = type_name ))] pub fn cache_metadata( - term: Term, - domain_type: Box, - type_name: String, - attributes: Vec<(String, PrimitiveType)>, - schema: Schema, + term: Term, + domain_type: Box, + type_name: String, + attributes: Vec<(String, PrimitiveType)>, + schema: Schema, ) { - let mut cache = SCHEMA_CACHE.lock().expect("Failed to lock SCHEMA_CACHE"); - let domain_type_meta = Arc::new(DomainTypeMeta { - schema: schema.into(), - term, - typ: Some(domain_type), - attributes, - }); - cache.insert(vec![term.to_string(), type_name], domain_type_meta); + let mut cache = SCHEMA_CACHE.lock().expect("Failed to lock SCHEMA_CACHE"); + let domain_type_meta = Arc::new(DomainTypeMeta { + schema: schema.into(), + term, + typ: Some(domain_type), + attributes, + }); + cache.insert(vec![term.to_string(), type_name], domain_type_meta); } pub fn cache_namespace_schema() { - let mut cache = SCHEMA_CACHE.lock().unwrap(); - cache.insert( - vec!["Namespace".to_string()], - Arc::new(DomainTypeMeta { - schema: schema_for_namespace().into(), - term: Term::Namespace, - typ: None, - attributes: vec![], - }), - ); + let mut cache = SCHEMA_CACHE.lock().unwrap(); + cache.insert( + vec!["Namespace".to_string()], + Arc::new(DomainTypeMeta { + schema: schema_for_namespace().into(), + term: Term::Namespace, + typ: None, + attributes: vec![], + }), + ); } #[tracing::instrument(skip(domain_def))] pub fn cache_domain_schemas(domain_def: &ChronicleDomainDef) { - for entity in &domain_def.entities { - let schema = schema_for_entity(entity); - - let attributes = entity - .attributes - .iter() - .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) - .collect(); - cache_metadata( - Term::Entity, - Box::new(entity.clone()), - entity.as_type_name(), - attributes, - schema, - ); - } - - for agent in &domain_def.agents { - let schema = schema_for_agent(agent); - - let attributes = agent - .attributes - .iter() - .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) - .collect(); - cache_metadata( - Term::Agent, - Box::new(agent.clone()), - agent.as_type_name(), - attributes, - schema, - ); - } - - for activity in &domain_def.activities { - let schema = schema_for_activity(activity); - - let attributes = activity - .attributes - .iter() - .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) - .collect(); - cache_metadata( - Term::Activity, - Box::new(activity.clone()), - activity.as_type_name(), - attributes, - schema, - ); - } + for entity in &domain_def.entities { + let schema = schema_for_entity(entity); + + let attributes = 
entity + .attributes + .iter() + .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) + .collect(); + cache_metadata( + Term::Entity, + Box::new(entity.clone()), + entity.as_type_name(), + attributes, + schema, + ); + } + + for agent in &domain_def.agents { + let schema = schema_for_agent(agent); + + let attributes = agent + .attributes + .iter() + .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) + .collect(); + cache_metadata( + Term::Agent, + Box::new(agent.clone()), + agent.as_type_name(), + attributes, + schema, + ); + } + + for activity in &domain_def.activities { + let schema = schema_for_activity(activity); + + let attributes = activity + .attributes + .iter() + .map(|attr| (attr.preserve_inflection(), attr.primitive_type)) + .collect(); + cache_metadata( + Term::Activity, + Box::new(activity.clone()), + activity.as_type_name(), + attributes, + schema, + ); + } } diff --git a/crates/chronicle-arrow/src/operations/activity.rs b/crates/chronicle-arrow/src/operations/activity.rs index 995f29947..97344ed78 100644 --- a/crates/chronicle-arrow/src/operations/activity.rs +++ b/crates/chronicle-arrow/src/operations/activity.rs @@ -2,258 +2,258 @@ use arrow_array::{Array, RecordBatch}; use futures::StreamExt; use common::{ - attributes::Attributes, - prov::{ - ActivityId, - AgentId, EntityId, NamespaceId, operations::{ChronicleOperation, SetAttributes}, Role, - }, + attributes::Attributes, + prov::{ + operations::{ChronicleOperation, SetAttributes}, + ActivityId, AgentId, EntityId, NamespaceId, Role, + }, }; use crate::{ - ChronicleArrowError, - query::{ActivityAssociationRef, AgentInteraction}, + query::{ActivityAssociationRef, AgentInteraction}, + ChronicleArrowError, }; use super::{string_list_column, with_implied}; fn get_used( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - string_list_column(record_batch, "used", row_index) + string_list_column(record_batch, "used", row_index) } fn get_generated( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - string_list_column(record_batch, "generated", row_index) + string_list_column(record_batch, "generated", row_index) } fn get_was_informed_by( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - string_list_column(record_batch, "was_informed_by", row_index) + string_list_column(record_batch, "was_informed_by", row_index) } fn opt_time_column( - record_batch: &RecordBatch, - column_name: &str, - row_index: usize, + record_batch: &RecordBatch, + column_name: &str, + row_index: usize, ) -> Result>, ChronicleArrowError> { - let column_index = record_batch - .schema() - .index_of(column_name) - .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; - let column = record_batch.column(column_index); - - if let Some(timestamp_array) = - column.as_any().downcast_ref::() - { - let naive_time = timestamp_array.value_as_datetime(row_index); - let time = naive_time - .map(|nt| chrono::DateTime::::from_naive_utc_and_offset(nt, chrono::Utc)); - Ok(time) - } else { - Ok(None) - } + let column_index = record_batch + .schema() + .index_of(column_name) + .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; + let column = record_batch.column(column_index); + + if let Some(timestamp_array) = + column.as_any().downcast_ref::() + { + let 
naive_time = timestamp_array.value_as_datetime(row_index); + let time = naive_time + .map(|nt| chrono::DateTime::::from_naive_utc_and_offset(nt, chrono::Utc)); + Ok(time) + } else { + Ok(None) + } } fn get_started( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result>, ChronicleArrowError> { - opt_time_column(record_batch, "started", row_index) + opt_time_column(record_batch, "started", row_index) } fn get_ended( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result>, ChronicleArrowError> { - opt_time_column(record_batch, "ended", row_index) + opt_time_column(record_batch, "ended", row_index) } fn get_was_associated_with( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - use arrow_array::{ListArray, StringArray, StructArray}; - - let column_index = record_batch - .schema() - .index_of("was_associated_with") - .map_err(|_| ChronicleArrowError::MissingColumn("was_associated_with".to_string()))?; - let column = record_batch.column(column_index); - let list_array = column - .as_any() - .downcast_ref::() - .ok_or(ChronicleArrowError::ColumnTypeMismatch("Expected ListArray".to_string()))?; - let binding = list_array.value(row_index); - let struct_array = binding - .as_any() - .downcast_ref::() - .ok_or(ChronicleArrowError::ColumnTypeMismatch("Expected StructArray".to_string()))?; - - let mut associations = Vec::new(); - for i in 0..struct_array.len() { - let responsible_struct_array = - struct_array.column(0).as_any().downcast_ref::().ok_or( - ChronicleArrowError::ColumnTypeMismatch( - "Expected StructArray for responsible".to_string(), - ), - )?; - - let agent_array = responsible_struct_array - .column(0) - .as_any() - .downcast_ref::() - .ok_or(ChronicleArrowError::ColumnTypeMismatch( - "Expected StringArray for agent".to_string(), - ))?; - let role_array = responsible_struct_array - .column(1) - .as_any() - .downcast_ref::() - .ok_or(ChronicleArrowError::ColumnTypeMismatch( - "Expected StringArray for role".to_string(), - ))?; - - let agent = agent_array.value(i).to_string(); - let role = Some(role_array.value(i).to_string()); - - // Handling the delegated field, which is a ListArray of StructArray - let delegated_list_array = - struct_array.column(1).as_any().downcast_ref::().ok_or( - ChronicleArrowError::ColumnTypeMismatch( - "Expected ListArray for delegated".to_string(), - ), - )?; - let delegated_binding = delegated_list_array.value(i); - let delegated_struct_array = delegated_binding - .as_any() - .downcast_ref::() - .ok_or(ChronicleArrowError::ColumnTypeMismatch( - "Expected StructArray for delegated".to_string(), - ))?; - - let mut delegated_agents = Vec::new(); - for j in 0..delegated_struct_array.len() { - let delegated_agent_array = - delegated_struct_array.column(0).as_any().downcast_ref::().ok_or( - ChronicleArrowError::ColumnTypeMismatch( - "Expected StringArray for delegated agent".to_string(), - ), - )?; - let delegated_role_array = - delegated_struct_array.column(1).as_any().downcast_ref::().ok_or( - ChronicleArrowError::ColumnTypeMismatch( - "Expected StringArray for delegated role".to_string(), - ), - )?; - - let delegated_agent = delegated_agent_array.value(j).to_string(); - let delegated_role = Some(delegated_role_array.value(j).to_string()); - - delegated_agents - .push(AgentInteraction { agent: delegated_agent, role: delegated_role }); - } - - 
associations.push(ActivityAssociationRef { - responsible: AgentInteraction { agent, role }, - delegated: delegated_agents, - }); - } - - Ok(associations) + use arrow_array::{ListArray, StringArray, StructArray}; + + let column_index = record_batch + .schema() + .index_of("was_associated_with") + .map_err(|_| ChronicleArrowError::MissingColumn("was_associated_with".to_string()))?; + let column = record_batch.column(column_index); + let list_array = column + .as_any() + .downcast_ref::() + .ok_or(ChronicleArrowError::ColumnTypeMismatch("Expected ListArray".to_string()))?; + let binding = list_array.value(row_index); + let struct_array = binding + .as_any() + .downcast_ref::() + .ok_or(ChronicleArrowError::ColumnTypeMismatch("Expected StructArray".to_string()))?; + + let mut associations = Vec::new(); + for i in 0..struct_array.len() { + let responsible_struct_array = + struct_array.column(0).as_any().downcast_ref::().ok_or( + ChronicleArrowError::ColumnTypeMismatch( + "Expected StructArray for responsible".to_string(), + ), + )?; + + let agent_array = responsible_struct_array + .column(0) + .as_any() + .downcast_ref::() + .ok_or(ChronicleArrowError::ColumnTypeMismatch( + "Expected StringArray for agent".to_string(), + ))?; + let role_array = responsible_struct_array + .column(1) + .as_any() + .downcast_ref::() + .ok_or(ChronicleArrowError::ColumnTypeMismatch( + "Expected StringArray for role".to_string(), + ))?; + + let agent = agent_array.value(i).to_string(); + let role = Some(role_array.value(i).to_string()); + + // Handling the delegated field, which is a ListArray of StructArray + let delegated_list_array = + struct_array.column(1).as_any().downcast_ref::().ok_or( + ChronicleArrowError::ColumnTypeMismatch( + "Expected ListArray for delegated".to_string(), + ), + )?; + let delegated_binding = delegated_list_array.value(i); + let delegated_struct_array = delegated_binding + .as_any() + .downcast_ref::() + .ok_or(ChronicleArrowError::ColumnTypeMismatch( + "Expected StructArray for delegated".to_string(), + ))?; + + let mut delegated_agents = Vec::new(); + for j in 0..delegated_struct_array.len() { + let delegated_agent_array = + delegated_struct_array.column(0).as_any().downcast_ref::().ok_or( + ChronicleArrowError::ColumnTypeMismatch( + "Expected StringArray for delegated agent".to_string(), + ), + )?; + let delegated_role_array = + delegated_struct_array.column(1).as_any().downcast_ref::().ok_or( + ChronicleArrowError::ColumnTypeMismatch( + "Expected StringArray for delegated role".to_string(), + ), + )?; + + let delegated_agent = delegated_agent_array.value(j).to_string(); + let delegated_role = Some(delegated_role_array.value(j).to_string()); + + delegated_agents + .push(AgentInteraction { agent: delegated_agent, role: delegated_role }); + } + + associations.push(ActivityAssociationRef { + responsible: AgentInteraction { agent, role }, + delegated: delegated_agents, + }); + } + + Ok(associations) } pub fn activity_operations( - ns: &NamespaceId, - id: &str, - attributes: Attributes, - row_index: usize, - record_batch: &RecordBatch, + ns: &NamespaceId, + id: &str, + attributes: Attributes, + row_index: usize, + record_batch: &RecordBatch, ) -> Result, ChronicleArrowError> { - let mut operations = vec![ - ChronicleOperation::activity_exists(ns.clone(), ActivityId::from_external_id(id)), - ChronicleOperation::set_attributes(SetAttributes::activity( - ns.clone(), - ActivityId::from_external_id(id), - attributes, - )), - ]; - - let generated_ids = get_generated(record_batch, row_index)?; - 
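
get_was_associated_with above walks a doubly nested layout, List<Struct{responsible: Struct{agent, role}, delegated: List<Struct{agent, role}>}>, downcasting at each level. For a flat List<Struct{agent, role}> column the same walk collapses to a few lines; a sketch using the convenience casts in arrow_array::cast, assuming agent is field 0 and role is the nullable field 1, as in qualified_agent_struct:

use arrow_array::{cast::AsArray, Array, ListArray};

// Sketch: pull (agent, Option<role>) pairs out of one row of a
// List<Struct{agent: Utf8, role: Utf8?}> column.
fn agents_at(list: &ListArray, row: usize) -> Vec<(String, Option<String>)> {
	let values = list.value(row);
	let entries = values.as_struct();
	let agents = entries.column(0).as_string::<i32>();
	let roles = entries.column(1).as_string::<i32>();
	(0..entries.len())
		.map(|i| {
			let role = (!roles.is_null(i)).then(|| roles.value(i).to_string());
			(agents.value(i).to_string(), role)
		})
		.collect()
}
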
- for entity_id in generated_ids { - operations.push(ChronicleOperation::was_generated_by( - ns.clone(), - EntityId::from_external_id(&entity_id), - ActivityId::from_external_id(id), - )); - } - - let used_ids = get_used(record_batch, row_index)?; - - for used_id in used_ids { - operations.push(ChronicleOperation::activity_used( - ns.clone(), - ActivityId::from_external_id(id), - EntityId::from_external_id(&used_id), - )); - } - - let was_informed_by_ids = get_was_informed_by(record_batch, row_index)?; - - for informed_by_id in was_informed_by_ids { - operations.push(ChronicleOperation::was_informed_by( - ns.clone(), - ActivityId::from_external_id(id), - ActivityId::from_external_id(&informed_by_id), - )); - } - - let started = get_started(record_batch, row_index)?; - - if let Some(started) = started { - operations.push(ChronicleOperation::start_activity( - ns.clone(), - ActivityId::from_external_id(id), - started, - )); - } - - let ended = get_ended(record_batch, row_index)?; - - if let Some(ended) = ended { - operations.push(ChronicleOperation::end_activity( - ns.clone(), - ActivityId::from_external_id(id), - ended, - )); - } - - let was_associated_with_refs = get_was_associated_with(record_batch, row_index)?; - - for association_ref in was_associated_with_refs { - operations.push(ChronicleOperation::was_associated_with( - ns.clone(), - ActivityId::from_external_id(id), - AgentId::from_external_id(&association_ref.responsible.agent), - association_ref.responsible.role.map(Role), - )); - - for delegated in &association_ref.delegated { - operations.push(ChronicleOperation::agent_acts_on_behalf_of( - ns.clone(), - AgentId::from_external_id(id), - AgentId::from_external_id(&association_ref.responsible.agent), - Some(ActivityId::from_external_id(id)), - delegated.role.as_ref().map(|role| Role(role.clone())), - )); - } - } - - Ok(with_implied(operations)) + let mut operations = vec![ + ChronicleOperation::activity_exists(ns.clone(), ActivityId::from_external_id(id)), + ChronicleOperation::set_attributes(SetAttributes::activity( + ns.clone(), + ActivityId::from_external_id(id), + attributes, + )), + ]; + + let generated_ids = get_generated(record_batch, row_index)?; + + for entity_id in generated_ids { + operations.push(ChronicleOperation::was_generated_by( + ns.clone(), + EntityId::from_external_id(&entity_id), + ActivityId::from_external_id(id), + )); + } + + let used_ids = get_used(record_batch, row_index)?; + + for used_id in used_ids { + operations.push(ChronicleOperation::activity_used( + ns.clone(), + ActivityId::from_external_id(id), + EntityId::from_external_id(&used_id), + )); + } + + let was_informed_by_ids = get_was_informed_by(record_batch, row_index)?; + + for informed_by_id in was_informed_by_ids { + operations.push(ChronicleOperation::was_informed_by( + ns.clone(), + ActivityId::from_external_id(id), + ActivityId::from_external_id(&informed_by_id), + )); + } + + let started = get_started(record_batch, row_index)?; + + if let Some(started) = started { + operations.push(ChronicleOperation::start_activity( + ns.clone(), + ActivityId::from_external_id(id), + started, + )); + } + + let ended = get_ended(record_batch, row_index)?; + + if let Some(ended) = ended { + operations.push(ChronicleOperation::end_activity( + ns.clone(), + ActivityId::from_external_id(id), + ended, + )); + } + + let was_associated_with_refs = get_was_associated_with(record_batch, row_index)?; + + for association_ref in was_associated_with_refs { + operations.push(ChronicleOperation::was_associated_with( + 
ns.clone(),
+			ActivityId::from_external_id(id),
+			AgentId::from_external_id(&association_ref.responsible.agent),
+			association_ref.responsible.role.map(Role),
+		));
+
+		for delegated in &association_ref.delegated {
+			operations.push(ChronicleOperation::agent_acts_on_behalf_of(
+				ns.clone(),
+				AgentId::from_external_id(id),
+				AgentId::from_external_id(&association_ref.responsible.agent),
+				Some(ActivityId::from_external_id(id)),
+				delegated.role.as_ref().map(|role| Role(role.clone())),
+			));
+		}
+	}
+
+	Ok(with_implied(operations))
 }
diff --git a/crates/chronicle-arrow/src/operations/agent.rs b/crates/chronicle-arrow/src/operations/agent.rs
index 8ef92e7cd..53f28118a 100644
--- a/crates/chronicle-arrow/src/operations/agent.rs
+++ b/crates/chronicle-arrow/src/operations/agent.rs
@@ -1,91 +1,91 @@
 use arrow_array::RecordBatch;
 use common::{
-    attributes::Attributes,
-    prov::{
-        ActivityId,
-        AgentId, EntityId, NamespaceId, operations::{ChronicleOperation, SetAttributes}, Role,
-    },
+	attributes::Attributes,
+	prov::{
+		operations::{ChronicleOperation, SetAttributes},
+		ActivityId, AgentId, EntityId, NamespaceId, Role,
+	},
 };

 use crate::{
-    ChronicleArrowError,
-    query::{ActedOnBehalfOfRef, AgentAttributionRef},
+	query::{ActedOnBehalfOfRef, AgentAttributionRef},
+	ChronicleArrowError,
 };

 use super::{struct_2_list_column_opt_string, struct_3_list_column_opt_string, with_implied};

 fn get_agent_attribution(
-    record_batch: &RecordBatch,
-    row_index: usize,
+	record_batch: &RecordBatch,
+	row_index: usize,
 ) -> Result<Vec<AgentAttributionRef>, ChronicleArrowError> {
-    Ok(struct_2_list_column_opt_string(
-        record_batch,
-        "was_attributed_to",
-        row_index,
-        "entity",
-        "role",
-    )?
-    .into_iter()
-    .map(|(entity, role)| AgentAttributionRef { entity, role })
-    .collect())
+	Ok(struct_2_list_column_opt_string(
+		record_batch,
+		"was_attributed_to",
+		row_index,
+		"entity",
+		"role",
+	)?
+	.into_iter()
+	.map(|(entity, role)| AgentAttributionRef { entity, role })
+	.collect())
 }

 fn get_acted_on_behalf_of(
-    record_batch: &RecordBatch,
-    row_index: usize,
+	record_batch: &RecordBatch,
+	row_index: usize,
 ) -> Result<Vec<ActedOnBehalfOfRef>, ChronicleArrowError> {
-    Ok(struct_3_list_column_opt_string(
-        record_batch,
-        "acted_on_behalf_of",
-        row_index,
-        "agent",
-        "activity",
-        "role",
-    )?
-    .into_iter()
-    .map(|(agent, activity, role)| ActedOnBehalfOfRef { agent, role, activity })
-    .collect())
+	Ok(struct_3_list_column_opt_string(
+		record_batch,
+		"acted_on_behalf_of",
+		row_index,
+		"agent",
+		"activity",
+		"role",
+	)?
+ .into_iter() + .map(|(agent, activity, role)| ActedOnBehalfOfRef { agent, role, activity }) + .collect()) } pub fn agent_operations( - ns: &NamespaceId, - id: &str, - attributes: Attributes, - row_index: usize, - record_batch: &RecordBatch, + ns: &NamespaceId, + id: &str, + attributes: Attributes, + row_index: usize, + record_batch: &RecordBatch, ) -> Result, ChronicleArrowError> { - let mut operations = vec![ - ChronicleOperation::agent_exists(ns.clone(), AgentId::from_external_id(id)), - ChronicleOperation::set_attributes(SetAttributes::agent( - ns.clone(), - AgentId::from_external_id(id), - attributes, - )), - ]; + let mut operations = vec![ + ChronicleOperation::agent_exists(ns.clone(), AgentId::from_external_id(id)), + ChronicleOperation::set_attributes(SetAttributes::agent( + ns.clone(), + AgentId::from_external_id(id), + attributes, + )), + ]; - let was_attributed_to_refs = get_agent_attribution(record_batch, row_index)?; + let was_attributed_to_refs = get_agent_attribution(record_batch, row_index)?; - for was_attributed_to_ref in was_attributed_to_refs { - operations.push(ChronicleOperation::was_attributed_to( - ns.clone(), - EntityId::from_external_id(was_attributed_to_ref.entity), - AgentId::from_external_id(id), - was_attributed_to_ref.role.map(Role::from), - )); - } + for was_attributed_to_ref in was_attributed_to_refs { + operations.push(ChronicleOperation::was_attributed_to( + ns.clone(), + EntityId::from_external_id(was_attributed_to_ref.entity), + AgentId::from_external_id(id), + was_attributed_to_ref.role.map(Role::from), + )); + } - let acted_on_behalf_of_refs = get_acted_on_behalf_of(record_batch, row_index)?; + let acted_on_behalf_of_refs = get_acted_on_behalf_of(record_batch, row_index)?; - for acted_on_behalf_of_ref in acted_on_behalf_of_refs { - operations.push(ChronicleOperation::agent_acts_on_behalf_of( - ns.clone(), - AgentId::from_external_id(id), - AgentId::from_external_id(acted_on_behalf_of_ref.agent), - Some(ActivityId::from_external_id(acted_on_behalf_of_ref.activity)), - acted_on_behalf_of_ref.role.map(Role::from), - )); - } + for acted_on_behalf_of_ref in acted_on_behalf_of_refs { + operations.push(ChronicleOperation::agent_acts_on_behalf_of( + ns.clone(), + AgentId::from_external_id(id), + AgentId::from_external_id(acted_on_behalf_of_ref.agent), + Some(ActivityId::from_external_id(acted_on_behalf_of_ref.activity)), + acted_on_behalf_of_ref.role.map(Role::from), + )); + } - Ok(with_implied(operations)) + Ok(with_implied(operations)) } diff --git a/crates/chronicle-arrow/src/operations/entity.rs b/crates/chronicle-arrow/src/operations/entity.rs index 860571365..8ff910a56 100644 --- a/crates/chronicle-arrow/src/operations/entity.rs +++ b/crates/chronicle-arrow/src/operations/entity.rs @@ -1,140 +1,140 @@ use arrow_array::RecordBatch; use common::{ - attributes::Attributes, - prov::{ - ActivityId, - AgentId, EntityId, NamespaceId, operations::{ChronicleOperation, DerivationType, SetAttributes}, Role, - }, + attributes::Attributes, + prov::{ + operations::{ChronicleOperation, DerivationType, SetAttributes}, + ActivityId, AgentId, EntityId, NamespaceId, Role, + }, }; use crate::{ - ChronicleArrowError, - query::{DerivationRef, EntityAttributionRef}, + query::{DerivationRef, EntityAttributionRef}, + ChronicleArrowError, }; use super::{ - string_list_column, struct_2_list_column, struct_2_list_column_opt_string, with_implied, + string_list_column, struct_2_list_column, struct_2_list_column_opt_string, with_implied, }; fn get_was_generated_by( - record_batch: 
&RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - string_list_column(record_batch, "was_generated_by", row_index) + string_list_column(record_batch, "was_generated_by", row_index) } fn get_entity_was_attributed_to( - record_batch: &RecordBatch, - row_index: usize, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - Ok(struct_2_list_column_opt_string( - record_batch, - "was_attributed_to", - row_index, - "agent", - "role", - )? - .into_iter() - .map(|(agent, role)| EntityAttributionRef { agent, role }) - .collect()) + Ok(struct_2_list_column_opt_string( + record_batch, + "was_attributed_to", + row_index, + "agent", + "role", + )? + .into_iter() + .map(|(agent, role)| EntityAttributionRef { agent, role }) + .collect()) } fn get_derivation( - column_name: &str, - record_batch: &RecordBatch, - row_index: usize, + column_name: &str, + record_batch: &RecordBatch, + row_index: usize, ) -> Result, ChronicleArrowError> { - Ok(struct_2_list_column(record_batch, column_name, row_index, "source", "activity")? - .into_iter() - .map(|(target, activity)| DerivationRef { source: target, activity }) - .collect()) + Ok(struct_2_list_column(record_batch, column_name, row_index, "source", "activity")? + .into_iter() + .map(|(target, activity)| DerivationRef { source: target, activity }) + .collect()) } pub fn entity_operations( - ns: &NamespaceId, - id: &str, - attributes: Attributes, - row_index: usize, - record_batch: &RecordBatch, + ns: &NamespaceId, + id: &str, + attributes: Attributes, + row_index: usize, + record_batch: &RecordBatch, ) -> Result, ChronicleArrowError> { - let mut operations = vec![ - ChronicleOperation::entity_exists(ns.clone(), EntityId::from_external_id(id)), - ChronicleOperation::set_attributes(SetAttributes::entity( - ns.clone(), - EntityId::from_external_id(id), - attributes, - )), - ]; - - let was_generated_by_ids = get_was_generated_by(record_batch, row_index)?; - - for generated_by_id in was_generated_by_ids { - operations.push(ChronicleOperation::was_generated_by( - ns.clone(), - EntityId::from_external_id(id), - ActivityId::from_external_id(&generated_by_id), - )); - } - - let was_attributed_to_refs = get_entity_was_attributed_to(record_batch, row_index)?; - - for was_attributed_to_ref in was_attributed_to_refs { - operations.push(ChronicleOperation::was_attributed_to( - ns.clone(), - EntityId::from_external_id(id), - AgentId::from_external_id(was_attributed_to_ref.agent), - was_attributed_to_ref.role.map(Role::from), - )) - } - - let was_derived_from_refs = get_derivation("was_derived_from", record_batch, row_index)?; - - for was_derived_from_ref in was_derived_from_refs { - operations.push(ChronicleOperation::entity_derive( - ns.clone(), - EntityId::from_external_id(was_derived_from_ref.source), - EntityId::from_external_id(id), - Some(ActivityId::from_external_id(was_derived_from_ref.activity)), - DerivationType::None, - )) - } - - let had_primary_source_refs = get_derivation("had_primary_source", record_batch, row_index)?; - - for had_primary_source_ref in had_primary_source_refs { - operations.push(ChronicleOperation::entity_derive( - ns.clone(), - EntityId::from_external_id(had_primary_source_ref.source), - EntityId::from_external_id(id), - Some(ActivityId::from_external_id(had_primary_source_ref.activity)), - DerivationType::PrimarySource, - )) - } - - let was_quoted_from_refs = get_derivation("was_quoted_from", record_batch, row_index)?; - - for 
was_quoted_from_ref in was_quoted_from_refs { - operations.push(ChronicleOperation::entity_derive( - ns.clone(), - EntityId::from_external_id(was_quoted_from_ref.source), - EntityId::from_external_id(id), - Some(ActivityId::from_external_id(was_quoted_from_ref.activity)), - DerivationType::Quotation, - )) - } - - let was_revision_of_refs = get_derivation("was_revision_of", record_batch, row_index)?; - - for was_revision_of_ref in was_revision_of_refs { - operations.push(ChronicleOperation::entity_derive( - ns.clone(), - EntityId::from_external_id(was_revision_of_ref.source), - EntityId::from_external_id(id), - Some(ActivityId::from_external_id(was_revision_of_ref.activity)), - DerivationType::Revision, - )) - } - - Ok(with_implied(operations)) + let mut operations = vec![ + ChronicleOperation::entity_exists(ns.clone(), EntityId::from_external_id(id)), + ChronicleOperation::set_attributes(SetAttributes::entity( + ns.clone(), + EntityId::from_external_id(id), + attributes, + )), + ]; + + let was_generated_by_ids = get_was_generated_by(record_batch, row_index)?; + + for generated_by_id in was_generated_by_ids { + operations.push(ChronicleOperation::was_generated_by( + ns.clone(), + EntityId::from_external_id(id), + ActivityId::from_external_id(&generated_by_id), + )); + } + + let was_attributed_to_refs = get_entity_was_attributed_to(record_batch, row_index)?; + + for was_attributed_to_ref in was_attributed_to_refs { + operations.push(ChronicleOperation::was_attributed_to( + ns.clone(), + EntityId::from_external_id(id), + AgentId::from_external_id(was_attributed_to_ref.agent), + was_attributed_to_ref.role.map(Role::from), + )) + } + + let was_derived_from_refs = get_derivation("was_derived_from", record_batch, row_index)?; + + for was_derived_from_ref in was_derived_from_refs { + operations.push(ChronicleOperation::entity_derive( + ns.clone(), + EntityId::from_external_id(was_derived_from_ref.source), + EntityId::from_external_id(id), + Some(ActivityId::from_external_id(was_derived_from_ref.activity)), + DerivationType::None, + )) + } + + let had_primary_source_refs = get_derivation("had_primary_source", record_batch, row_index)?; + + for had_primary_source_ref in had_primary_source_refs { + operations.push(ChronicleOperation::entity_derive( + ns.clone(), + EntityId::from_external_id(had_primary_source_ref.source), + EntityId::from_external_id(id), + Some(ActivityId::from_external_id(had_primary_source_ref.activity)), + DerivationType::PrimarySource, + )) + } + + let was_quoted_from_refs = get_derivation("was_quoted_from", record_batch, row_index)?; + + for was_quoted_from_ref in was_quoted_from_refs { + operations.push(ChronicleOperation::entity_derive( + ns.clone(), + EntityId::from_external_id(was_quoted_from_ref.source), + EntityId::from_external_id(id), + Some(ActivityId::from_external_id(was_quoted_from_ref.activity)), + DerivationType::Quotation, + )) + } + + let was_revision_of_refs = get_derivation("was_revision_of", record_batch, row_index)?; + + for was_revision_of_ref in was_revision_of_refs { + operations.push(ChronicleOperation::entity_derive( + ns.clone(), + EntityId::from_external_id(was_revision_of_ref.source), + EntityId::from_external_id(id), + Some(ActivityId::from_external_id(was_revision_of_ref.activity)), + DerivationType::Revision, + )) + } + + Ok(with_implied(operations)) } diff --git a/crates/chronicle-arrow/src/operations/mod.rs b/crates/chronicle-arrow/src/operations/mod.rs index 87102cb93..18b440678 100644 --- a/crates/chronicle-arrow/src/operations/mod.rs +++ 
b/crates/chronicle-arrow/src/operations/mod.rs @@ -7,19 +7,21 @@ use uuid::Uuid; pub(crate) use activity::*; pub(crate) use agent::*; -use api::ApiDispatch; -use api::commands::{ApiCommand, ImportCommand}; +use api::{ + commands::{ApiCommand, ImportCommand}, + ApiDispatch, +}; use common::{ attributes::{Attribute, Attributes}, domain::TypeName, identity::AuthId, - prov::{NamespaceId, operations::ChronicleOperation}, + prov::{operations::ChronicleOperation, NamespaceId}, }; pub(crate) use entity::*; use crate::{ + meta::{get_domain_type_meta_from_cache, DomainTypeMeta, Term}, ChronicleArrowError, - meta::{DomainTypeMeta, get_domain_type_meta_from_cache, Term}, }; mod activity; @@ -28,372 +30,372 @@ mod entity; #[tracing::instrument(skip(record_batch, api))] pub async fn process_record_batch( - descriptor_path: &Vec, - record_batch: RecordBatch, - api: &ApiDispatch, + descriptor_path: &Vec, + record_batch: RecordBatch, + api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - let domain_type_meta = get_domain_type_meta_from_cache(descriptor_path) - .ok_or(ChronicleArrowError::MetadataNotFound)?; - - let attribute_columns = domain_type_meta - .schema - .fields() - .iter() - .filter_map(|field| { - if field.name().ends_with("Attribute") { - Some(field.name().clone()) - } else { - None - } - }) - .collect::>(); - - match domain_type_meta.term { - Term::Entity => - create_chronicle_entity(&domain_type_meta.typ, &record_batch, &attribute_columns, api) - .await?, - Term::Activity => - create_chronicle_activity(&domain_type_meta.typ, &record_batch, &attribute_columns, api) - .await?, - Term::Agent => - create_chronicle_agent(&domain_type_meta.typ, &record_batch, &attribute_columns, api) - .await?, - Term::Namespace => create_chronicle_namespace(&record_batch, api).await?, - } - Ok(()) + let domain_type_meta = get_domain_type_meta_from_cache(descriptor_path) + .ok_or(ChronicleArrowError::MetadataNotFound)?; + + let attribute_columns = domain_type_meta + .schema + .fields() + .iter() + .filter_map(|field| { + if field.name().ends_with("Attribute") { + Some(field.name().clone()) + } else { + None + } + }) + .collect::>(); + + match domain_type_meta.term { + Term::Entity => + create_chronicle_entity(&domain_type_meta.typ, &record_batch, &attribute_columns, api) + .await?, + Term::Activity => + create_chronicle_activity(&domain_type_meta.typ, &record_batch, &attribute_columns, api) + .await?, + Term::Agent => + create_chronicle_agent(&domain_type_meta.typ, &record_batch, &attribute_columns, api) + .await?, + Term::Namespace => create_chronicle_namespace(&record_batch, api).await?, + } + Ok(()) } #[tracing::instrument(skip(descriptor, meta, batch))] pub fn batch_to_flight_data( - descriptor: &FlightDescriptor, - meta: &DomainTypeMeta, - batch: RecordBatch, + descriptor: &FlightDescriptor, + meta: &DomainTypeMeta, + batch: RecordBatch, ) -> Result, ArrowError> { - let options = IpcWriteOptions::default(); + let options = IpcWriteOptions::default(); - let schema_flight_data: FlightData = - std::convert::Into::::into(SchemaAsIpc::new(&meta.schema, &options)) - .with_descriptor(descriptor.clone()); + let schema_flight_data: FlightData = + std::convert::Into::::into(SchemaAsIpc::new(&meta.schema, &options)) + .with_descriptor(descriptor.clone()); - let data_gen = IpcDataGenerator::default(); - let mut dictionary_tracker = DictionaryTracker::new(false); + let data_gen = IpcDataGenerator::default(); + let mut dictionary_tracker = DictionaryTracker::new(false); - let (encoded_dictionaries, encoded_batch) = - 
data_gen.encoded_batch(&batch, &mut dictionary_tracker, &options)?; + let (encoded_dictionaries, encoded_batch) = + data_gen.encoded_batch(&batch, &mut dictionary_tracker, &options)?; - let dictionaries: Vec = encoded_dictionaries.into_iter().map(Into::into).collect(); - let flight_data: FlightData = encoded_batch.into(); + let dictionaries: Vec = encoded_dictionaries.into_iter().map(Into::into).collect(); + let flight_data: FlightData = encoded_batch.into(); - let mut stream = vec![schema_flight_data]; - stream.extend(dictionaries); - stream.push(flight_data); + let mut stream = vec![schema_flight_data]; + stream.extend(dictionaries); + stream.push(flight_data); - Ok(stream) + Ok(stream) } async fn create_chronicle_namespace( - record_batch: &RecordBatch, - _api: &ApiDispatch, + record_batch: &RecordBatch, + _api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - let _uuid = record_batch - .column_by_name("uuid") - .ok_or(ChronicleArrowError::MissingColumn("uuid".to_string()))?; - let _name = record_batch - .column_by_name("name") - .ok_or(ChronicleArrowError::MissingColumn("name".to_string()))?; - - Ok(()) + let _uuid = record_batch + .column_by_name("uuid") + .ok_or(ChronicleArrowError::MissingColumn("uuid".to_string()))?; + let _name = record_batch + .column_by_name("name") + .ok_or(ChronicleArrowError::MissingColumn("name".to_string()))?; + + Ok(()) } pub async fn create_chronicle_entity( - domain_type: &Option>, - record_batch: &RecordBatch, - attribute_columns: &Vec, - api: &ApiDispatch, + domain_type: &Option>, + record_batch: &RecordBatch, + attribute_columns: &Vec, + api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - create_chronicle_terms(record_batch, Term::Entity, domain_type, attribute_columns, api).await + create_chronicle_terms(record_batch, Term::Entity, domain_type, attribute_columns, api).await } pub async fn create_chronicle_activity( - domain_type: &Option>, - record_batch: &RecordBatch, - attribute_columns: &Vec, - api: &ApiDispatch, + domain_type: &Option>, + record_batch: &RecordBatch, + attribute_columns: &Vec, + api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - create_chronicle_terms(record_batch, Term::Activity, domain_type, attribute_columns, api).await + create_chronicle_terms(record_batch, Term::Activity, domain_type, attribute_columns, api).await } pub async fn create_chronicle_agent( - domain_type: &Option>, - record_batch: &RecordBatch, - attribute_columns: &Vec, - api: &ApiDispatch, + domain_type: &Option>, + record_batch: &RecordBatch, + attribute_columns: &Vec, + api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - create_chronicle_terms(record_batch, Term::Agent, domain_type, attribute_columns, api).await + create_chronicle_terms(record_batch, Term::Agent, domain_type, attribute_columns, api).await } pub async fn create_chronicle_terms( - record_batch: &RecordBatch, - record_type: Term, - domain_type: &Option>, - attribute_columns: &Vec, - api: &ApiDispatch, + record_batch: &RecordBatch, + record_type: Term, + domain_type: &Option>, + attribute_columns: &Vec, + api: &ApiDispatch, ) -> Result<(), ChronicleArrowError> { - let ns_name_column = record_batch - .column_by_name("namespace_name") - .ok_or(ChronicleArrowError::MissingColumn("namespace_name".to_string()))?; - - let ns_uuid_column = record_batch - .column_by_name("namespace_uuid") - .ok_or(ChronicleArrowError::MissingColumn("namespace_uuid".to_string()))?; - - let id_column = record_batch - .column_by_name("id") - 
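
batch_to_flight_data above appears to mirror arrow_flight::utils::batches_to_flight_data, with the difference that it stamps the request's FlightDescriptor onto the schema message; the wire order is the same either way: one schema message, then any dictionary batches, then the record batch. A rough equivalent using the stock helper (a sketch; no descriptor attached):

use arrow_array::RecordBatch;
use arrow_flight::{utils::batches_to_flight_data, FlightData};
use arrow_schema::{ArrowError, Schema};

// Sketch: encode one batch as schema + dictionaries + data messages.
fn encode(schema: &Schema, batch: RecordBatch) -> Result<Vec<FlightData>, ArrowError> {
	batches_to_flight_data(schema, vec![batch])
}
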
.ok_or(ChronicleArrowError::MissingColumn("id".to_string()))?; - - let attribute_columns_refs: Vec<&String> = attribute_columns.iter().collect(); - let attribute_values = attribute_columns_refs - .iter() - .map(|column_name| (column_name.to_string(), record_batch.column_by_name(column_name))) - .filter_map(|(column_name, array_ref)| array_ref.map(|array_ref| (column_name, array_ref))) - .collect::>(); - - tracing::trace!(?attribute_columns, "Processing attribute columns"); - - let mut operations = Vec::new(); - for row_index in 0..record_batch.num_rows() { - let ns_name = ns_name_column.as_string::().value(row_index); - let ns_uuid = ns_uuid_column.as_string::().value(row_index); - let ns_uuid = Uuid::parse_str(ns_uuid).map_err(ChronicleArrowError::from)?; - let ns: NamespaceId = NamespaceId::from_external_id(ns_name, ns_uuid); - - let id = id_column.as_string::().value(row_index); - - let mut attributes: Vec = Vec::new(); - - for (attribute_name, attribute_array) in attribute_values.iter() { - tracing::trace!(%attribute_name, row_index, "Appending to attributes"); - if let Some(array) = attribute_array.as_any().downcast_ref::() { - let value = array.value(row_index); - attributes.push(Attribute::new( - attribute_name.clone(), - serde_json::Value::String(value.to_string()), - )); - } else if let Some(array) = attribute_array.as_any().downcast_ref::() { - let value = array.value(row_index); - attributes.push(Attribute::new( - attribute_name.clone(), - serde_json::Value::Number(value.into()), - )); - } else if let Some(array) = attribute_array.as_any().downcast_ref::() { - let value = array.value(row_index); - attributes - .push(Attribute::new(attribute_name.clone(), serde_json::Value::Bool(value))); - } else { - tracing::warn!(%attribute_name, row_index, "Unsupported attribute type"); - } - } - let attributes = - Attributes::new(domain_type.as_ref().map(|x| x.as_domain_type_id()), attributes); - - match record_type { - Term::Entity => { - operations.extend(entity_operations(&ns, id, attributes, row_index, record_batch)?); - } - Term::Activity => { - operations.extend(activity_operations( - &ns, - id, - attributes, - row_index, - record_batch, - )?); - } - Term::Agent => { - operations.extend(agent_operations(&ns, id, attributes, row_index, record_batch)?); - } - Term::Namespace => { - // Noop / unreachable - } - } - } - - api.dispatch(ApiCommand::Import(ImportCommand { operations }), AuthId::anonymous()) - .await?; - - Ok(()) + let ns_name_column = record_batch + .column_by_name("namespace_name") + .ok_or(ChronicleArrowError::MissingColumn("namespace_name".to_string()))?; + + let ns_uuid_column = record_batch + .column_by_name("namespace_uuid") + .ok_or(ChronicleArrowError::MissingColumn("namespace_uuid".to_string()))?; + + let id_column = record_batch + .column_by_name("id") + .ok_or(ChronicleArrowError::MissingColumn("id".to_string()))?; + + let attribute_columns_refs: Vec<&String> = attribute_columns.iter().collect(); + let attribute_values = attribute_columns_refs + .iter() + .map(|column_name| (column_name.to_string(), record_batch.column_by_name(column_name))) + .filter_map(|(column_name, array_ref)| array_ref.map(|array_ref| (column_name, array_ref))) + .collect::>(); + + tracing::trace!(?attribute_columns, "Processing attribute columns"); + + let mut operations = Vec::new(); + for row_index in 0..record_batch.num_rows() { + let ns_name = ns_name_column.as_string::().value(row_index); + let ns_uuid = ns_uuid_column.as_string::().value(row_index); + let ns_uuid = 
Uuid::parse_str(ns_uuid).map_err(ChronicleArrowError::from)?; + let ns: NamespaceId = NamespaceId::from_external_id(ns_name, ns_uuid); + + let id = id_column.as_string::().value(row_index); + + let mut attributes: Vec = Vec::new(); + + for (attribute_name, attribute_array) in attribute_values.iter() { + tracing::trace!(%attribute_name, row_index, "Appending to attributes"); + if let Some(array) = attribute_array.as_any().downcast_ref::() { + let value = array.value(row_index); + attributes.push(Attribute::new( + attribute_name.clone(), + serde_json::Value::String(value.to_string()), + )); + } else if let Some(array) = attribute_array.as_any().downcast_ref::() { + let value = array.value(row_index); + attributes.push(Attribute::new( + attribute_name.clone(), + serde_json::Value::Number(value.into()), + )); + } else if let Some(array) = attribute_array.as_any().downcast_ref::() { + let value = array.value(row_index); + attributes + .push(Attribute::new(attribute_name.clone(), serde_json::Value::Bool(value))); + } else { + tracing::warn!(%attribute_name, row_index, "Unsupported attribute type"); + } + } + let attributes = + Attributes::new(domain_type.as_ref().map(|x| x.as_domain_type_id()), attributes); + + match record_type { + Term::Entity => { + operations.extend(entity_operations(&ns, id, attributes, row_index, record_batch)?); + }, + Term::Activity => { + operations.extend(activity_operations( + &ns, + id, + attributes, + row_index, + record_batch, + )?); + }, + Term::Agent => { + operations.extend(agent_operations(&ns, id, attributes, row_index, record_batch)?); + }, + Term::Namespace => { + // Noop / unreachable + }, + } + } + + api.dispatch(ApiCommand::Import(ImportCommand { operations }), AuthId::anonymous()) + .await?; + + Ok(()) } fn string_list_column( - record_batch: &RecordBatch, - column_name: &str, - row_index: usize, + record_batch: &RecordBatch, + column_name: &str, + row_index: usize, ) -> Result, ChronicleArrowError> { - let column_index = record_batch - .schema() - .index_of(column_name) - .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; - let column = record_batch.column(column_index); - if let Some(list_array) = column.as_any().downcast_ref::() { - if let Some(string_array) = - list_array.value(row_index).as_any().downcast_ref::() - { - Ok((0..string_array.len()).map(|i| string_array.value(i).to_string()).collect()) - } else { - Ok(vec![]) - } - } else { - Ok(vec![]) - } + let column_index = record_batch + .schema() + .index_of(column_name) + .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; + let column = record_batch.column(column_index); + if let Some(list_array) = column.as_any().downcast_ref::() { + if let Some(string_array) = + list_array.value(row_index).as_any().downcast_ref::() + { + Ok((0..string_array.len()).map(|i| string_array.value(i).to_string()).collect()) + } else { + Ok(vec![]) + } + } else { + Ok(vec![]) + } } fn struct_2_list_column_opt_string( - record_batch: &RecordBatch, - column_name: &str, - row_index: usize, - field1_name: &str, - field2_name: &str, + record_batch: &RecordBatch, + column_name: &str, + row_index: usize, + field1_name: &str, + field2_name: &str, ) -> Result)>, ChronicleArrowError> { - let column_index = record_batch - .schema() - .index_of(column_name) - .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; - let column = record_batch.column(column_index); - if let Some(list_array) = column.as_any().downcast_ref::() { - if let Some(struct_array) = - 
list_array.value(row_index).as_any().downcast_ref::() - { - let field1_index = struct_array - .column_by_name(field1_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; - let field2_index = struct_array - .column_by_name(field2_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; - - let field1_array = field1_index - .as_any() - .downcast_ref::() - .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field1_name.to_string()))?; - let field2_array = field2_index.as_any().downcast_ref::(); - - Ok((0..struct_array.len()) - .map(|i| { - ( - field1_array.value(i).to_string(), - field2_array.map(|arr| arr.value(i).to_string()), - ) - }) - .collect()) - } else { - Ok(vec![]) - } - } else { - Ok(vec![]) - } + let column_index = record_batch + .schema() + .index_of(column_name) + .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; + let column = record_batch.column(column_index); + if let Some(list_array) = column.as_any().downcast_ref::() { + if let Some(struct_array) = + list_array.value(row_index).as_any().downcast_ref::() + { + let field1_index = struct_array + .column_by_name(field1_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; + let field2_index = struct_array + .column_by_name(field2_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; + + let field1_array = field1_index + .as_any() + .downcast_ref::() + .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field1_name.to_string()))?; + let field2_array = field2_index.as_any().downcast_ref::(); + + Ok((0..struct_array.len()) + .map(|i| { + ( + field1_array.value(i).to_string(), + field2_array.map(|arr| arr.value(i).to_string()), + ) + }) + .collect()) + } else { + Ok(vec![]) + } + } else { + Ok(vec![]) + } } fn struct_3_list_column_opt_string( - record_batch: &RecordBatch, - column_name: &str, - row_index: usize, - field1_name: &str, - field2_name: &str, - field3_name: &str, + record_batch: &RecordBatch, + column_name: &str, + row_index: usize, + field1_name: &str, + field2_name: &str, + field3_name: &str, ) -> Result)>, ChronicleArrowError> { - let column_index = record_batch - .schema() - .index_of(column_name) - .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; - let column = record_batch.column(column_index); - if let Some(list_array) = column.as_any().downcast_ref::() { - if let Some(struct_array) = - list_array.value(row_index).as_any().downcast_ref::() - { - let field1_index = struct_array - .column_by_name(field1_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; - let field2_index = struct_array - .column_by_name(field2_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; - let field3_index = struct_array - .column_by_name(field3_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field3_name.to_string()))?; - - let field1_array = field1_index - .as_any() - .downcast_ref::() - .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field1_name.to_string()))?; - let field2_array = field2_index - .as_any() - .downcast_ref::() - .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field2_name.to_string()))?; - let field3_array = field3_index.as_any().downcast_ref::(); - - Ok((0..struct_array.len()) - .map(|i| { - ( - field1_array.value(i).to_string(), - field2_array.value(i).to_string(), - field3_array.map(|arr| arr.value(i).to_string()), - ) - }) - .collect::)>>()) - } 
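// --- Editor's sketch, not part of the diff: the ListArray-of-StructArray read
// pattern that struct_2_list_column_opt_string implements above, reduced to a
// runnable example. Assumes the arrow-array/arrow-schema crates; the
// "agent"/"role" field names are illustrative, echoing the builders used below.
use arrow_array::builder::{ListBuilder, StringBuilder, StructBuilder};
use arrow_array::{Array, ListArray, StringArray, StructArray};
use arrow_schema::{DataType, Field};

fn pairs_at(list: &ListArray, row: usize) -> Vec<(String, Option<String>)> {
	// Each list cell is itself an array; downcast it to a StructArray, then
	// read the two Utf8 children, treating the second as nullable.
	let cell = list.value(row);
	let Some(structs) = cell.as_any().downcast_ref::<StructArray>() else {
		return vec![];
	};
	let agents = structs.column_by_name("agent").unwrap();
	let agents = agents.as_any().downcast_ref::<StringArray>().unwrap();
	let roles = structs.column_by_name("role").unwrap();
	let roles = roles.as_any().downcast_ref::<StringArray>().unwrap();
	(0..structs.len())
		.map(|i| {
			let role = if roles.is_null(i) { None } else { Some(roles.value(i).to_string()) };
			(agents.value(i).to_string(), role)
		})
		.collect()
}

fn main() {
	let mut builder = ListBuilder::new(StructBuilder::from_fields(
		vec![
			Field::new("agent", DataType::Utf8, false),
			Field::new("role", DataType::Utf8, true),
		],
		0,
	));
	let s = builder.values();
	s.field_builder::<StringBuilder>(0).unwrap().append_value("agent1");
	s.field_builder::<StringBuilder>(1).unwrap().append_null();
	s.append(true);
	builder.append(true);
	let list = builder.finish();
	assert_eq!(pairs_at(&list, 0), vec![("agent1".to_string(), None)]);
}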
else { - Ok(vec![]) - } - } else { - Ok(vec![]) - } + let column_index = record_batch + .schema() + .index_of(column_name) + .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; + let column = record_batch.column(column_index); + if let Some(list_array) = column.as_any().downcast_ref::() { + if let Some(struct_array) = + list_array.value(row_index).as_any().downcast_ref::() + { + let field1_index = struct_array + .column_by_name(field1_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; + let field2_index = struct_array + .column_by_name(field2_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; + let field3_index = struct_array + .column_by_name(field3_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field3_name.to_string()))?; + + let field1_array = field1_index + .as_any() + .downcast_ref::() + .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field1_name.to_string()))?; + let field2_array = field2_index + .as_any() + .downcast_ref::() + .ok_or_else(|| ChronicleArrowError::ColumnTypeMismatch(field2_name.to_string()))?; + let field3_array = field3_index.as_any().downcast_ref::(); + + Ok((0..struct_array.len()) + .map(|i| { + ( + field1_array.value(i).to_string(), + field2_array.value(i).to_string(), + field3_array.map(|arr| arr.value(i).to_string()), + ) + }) + .collect::)>>()) + } else { + Ok(vec![]) + } + } else { + Ok(vec![]) + } } fn struct_2_list_column( - record_batch: &RecordBatch, - column_name: &str, - row_index: usize, - field1_name: &str, - field2_name: &str, + record_batch: &RecordBatch, + column_name: &str, + row_index: usize, + field1_name: &str, + field2_name: &str, ) -> Result, ChronicleArrowError> { - let column_index = record_batch - .schema() - .index_of(column_name) - .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; - let column = record_batch.column(column_index); - if let Some(list_array) = column.as_any().downcast_ref::() { - if let Some(struct_array) = - list_array.value(row_index).as_any().downcast_ref::() - { - let field1_index = struct_array - .column_by_name(field1_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; - let field2_index = struct_array - .column_by_name(field2_name) - .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; - - if let (Some(field1_array), Some(field2_array)) = ( - field1_index.as_any().downcast_ref::(), - field2_index.as_any().downcast_ref::(), - ) { - Ok((0..struct_array.len()) - .map(|i| (field1_array.value(i).to_string(), field2_array.value(i).to_string())) - .collect()) - } else { - Ok(vec![]) - } - } else { - Ok(vec![]) - } - } else { - Ok(vec![]) - } + let column_index = record_batch + .schema() + .index_of(column_name) + .map_err(|_| ChronicleArrowError::MissingColumn(column_name.to_string()))?; + let column = record_batch.column(column_index); + if let Some(list_array) = column.as_any().downcast_ref::() { + if let Some(struct_array) = + list_array.value(row_index).as_any().downcast_ref::() + { + let field1_index = struct_array + .column_by_name(field1_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field1_name.to_string()))?; + let field2_index = struct_array + .column_by_name(field2_name) + .ok_or_else(|| ChronicleArrowError::MissingColumn(field2_name.to_string()))?; + + if let (Some(field1_array), Some(field2_array)) = ( + field1_index.as_any().downcast_ref::(), + field2_index.as_any().downcast_ref::(), + ) { + 
+				Ok((0..struct_array.len())
+					.map(|i| (field1_array.value(i).to_string(), field2_array.value(i).to_string()))
+					.collect())
+			} else {
+				Ok(vec![])
+			}
+		} else {
+			Ok(vec![])
+		}
+	} else {
+		Ok(vec![])
+	}
 }
 
 fn with_implied(operations: Vec<ChronicleOperation>) -> Vec<ChronicleOperation> {
-    operations
-        .into_iter()
-        .flat_map(|op| {
-            let mut implied_ops = op.implied_by();
-            implied_ops.push(op);
-            implied_ops
-        })
-        .collect()
+	operations
+		.into_iter()
+		.flat_map(|op| {
+			let mut implied_ops = op.implied_by();
+			implied_ops.push(op);
+			implied_ops
+		})
+		.collect()
 }
diff --git a/crates/chronicle-arrow/src/peekablestream.rs b/crates/chronicle-arrow/src/peekablestream.rs
index c916a390b..4e8860b50 100644
--- a/crates/chronicle-arrow/src/peekablestream.rs
+++ b/crates/chronicle-arrow/src/peekablestream.rs
@@ -1,45 +1,45 @@
 use std::pin::Pin;
 
 use arrow_flight::FlightData;
-use futures::{Stream, stream::Peekable, StreamExt};
+use futures::{stream::Peekable, Stream, StreamExt};
 use tonic::{Status, Streaming};
 
 pub struct PeekableFlightDataStream {
-    inner: Peekable<Streaming<FlightData>>,
+	inner: Peekable<Streaming<FlightData>>,
 }
 
 impl PeekableFlightDataStream {
-    pub fn new(stream: Streaming<FlightData>) -> Self {
-        Self { inner: stream.peekable() }
-    }
-
-    /// Convert this stream into a `Streaming<FlightData>`.
-    /// Any messages observed through [`Self::peek`] will be lost
-    /// after the conversion.
-    pub fn into_inner(self) -> Streaming<FlightData> {
-        self.inner.into_inner()
-    }
-
-    /// Convert this stream into a `Peekable<Streaming<FlightData>>`.
-    /// Preserves the state of the stream, so that calls to [`Self::peek`]
-    /// and [`Self::poll_next`] are the same.
-    pub fn into_peekable(self) -> Peekable<Streaming<FlightData>> {
-        self.inner
-    }
-
-    /// Peek at the head of this stream without advancing it.
-    pub async fn peek(&mut self) -> Option<&Result<FlightData, Status>> {
-        Pin::new(&mut self.inner).peek().await
-    }
+	pub fn new(stream: Streaming<FlightData>) -> Self {
+		Self { inner: stream.peekable() }
+	}
+
+	/// Convert this stream into a `Streaming<FlightData>`.
+	/// Any messages observed through [`Self::peek`] will be lost
+	/// after the conversion.
+	pub fn into_inner(self) -> Streaming<FlightData> {
+		self.inner.into_inner()
+	}
+
+	/// Convert this stream into a `Peekable<Streaming<FlightData>>`.
+	/// Preserves the state of the stream, so that calls to [`Self::peek`]
+	/// and [`Self::poll_next`] are the same.
+	pub fn into_peekable(self) -> Peekable<Streaming<FlightData>> {
+		self.inner
+	}
+
+	/// Peek at the head of this stream without advancing it.
+ pub async fn peek(&mut self) -> Option<&Result> { + Pin::new(&mut self.inner).peek().await + } } impl Stream for PeekableFlightDataStream { - type Item = Result; - - fn poll_next( - mut self: Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - self.inner.poll_next_unpin(cx) - } + type Item = Result; + + fn poll_next( + mut self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + self.inner.poll_next_unpin(cx) + } } diff --git a/crates/chronicle-arrow/src/query/activity.rs b/crates/chronicle-arrow/src/query/activity.rs index a5daffdae..2edf5a956 100644 --- a/crates/chronicle-arrow/src/query/activity.rs +++ b/crates/chronicle-arrow/src/query/activity.rs @@ -2,28 +2,28 @@ use std::{collections::HashMap, sync::Arc}; use arrow::array::{ArrayBuilder, ListBuilder, StringBuilder, StructBuilder}; use arrow_array::{ - Array, BooleanArray, Int64Array, ListArray, RecordBatch, StringArray, TimestampNanosecondArray, + Array, BooleanArray, Int64Array, ListArray, RecordBatch, StringArray, TimestampNanosecondArray, }; use arrow_schema::{DataType, Field}; use chrono::{DateTime, Utc}; use diesel::{ - pg::PgConnection, - prelude::*, - r2d2::{ConnectionManager, Pool}, + pg::PgConnection, + prelude::*, + r2d2::{ConnectionManager, Pool}, }; use uuid::Uuid; use chronicle_persistence::{ - query::{Activity, Association, Delegation, Generation, Namespace, Usage, WasInformedBy}, - schema::{ - activity, agent, association, delegation, entity, generation, namespace, usage, - wasinformedby, - }, + query::{Activity, Association, Delegation, Generation, Namespace, Usage, WasInformedBy}, + schema::{ + activity, agent, association, delegation, entity, generation, namespace, usage, + wasinformedby, + }, }; use common::{ - attributes::Attributes, - domain::PrimitiveType, - prov::{DomaintypeId, ExternalIdPart}, + attributes::Attributes, + domain::PrimitiveType, + prov::{DomaintypeId, ExternalIdPart}, }; use crate::{ChronicleArrowError, DomainTypeMeta}; @@ -32,435 +32,435 @@ use super::vec_vec_string_to_list_array; #[tracing::instrument(skip(pool))] pub fn activity_count_by_type( - pool: &Pool>, - typ: Vec<&str>, + pool: &Pool>, + typ: Vec<&str>, ) -> Result { - let mut connection = pool.get()?; - let count = activity::table - .filter(activity::domaintype.eq_any(typ)) - .count() - .get_result(&mut connection)?; - Ok(count) + let mut connection = pool.get()?; + let count = activity::table + .filter(activity::domaintype.eq_any(typ)) + .count() + .get_result(&mut connection)?; + Ok(count) } #[derive(Default)] pub struct AgentInteraction { - pub(crate) agent: String, - pub(crate) role: Option, + pub(crate) agent: String, + pub(crate) role: Option, } #[derive(Default)] pub struct ActivityAssociationRef { - pub(crate) responsible: AgentInteraction, - pub(crate) delegated: Vec, + pub(crate) responsible: AgentInteraction, + pub(crate) delegated: Vec, } #[derive(Default)] pub struct ActivityAndReferences { - pub(crate) id: String, - pub(crate) namespace_name: String, - pub(crate) namespace_uuid: [u8; 16], - pub(crate) started: Option>, - pub(crate) ended: Option>, - pub(crate) attributes: Attributes, - pub(crate) used: Vec, - pub(crate) generated: Vec, - pub(crate) was_informed_by: Vec, - pub(crate) was_associated_with: Vec, + pub(crate) id: String, + pub(crate) namespace_name: String, + pub(crate) namespace_uuid: [u8; 16], + pub(crate) started: Option>, + pub(crate) ended: Option>, + pub(crate) attributes: Attributes, + pub(crate) used: Vec, + pub(crate) generated: Vec, + 
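// --- Editor's sketch, not part of the diff: the peek-then-consume contract
// behind PeekableFlightDataStream above, shown with a plain futures stream in
// place of tonic's Streaming<FlightData>. Assumes the futures and tokio crates.
use futures::StreamExt;
use std::pin::Pin;

#[tokio::main]
async fn main() {
	let mut peekable = futures::stream::iter(vec![1, 2, 3]).peekable();
	// peek() borrows the head without advancing the stream...
	assert_eq!(Pin::new(&mut peekable).peek().await, Some(&1));
	// ...so the next poll still yields the same item.
	assert_eq!(peekable.next().await, Some(1));
}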
pub(crate) was_informed_by: Vec, + pub(crate) was_associated_with: Vec, } impl ActivityAndReferences { - #[tracing::instrument(skip(items, meta))] - pub fn to_record_batch( - items: impl Iterator, - meta: &DomainTypeMeta, - ) -> Result { - let mut attributes_map: HashMap>)> = - HashMap::new(); - - for (attribute_name, primitive_type) in meta.attributes.iter() { - attributes_map.insert(attribute_name.to_string(), (*primitive_type, vec![])); - } - - let mut id_vec = Vec::new(); - let mut namespace_name_vec = Vec::new(); - let mut namespace_uuid_vec = Vec::new(); - let mut started_vec = Vec::new(); - let mut ended_vec = Vec::new(); - let mut used_vec = Vec::new(); - let mut generated_vec = Vec::new(); - let mut was_informed_by_vec = Vec::new(); - let mut was_associated_with_vec = Vec::new(); - - for item in items { - id_vec.push(item.id); - namespace_name_vec.push(item.namespace_name); - namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); - started_vec.push(item.started.map(|dt| dt.timestamp_nanos_opt().unwrap_or_default())); - ended_vec.push(item.ended.map(|dt| dt.timestamp_nanos_opt().unwrap_or_default())); - used_vec.push(item.used); - generated_vec.push(item.generated); - was_informed_by_vec.push(item.was_informed_by); - was_associated_with_vec.push(item.was_associated_with); - - for (key, (_primitive_type, values)) in attributes_map.iter_mut() { - if let Some(attribute) = item.attributes.get_attribute(key) { - values.push(Some(attribute.value.clone().into())); - } else { - values.push(None); - } - } - } - - let used_array = vec_vec_string_to_list_array(used_vec)?; - let generated_array = vec_vec_string_to_list_array(generated_vec)?; - let was_informed_by_array = vec_vec_string_to_list_array(was_informed_by_vec)?; - let was_associated_with_array = associations_to_list_array(was_associated_with_vec)?; - - let mut fields = vec![ - ( - "namespace_name".to_string(), - Arc::new(StringArray::from(namespace_name_vec)) as Arc, - ), - ( - "namespace_uuid".to_string(), - Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, - ), - ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), - ]; - - // Dynamically generate fields for attribute key/values based on their primitive type - for (key, (primitive_type, values)) in attributes_map { - let array: Arc = match primitive_type { - PrimitiveType::String => { - tracing::debug!("Converting String attribute values for key: {}", key); - Arc::new(StringArray::from( - values - .iter() - .map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()) - .collect::>(), - )) as Arc - } - PrimitiveType::Int => { - tracing::debug!("Converting Int attribute values for key: {}", key); - Arc::new(Int64Array::from( - values - .iter() - .map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()) - .collect::>(), - )) as Arc - } - PrimitiveType::Bool => { - tracing::debug!("Converting Bool attribute values for key: {}", key); - Arc::new(BooleanArray::from( - values - .iter() - .map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()) - .collect::>(), - )) as Arc - } - _ => { - tracing::warn!("Unsupported attribute primitive type for key: {}", key); - continue; - } - }; - fields.push((key, array as Arc)); - } - - fields.extend(vec![ - ( - "started".to_string(), - Arc::new(TimestampNanosecondArray::with_timezone_opt( - started_vec.into(), - Some("UTC".to_string()), - )) as Arc, - ), - ( - "ended".to_string(), - Arc::new(TimestampNanosecondArray::with_timezone_opt( - ended_vec.into(), - Some("UTC".to_string()), - )) as Arc, - ), - 
("used".to_string(), Arc::new(used_array) as Arc), - ("generated".to_string(), Arc::new(generated_array) as Arc), - ( - "was_informed_by".to_string(), - Arc::new(was_informed_by_array) as Arc, - ), - ( - "was_associated_with".to_string(), - Arc::new(was_associated_with_array) as Arc, - ), - ]); - - let hashed_fields = fields.into_iter().collect::>(); - - let mut columns = Vec::new(); - for field in meta.schema.fields() { - let field_name = field.name(); - match hashed_fields.get(field_name) { - Some(array) => columns.push(array.clone()), - None => - return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), - } - } - - RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) - } + #[tracing::instrument(skip(items, meta))] + pub fn to_record_batch( + items: impl Iterator, + meta: &DomainTypeMeta, + ) -> Result { + let mut attributes_map: HashMap>)> = + HashMap::new(); + + for (attribute_name, primitive_type) in meta.attributes.iter() { + attributes_map.insert(attribute_name.to_string(), (*primitive_type, vec![])); + } + + let mut id_vec = Vec::new(); + let mut namespace_name_vec = Vec::new(); + let mut namespace_uuid_vec = Vec::new(); + let mut started_vec = Vec::new(); + let mut ended_vec = Vec::new(); + let mut used_vec = Vec::new(); + let mut generated_vec = Vec::new(); + let mut was_informed_by_vec = Vec::new(); + let mut was_associated_with_vec = Vec::new(); + + for item in items { + id_vec.push(item.id); + namespace_name_vec.push(item.namespace_name); + namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); + started_vec.push(item.started.map(|dt| dt.timestamp_nanos_opt().unwrap_or_default())); + ended_vec.push(item.ended.map(|dt| dt.timestamp_nanos_opt().unwrap_or_default())); + used_vec.push(item.used); + generated_vec.push(item.generated); + was_informed_by_vec.push(item.was_informed_by); + was_associated_with_vec.push(item.was_associated_with); + + for (key, (_primitive_type, values)) in attributes_map.iter_mut() { + if let Some(attribute) = item.attributes.get_attribute(key) { + values.push(Some(attribute.value.clone().into())); + } else { + values.push(None); + } + } + } + + let used_array = vec_vec_string_to_list_array(used_vec)?; + let generated_array = vec_vec_string_to_list_array(generated_vec)?; + let was_informed_by_array = vec_vec_string_to_list_array(was_informed_by_vec)?; + let was_associated_with_array = associations_to_list_array(was_associated_with_vec)?; + + let mut fields = vec![ + ( + "namespace_name".to_string(), + Arc::new(StringArray::from(namespace_name_vec)) as Arc, + ), + ( + "namespace_uuid".to_string(), + Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, + ), + ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), + ]; + + // Dynamically generate fields for attribute key/values based on their primitive type + for (key, (primitive_type, values)) in attributes_map { + let array: Arc = match primitive_type { + PrimitiveType::String => { + tracing::debug!("Converting String attribute values for key: {}", key); + Arc::new(StringArray::from( + values + .iter() + .map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()) + .collect::>(), + )) as Arc + }, + PrimitiveType::Int => { + tracing::debug!("Converting Int attribute values for key: {}", key); + Arc::new(Int64Array::from( + values + .iter() + .map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()) + .collect::>(), + )) as Arc + }, + PrimitiveType::Bool => { + tracing::debug!("Converting Bool attribute values for key: 
{}", key); + Arc::new(BooleanArray::from( + values + .iter() + .map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()) + .collect::>(), + )) as Arc + }, + _ => { + tracing::warn!("Unsupported attribute primitive type for key: {}", key); + continue; + }, + }; + fields.push((key, array as Arc)); + } + + fields.extend(vec![ + ( + "started".to_string(), + Arc::new(TimestampNanosecondArray::with_timezone_opt( + started_vec.into(), + Some("UTC".to_string()), + )) as Arc, + ), + ( + "ended".to_string(), + Arc::new(TimestampNanosecondArray::with_timezone_opt( + ended_vec.into(), + Some("UTC".to_string()), + )) as Arc, + ), + ("used".to_string(), Arc::new(used_array) as Arc), + ("generated".to_string(), Arc::new(generated_array) as Arc), + ( + "was_informed_by".to_string(), + Arc::new(was_informed_by_array) as Arc, + ), + ( + "was_associated_with".to_string(), + Arc::new(was_associated_with_array) as Arc, + ), + ]); + + let hashed_fields = fields.into_iter().collect::>(); + + let mut columns = Vec::new(); + for field in meta.schema.fields() { + let field_name = field.name(); + match hashed_fields.get(field_name) { + Some(array) => columns.push(array.clone()), + None => + return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), + } + } + + RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) + } } fn associations_to_list_array( - associations: Vec>, + associations: Vec>, ) -> Result { - let fields = - vec![Field::new("agent", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; - - let agent_struct = DataType::Struct(fields.clone().into()); - - let mut builder = ListBuilder::new(StructBuilder::new( - vec![ - Field::new("responsible", agent_struct.clone(), false), - Field::new( - "delegated", - DataType::List(Arc::new(Field::new("item", agent_struct, true))), - false, - ), - ], - vec![ - Box::new(StructBuilder::from_fields(fields.clone(), 0)), - Box::new(ListBuilder::new(StructBuilder::from_fields(fields, 0))), - ], - )); - - for association_vec in associations { - let struct_builder = builder.values(); - - for association in association_vec { - // Build the responsible field - let responsible_builder = struct_builder.field_builder::(0).unwrap(); - responsible_builder - .field_builder::(0) - .unwrap() - .append_value(&association.responsible.agent); - if let Some(role) = &association.responsible.role { - responsible_builder - .field_builder::(1) - .unwrap() - .append_value(role); - } else { - responsible_builder.field_builder::(1).unwrap().append_null(); - } - responsible_builder.append(true); - - // Build the delegated field - let delegated_builder = - struct_builder.field_builder::>(1).unwrap(); - for agent_interaction in &association.delegated { - let interaction_builder = delegated_builder.values(); - interaction_builder - .field_builder::(0) - .unwrap() - .append_value(&agent_interaction.agent); - if let Some(role) = &agent_interaction.role { - interaction_builder - .field_builder::(1) - .unwrap() - .append_value(role); - } else { - interaction_builder.field_builder::(1).unwrap().append_null(); - } - interaction_builder.append(true); - } - delegated_builder.append(true); - - struct_builder.append(true); - } - - builder.append(true); - } - - Ok(builder.finish()) + let fields = + vec![Field::new("agent", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; + + let agent_struct = DataType::Struct(fields.clone().into()); + + let mut builder = ListBuilder::new(StructBuilder::new( + vec![ + 
Field::new("responsible", agent_struct.clone(), false), + Field::new( + "delegated", + DataType::List(Arc::new(Field::new("item", agent_struct, true))), + false, + ), + ], + vec![ + Box::new(StructBuilder::from_fields(fields.clone(), 0)), + Box::new(ListBuilder::new(StructBuilder::from_fields(fields, 0))), + ], + )); + + for association_vec in associations { + let struct_builder = builder.values(); + + for association in association_vec { + // Build the responsible field + let responsible_builder = struct_builder.field_builder::(0).unwrap(); + responsible_builder + .field_builder::(0) + .unwrap() + .append_value(&association.responsible.agent); + if let Some(role) = &association.responsible.role { + responsible_builder + .field_builder::(1) + .unwrap() + .append_value(role); + } else { + responsible_builder.field_builder::(1).unwrap().append_null(); + } + responsible_builder.append(true); + + // Build the delegated field + let delegated_builder = + struct_builder.field_builder::>(1).unwrap(); + for agent_interaction in &association.delegated { + let interaction_builder = delegated_builder.values(); + interaction_builder + .field_builder::(0) + .unwrap() + .append_value(&agent_interaction.agent); + if let Some(role) = &agent_interaction.role { + interaction_builder + .field_builder::(1) + .unwrap() + .append_value(role); + } else { + interaction_builder.field_builder::(1).unwrap().append_null(); + } + interaction_builder.append(true); + } + delegated_builder.append(true); + + struct_builder.append(true); + } + + builder.append(true); + } + + Ok(builder.finish()) } pub fn load_activities_by_type( - pool: &Pool>, - typ: &Option, - position: u64, - max_records: u64, -) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { - let mut connection = pool.get().map_err(ChronicleArrowError::PoolError)?; - - let activities_and_namespaces: Vec<(Activity, Namespace)> = match typ { - Some(typ_value) => activity::table - .inner_join(namespace::table.on(activity::namespace_id.eq(namespace::id))) - .filter(activity::domaintype.eq(typ_value.external_id_part())) - .order(activity::id) - .select((Activity::as_select(), Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load(&mut connection)?, - None => activity::table - .inner_join(namespace::table.on(activity::namespace_id.eq(namespace::id))) - .filter(activity::domaintype.is_null()) - .order(activity::id) - .select((Activity::as_select(), Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load(&mut connection)?, - }; - - let (activities, namespaces): (Vec, Vec) = - activities_and_namespaces.into_iter().unzip(); - - let mut was_informed_by_map: HashMap> = - WasInformedBy::belonging_to(&activities) - .inner_join(activity::table.on(wasinformedby::informing_activity_id.eq(activity::id))) - .select((wasinformedby::activity_id, activity::external_id)) - .load::<(i32, String)>(&mut connection)? - .into_iter() - .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { - acc.entry(id).or_default().push(external_id); - acc - }); - - let mut used_map: HashMap> = Usage::belonging_to(&activities) - .inner_join(entity::table.on(usage::entity_id.eq(entity::id))) - .select((usage::activity_id, entity::external_id)) - .load::<(i32, String)>(&mut connection)? 
- .into_iter() - .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { - acc.entry(id).or_default().push(external_id); - acc - }); - - let mut generated_map: HashMap> = Generation::belonging_to(&activities) - .inner_join(entity::table.on(generation::generated_entity_id.eq(entity::id))) - .select((generation::activity_id, entity::external_id)) - .load::<(i32, String)>(&mut connection)? - .into_iter() - .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { - acc.entry(id).or_default().push(external_id); - acc - }); - - let associations_map: HashMap> = - Association::belonging_to(&activities) - .inner_join(agent::table.on(association::agent_id.eq(agent::id))) - .select((association::activity_id, (agent::id, agent::external_id, association::role))) - .load::<(i32, (i32, String, String))>(&mut connection)? - .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap>, - (activity_id, (agent_id, agent_external_id, role_external_id))| { - acc.entry(activity_id) - .or_default() - .insert(agent_id, (agent_external_id, role_external_id)); - acc - }, - ); - - let delegations_map: HashMap> = - Delegation::belonging_to(&activities) - .inner_join(agent::table.on(delegation::delegate_id.eq(agent::id))) - .select(( - delegation::activity_id, - (delegation::responsible_id, agent::external_id, delegation::role), - )) - .load::<(i32, (i32, String, String))>(&mut connection)? - .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap>, - (activity_id, (agent_id, agent_external_id, role_external_id))| { - acc.entry(activity_id) - .or_default() - .insert(agent_id, (agent_external_id, role_external_id)); - acc - }, - ); - - let mut activity_associations: HashMap> = HashMap::new(); - - for (activity_id, agent_map) in associations_map.into_iter() { - let mut association_refs = Vec::new(); - for (agent_id, (agent_external_id, role_external_id)) in agent_map.into_iter() { - let mut delegated_agents = Vec::new(); - if let Some(delegations) = delegations_map.get(&activity_id) { - if let Some((delegated_agent_external_id, delegated_role_external_id)) = - delegations.get(&agent_id) - { - delegated_agents.push(AgentInteraction { - agent: delegated_agent_external_id.clone(), - role: Some(delegated_role_external_id.clone()), - }); - } - } - association_refs.push(ActivityAssociationRef { - responsible: AgentInteraction { - agent: agent_external_id, - role: Some(role_external_id), - }, - delegated: delegated_agents, - }); - } - activity_associations.insert(activity_id, association_refs); - } - let fetched_records = activities.len() as u64; - - let mut activities_and_references = vec![]; - - for (activity, ns) in activities.into_iter().zip(namespaces) { - activities_and_references.push(ActivityAndReferences { - id: activity.external_id, - namespace_name: ns.external_id, - namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), - attributes: Attributes::new( - activity.domaintype.map(DomaintypeId::from_external_id), - vec![], - ), // Placeholder for attribute loading logic - started: activity.started.map(|dt| dt.and_utc()), - ended: activity.ended.map(|dt| dt.and_utc()), - was_informed_by: was_informed_by_map.remove(&activity.id).unwrap_or_default(), - used: used_map.remove(&activity.id).unwrap_or_default(), - generated: generated_map.remove(&activity.id).unwrap_or_default(), - was_associated_with: activity_associations.remove(&activity.id).unwrap_or_default(), - }); - } - Ok((activities_and_references.into_iter(), fetched_records, fetched_records)) + pool: &Pool>, + typ: &Option, + position: u64, + 
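// --- Editor's sketch, not part of the diff: the offset/limit paging contract
// of these loaders. `position` is a row offset and `max_records` a page size,
// both u64 cast to i64 for diesel; here modelled on a plain slice.
fn page<T: Clone>(rows: &[T], position: u64, max_records: u64) -> Vec<T> {
	rows.iter()
		.skip(position as usize)
		.take(max_records as usize)
		.cloned()
		.collect()
}

fn main() {
	let rows: Vec<u32> = (0..10).collect();
	assert_eq!(page(&rows, 4, 3), vec![4, 5, 6]);
}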
max_records: u64, +) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { + let mut connection = pool.get().map_err(ChronicleArrowError::PoolError)?; + + let activities_and_namespaces: Vec<(Activity, Namespace)> = match typ { + Some(typ_value) => activity::table + .inner_join(namespace::table.on(activity::namespace_id.eq(namespace::id))) + .filter(activity::domaintype.eq(typ_value.external_id_part())) + .order(activity::id) + .select((Activity::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load(&mut connection)?, + None => activity::table + .inner_join(namespace::table.on(activity::namespace_id.eq(namespace::id))) + .filter(activity::domaintype.is_null()) + .order(activity::id) + .select((Activity::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load(&mut connection)?, + }; + + let (activities, namespaces): (Vec, Vec) = + activities_and_namespaces.into_iter().unzip(); + + let mut was_informed_by_map: HashMap> = + WasInformedBy::belonging_to(&activities) + .inner_join(activity::table.on(wasinformedby::informing_activity_id.eq(activity::id))) + .select((wasinformedby::activity_id, activity::external_id)) + .load::<(i32, String)>(&mut connection)? + .into_iter() + .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { + acc.entry(id).or_default().push(external_id); + acc + }); + + let mut used_map: HashMap> = Usage::belonging_to(&activities) + .inner_join(entity::table.on(usage::entity_id.eq(entity::id))) + .select((usage::activity_id, entity::external_id)) + .load::<(i32, String)>(&mut connection)? + .into_iter() + .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { + acc.entry(id).or_default().push(external_id); + acc + }); + + let mut generated_map: HashMap> = Generation::belonging_to(&activities) + .inner_join(entity::table.on(generation::generated_entity_id.eq(entity::id))) + .select((generation::activity_id, entity::external_id)) + .load::<(i32, String)>(&mut connection)? + .into_iter() + .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { + acc.entry(id).or_default().push(external_id); + acc + }); + + let associations_map: HashMap> = + Association::belonging_to(&activities) + .inner_join(agent::table.on(association::agent_id.eq(agent::id))) + .select((association::activity_id, (agent::id, agent::external_id, association::role))) + .load::<(i32, (i32, String, String))>(&mut connection)? + .into_iter() + .fold( + HashMap::new(), + |mut acc: HashMap>, + (activity_id, (agent_id, agent_external_id, role_external_id))| { + acc.entry(activity_id) + .or_default() + .insert(agent_id, (agent_external_id, role_external_id)); + acc + }, + ); + + let delegations_map: HashMap> = + Delegation::belonging_to(&activities) + .inner_join(agent::table.on(delegation::delegate_id.eq(agent::id))) + .select(( + delegation::activity_id, + (delegation::responsible_id, agent::external_id, delegation::role), + )) + .load::<(i32, (i32, String, String))>(&mut connection)? 
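// --- Editor's sketch, not part of the diff: the unzip/zip round trip used by
// these loaders, with string pairs standing in for the diesel model structs.
fn main() {
	let pairs = vec![("act1", "ns1"), ("act2", "ns2")];
	// Split the joined rows into parallel Vecs...
	let (activities, namespaces): (Vec<_>, Vec<_>) = pairs.into_iter().unzip();
	assert_eq!(activities, ["act1", "act2"]);
	// ...then re-pair them row by row when building the output records.
	for (a, n) in activities.into_iter().zip(namespaces) {
		println!("{a} in {n}");
	}
}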
+ .into_iter() + .fold( + HashMap::new(), + |mut acc: HashMap>, + (activity_id, (agent_id, agent_external_id, role_external_id))| { + acc.entry(activity_id) + .or_default() + .insert(agent_id, (agent_external_id, role_external_id)); + acc + }, + ); + + let mut activity_associations: HashMap> = HashMap::new(); + + for (activity_id, agent_map) in associations_map.into_iter() { + let mut association_refs = Vec::new(); + for (agent_id, (agent_external_id, role_external_id)) in agent_map.into_iter() { + let mut delegated_agents = Vec::new(); + if let Some(delegations) = delegations_map.get(&activity_id) { + if let Some((delegated_agent_external_id, delegated_role_external_id)) = + delegations.get(&agent_id) + { + delegated_agents.push(AgentInteraction { + agent: delegated_agent_external_id.clone(), + role: Some(delegated_role_external_id.clone()), + }); + } + } + association_refs.push(ActivityAssociationRef { + responsible: AgentInteraction { + agent: agent_external_id, + role: Some(role_external_id), + }, + delegated: delegated_agents, + }); + } + activity_associations.insert(activity_id, association_refs); + } + let fetched_records = activities.len() as u64; + + let mut activities_and_references = vec![]; + + for (activity, ns) in activities.into_iter().zip(namespaces) { + activities_and_references.push(ActivityAndReferences { + id: activity.external_id, + namespace_name: ns.external_id, + namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), + attributes: Attributes::new( + activity.domaintype.map(DomaintypeId::from_external_id), + vec![], + ), // Placeholder for attribute loading logic + started: activity.started.map(|dt| dt.and_utc()), + ended: activity.ended.map(|dt| dt.and_utc()), + was_informed_by: was_informed_by_map.remove(&activity.id).unwrap_or_default(), + used: used_map.remove(&activity.id).unwrap_or_default(), + generated: generated_map.remove(&activity.id).unwrap_or_default(), + was_associated_with: activity_associations.remove(&activity.id).unwrap_or_default(), + }); + } + Ok((activities_and_references.into_iter(), fetched_records, fetched_records)) } #[cfg(test)] mod test { - use super::*; - - #[test] - fn test_associations_to_list_array_empty() { - let associations = Vec::new(); - let result = associations_to_list_array(associations); - assert!(result.is_ok()); - let array = result.unwrap(); - assert_eq!(array.len(), 0); - } - - #[test] - fn test_associations_to_list_array_single() { - let associations = vec![ActivityAssociationRef { - responsible: AgentInteraction { - agent: "agent1".to_string(), - role: Some("role1".to_string()), - }, - delegated: vec![AgentInteraction { - agent: "delegated1".to_string(), - role: Some("role3".to_string()), - }], - }]; - let result = associations_to_list_array(vec![associations]).unwrap(); - - let json = arrow::json::writer::array_to_json_array(&result).unwrap(); - - insta::assert_debug_snapshot!(&json, @r###" + use super::*; + + #[test] + fn test_associations_to_list_array_empty() { + let associations = Vec::new(); + let result = associations_to_list_array(associations); + assert!(result.is_ok()); + let array = result.unwrap(); + assert_eq!(array.len(), 0); + } + + #[test] + fn test_associations_to_list_array_single() { + let associations = vec![ActivityAssociationRef { + responsible: AgentInteraction { + agent: "agent1".to_string(), + role: Some("role1".to_string()), + }, + delegated: vec![AgentInteraction { + agent: "delegated1".to_string(), + role: Some("role3".to_string()), + }], + }]; + let result = 
associations_to_list_array(vec![associations]).unwrap(); + + let json = arrow::json::writer::array_to_json_array(&result).unwrap(); + + insta::assert_debug_snapshot!(&json, @r###" [ Array [ Object { @@ -478,40 +478,40 @@ mod test { ], ] "### ); - } - - #[test] - fn test_associations_to_list_array_multiple() { - let associations = vec![ - ActivityAssociationRef { - responsible: AgentInteraction { - agent: "agent1".to_string(), - role: Some("role1".to_string()), - }, - delegated: vec![], - }, - ActivityAssociationRef { - responsible: AgentInteraction { - agent: "agent2".to_string(), - role: Some("role2".to_string()), - }, - delegated: vec![ - AgentInteraction { - agent: "delegated1".to_string(), - role: Some("role3".to_string()), - }, - AgentInteraction { - agent: "delegated2".to_string(), - role: Some("role3".to_string()), - }, - ], - }, - ]; - let result = associations_to_list_array(vec![associations]).unwrap(); - - let json = arrow::json::writer::array_to_json_array(&result).unwrap(); - - insta::assert_debug_snapshot!(&json, @r###" + } + + #[test] + fn test_associations_to_list_array_multiple() { + let associations = vec![ + ActivityAssociationRef { + responsible: AgentInteraction { + agent: "agent1".to_string(), + role: Some("role1".to_string()), + }, + delegated: vec![], + }, + ActivityAssociationRef { + responsible: AgentInteraction { + agent: "agent2".to_string(), + role: Some("role2".to_string()), + }, + delegated: vec![ + AgentInteraction { + agent: "delegated1".to_string(), + role: Some("role3".to_string()), + }, + AgentInteraction { + agent: "delegated2".to_string(), + role: Some("role3".to_string()), + }, + ], + }, + ]; + let result = associations_to_list_array(vec![associations]).unwrap(); + + let json = arrow::json::writer::array_to_json_array(&result).unwrap(); + + insta::assert_debug_snapshot!(&json, @r###" [ Array [ Object { @@ -540,5 +540,5 @@ mod test { ], ] "### ); - } + } } diff --git a/crates/chronicle-arrow/src/query/agent.rs b/crates/chronicle-arrow/src/query/agent.rs index 6ef00782a..4a3ab5632 100644 --- a/crates/chronicle-arrow/src/query/agent.rs +++ b/crates/chronicle-arrow/src/query/agent.rs @@ -6,375 +6,375 @@ use arrow_buffer::{Buffer, ToByteSlice}; use arrow_data::ArrayData; use arrow_schema::{DataType, Field}; use diesel::{ - pg::PgConnection, - prelude::*, - r2d2::{ConnectionManager, Pool}, + pg::PgConnection, + prelude::*, + r2d2::{ConnectionManager, Pool}, }; use uuid::Uuid; use chronicle_persistence::{ - query::{Agent, Attribution, Delegation, Namespace}, - schema::{activity, agent, attribution, delegation, entity, namespace}, + query::{Agent, Attribution, Delegation, Namespace}, + schema::{activity, agent, attribution, delegation, entity, namespace}, }; use common::{ - attributes::Attributes, - domain::PrimitiveType, - prov::{DomaintypeId, ExternalIdPart, Role}, + attributes::Attributes, + domain::PrimitiveType, + prov::{DomaintypeId, ExternalIdPart, Role}, }; use crate::{ - ChronicleArrowError, - DomainTypeMeta, meta::{agent_attribution_struct, agent_delegation_struct}, + meta::{agent_attribution_struct, agent_delegation_struct}, + ChronicleArrowError, DomainTypeMeta, }; #[tracing::instrument(skip(pool))] pub fn agent_count_by_type( - pool: &Pool>, - typ: Vec<&str>, + pool: &Pool>, + typ: Vec<&str>, ) -> Result { - let mut connection = pool.get()?; - let count = agent::table - .filter(agent::domaintype.eq_any(typ)) - .count() - .get_result(&mut connection)?; - Ok(count) + let mut connection = pool.get()?; + let count = agent::table + 
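// --- Editor's sketch, not part of the diff: the array_to_json_array call the
// snapshot tests above depend on, applied to a simple StringArray. Assumes
// the arrow crate with its JSON support compiled in.
use arrow_array::StringArray;

fn main() -> Result<(), Box<dyn std::error::Error>> {
	let arr = StringArray::from(vec!["agent1", "agent2"]);
	// One serde_json::Value per array slot; insta can snapshot the Vec.
	let json = arrow::json::writer::array_to_json_array(&arr)?;
	assert_eq!(json.len(), 2);
	Ok(())
}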
.filter(agent::domaintype.eq_any(typ)) + .count() + .get_result(&mut connection)?; + Ok(count) } #[derive(Default)] pub struct ActedOnBehalfOfRef { - pub(crate) agent: String, - pub(crate) role: Option, - pub(crate) activity: String, + pub(crate) agent: String, + pub(crate) role: Option, + pub(crate) activity: String, } #[derive(Default)] pub struct AgentAttributionRef { - pub(crate) entity: String, - pub(crate) role: Option, + pub(crate) entity: String, + pub(crate) role: Option, } #[derive(Default)] pub struct AgentAndReferences { - pub(crate) id: String, - pub(crate) namespace_name: String, - pub(crate) namespace_uuid: [u8; 16], - pub(crate) attributes: Attributes, - pub(crate) acted_on_behalf_of: Vec, - pub(crate) was_attributed_to: Vec, + pub(crate) id: String, + pub(crate) namespace_name: String, + pub(crate) namespace_uuid: [u8; 16], + pub(crate) attributes: Attributes, + pub(crate) acted_on_behalf_of: Vec, + pub(crate) was_attributed_to: Vec, } impl AgentAndReferences { - #[tracing::instrument(skip(items, meta))] - pub fn to_record_batch( - items: impl Iterator, - meta: &DomainTypeMeta, - ) -> Result { - let mut attributes_map: HashMap>)> = - HashMap::new(); - - for (attribute_name, primitive_type) in meta.attributes.iter() { - attributes_map.insert(attribute_name.to_string(), (*primitive_type, vec![])); - } - - let mut id_vec = Vec::new(); - let mut namespace_name_vec = Vec::new(); - let mut namespace_uuid_vec = Vec::new(); - let mut acted_on_behalf_of_vec = Vec::new(); - let mut was_attributed_to_vec = Vec::new(); - - for item in items { - id_vec.push(item.id); - namespace_name_vec.push(item.namespace_name); - - namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); - acted_on_behalf_of_vec.push(item.acted_on_behalf_of); - was_attributed_to_vec.push(item.was_attributed_to); - - for (key, (_primitive_type, values)) in attributes_map.iter_mut() { - if let Some(attribute) = item.attributes.get_attribute(key) { - values.push(Some(attribute.value.clone().into())); - } else { - values.push(None); - } - } - } - - let acted_on_behalf_of_array = - agent_acted_on_behalf_of_to_list_array(acted_on_behalf_of_vec)?; - let was_attributed_to_array = agent_attributions_to_list_array(was_attributed_to_vec)?; - - let mut fields = vec![ - ( - "namespace_name".to_string(), - Arc::new(StringArray::from(namespace_name_vec)) as Arc, - ), - ( - "namespace_uuid".to_string(), - Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, - ), - ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), - ]; - - // Dynamically generate fields for attribute key/values based on their primitive type - for (key, (primitive_type, values)) in attributes_map { - let array: Arc = match primitive_type { - PrimitiveType::String => { - tracing::debug!("Converting String attribute values for key: {}", key); - Arc::new(StringArray::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()), - )) as Arc - } - PrimitiveType::Int => { - tracing::debug!("Converting Int attribute values for key: {}", key); - Arc::new(Int64Array::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()), - )) as Arc - } - PrimitiveType::Bool => { - tracing::debug!("Converting Bool attribute values for key: {}", key); - Arc::new(BooleanArray::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()), - )) as Arc - } - _ => { - tracing::warn!("Unsupported attribute primitive type for key: {}", key); - continue; - } - }; - 
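// --- Editor's note and sketch, not part of the diff: the conversions above
// call unwrap_or_default(), so a missing attribute lands as ""/0/false rather
// than an Arrow null. Building from Options directly would preserve nulls:
use arrow_array::{Array, Int64Array};

fn main() {
	// None flattened to a default value: no null bitmap.
	let flattened = Int64Array::from_iter_values(vec![1, 0]);
	assert_eq!(flattened.null_count(), 0);
	// None kept as a null slot.
	let nullable = Int64Array::from(vec![Some(1), None]);
	assert_eq!(nullable.null_count(), 1);
}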
fields.push((key, array as Arc)); - } - - fields.extend(vec![ - ( - "acted_on_behalf_of".to_string(), - Arc::new(acted_on_behalf_of_array) as Arc, - ), - ( - "was_attributed_to".to_string(), - Arc::new(was_attributed_to_array) as Arc, - ), - ]); - - let hashed_fields = fields.into_iter().collect::>(); - - let mut columns = Vec::new(); - for field in meta.schema.fields() { - let field_name = field.name(); - match hashed_fields.get(field_name) { - Some(array) => columns.push(array.clone()), - None => - return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), - } - } - - RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) - } + #[tracing::instrument(skip(items, meta))] + pub fn to_record_batch( + items: impl Iterator, + meta: &DomainTypeMeta, + ) -> Result { + let mut attributes_map: HashMap>)> = + HashMap::new(); + + for (attribute_name, primitive_type) in meta.attributes.iter() { + attributes_map.insert(attribute_name.to_string(), (*primitive_type, vec![])); + } + + let mut id_vec = Vec::new(); + let mut namespace_name_vec = Vec::new(); + let mut namespace_uuid_vec = Vec::new(); + let mut acted_on_behalf_of_vec = Vec::new(); + let mut was_attributed_to_vec = Vec::new(); + + for item in items { + id_vec.push(item.id); + namespace_name_vec.push(item.namespace_name); + + namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); + acted_on_behalf_of_vec.push(item.acted_on_behalf_of); + was_attributed_to_vec.push(item.was_attributed_to); + + for (key, (_primitive_type, values)) in attributes_map.iter_mut() { + if let Some(attribute) = item.attributes.get_attribute(key) { + values.push(Some(attribute.value.clone().into())); + } else { + values.push(None); + } + } + } + + let acted_on_behalf_of_array = + agent_acted_on_behalf_of_to_list_array(acted_on_behalf_of_vec)?; + let was_attributed_to_array = agent_attributions_to_list_array(was_attributed_to_vec)?; + + let mut fields = vec![ + ( + "namespace_name".to_string(), + Arc::new(StringArray::from(namespace_name_vec)) as Arc, + ), + ( + "namespace_uuid".to_string(), + Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, + ), + ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), + ]; + + // Dynamically generate fields for attribute key/values based on their primitive type + for (key, (primitive_type, values)) in attributes_map { + let array: Arc = match primitive_type { + PrimitiveType::String => { + tracing::debug!("Converting String attribute values for key: {}", key); + Arc::new(StringArray::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()), + )) as Arc + }, + PrimitiveType::Int => { + tracing::debug!("Converting Int attribute values for key: {}", key); + Arc::new(Int64Array::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()), + )) as Arc + }, + PrimitiveType::Bool => { + tracing::debug!("Converting Bool attribute values for key: {}", key); + Arc::new(BooleanArray::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()), + )) as Arc + }, + _ => { + tracing::warn!("Unsupported attribute primitive type for key: {}", key); + continue; + }, + }; + fields.push((key, array as Arc)); + } + + fields.extend(vec![ + ( + "acted_on_behalf_of".to_string(), + Arc::new(acted_on_behalf_of_array) as Arc, + ), + ( + "was_attributed_to".to_string(), + Arc::new(was_attributed_to_array) as Arc, + ), + ]); + + let hashed_fields = fields.into_iter().collect::>(); + + let mut 
columns = Vec::new(); + for field in meta.schema.fields() { + let field_name = field.name(); + match hashed_fields.get(field_name) { + Some(array) => columns.push(array.clone()), + None => + return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), + } + } + + RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) + } } fn agent_acted_on_behalf_of_to_list_array( - agent_attributions: Vec>, + agent_attributions: Vec>, ) -> Result { - let offsets: Vec = std::iter::once(0) - .chain(agent_attributions.iter().map(|v| v.len() as i32)) - .scan(0, |state, len| { - *state += len; - Some(*state) - }) - .collect(); - - let agent_builder = StringBuilder::new(); - let role_builder = StringBuilder::new(); - let activity_builder = StringBuilder::new(); - - let fields = vec![ - Field::new("agent", DataType::Utf8, false), - Field::new("activity", DataType::Utf8, false), - Field::new("role", DataType::Utf8, true), - ]; - let field_builders = vec![ - Box::new(agent_builder) as Box, - Box::new(activity_builder) as Box, - Box::new(role_builder) as Box, - ]; - - let mut builder = StructBuilder::new(fields, field_builders); - - for acted_on_behalf_of in agent_attributions.into_iter().flatten() { - builder - .field_builder::(0) - .expect("Failed to get agent field builder") - .append_value(&acted_on_behalf_of.agent); - builder - .field_builder::(1) - .expect("Failed to get activity field builder") - .append_value(acted_on_behalf_of.activity); - builder - .field_builder::(2) - .expect("Failed to get role field builder") - .append_option(acted_on_behalf_of.role.as_deref()); - - builder.append(true); - } - - let values_array = builder.finish(); - - let data_type = DataType::new_list(agent_delegation_struct(), false); - let offsets_buffer = Buffer::from(offsets.to_byte_slice()); - - let list_array = ListArray::from( - ArrayData::builder(data_type) - .add_child_data(values_array.to_data()) - .len(offsets.len() - 1) - .null_count(0) - .add_buffer(offsets_buffer) - .build()?, - ); - - Ok(list_array) + let offsets: Vec = std::iter::once(0) + .chain(agent_attributions.iter().map(|v| v.len() as i32)) + .scan(0, |state, len| { + *state += len; + Some(*state) + }) + .collect(); + + let agent_builder = StringBuilder::new(); + let role_builder = StringBuilder::new(); + let activity_builder = StringBuilder::new(); + + let fields = vec![ + Field::new("agent", DataType::Utf8, false), + Field::new("activity", DataType::Utf8, false), + Field::new("role", DataType::Utf8, true), + ]; + let field_builders = vec![ + Box::new(agent_builder) as Box, + Box::new(activity_builder) as Box, + Box::new(role_builder) as Box, + ]; + + let mut builder = StructBuilder::new(fields, field_builders); + + for acted_on_behalf_of in agent_attributions.into_iter().flatten() { + builder + .field_builder::(0) + .expect("Failed to get agent field builder") + .append_value(&acted_on_behalf_of.agent); + builder + .field_builder::(1) + .expect("Failed to get activity field builder") + .append_value(acted_on_behalf_of.activity); + builder + .field_builder::(2) + .expect("Failed to get role field builder") + .append_option(acted_on_behalf_of.role.as_deref()); + + builder.append(true); + } + + let values_array = builder.finish(); + + let data_type = DataType::new_list(agent_delegation_struct(), false); + let offsets_buffer = Buffer::from(offsets.to_byte_slice()); + + let list_array = ListArray::from( + ArrayData::builder(data_type) + .add_child_data(values_array.to_data()) + .len(offsets.len() - 1) + 
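// --- Editor's sketch, not part of the diff: the manual ListArray assembly
// used above, with an Int32 child instead of the struct child. Offsets have
// one more entry than there are lists; each pair brackets one list's values.
use arrow_array::{Array, Int32Array, ListArray};
use arrow_buffer::{Buffer, ToByteSlice};
use arrow_data::ArrayData;
use arrow_schema::DataType;

fn main() -> Result<(), Box<dyn std::error::Error>> {
	// Two lists, [1, 2] and [3], over a flat child of three values.
	let values = Int32Array::from(vec![1, 2, 3]);
	let offsets: Vec<i32> = vec![0, 2, 3];
	let data = ArrayData::builder(DataType::new_list(DataType::Int32, true))
		.len(offsets.len() - 1)
		.add_buffer(Buffer::from(offsets.to_byte_slice()))
		.add_child_data(values.to_data())
		.build()?;
	let list = ListArray::from(data);
	assert_eq!(list.len(), 2);
	assert_eq!(list.value(0).len(), 2);
	Ok(())
}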
.null_count(0) + .add_buffer(offsets_buffer) + .build()?, + ); + + Ok(list_array) } fn agent_attributions_to_list_array( - agent_attributions: Vec>, + agent_attributions: Vec>, ) -> Result { - let offsets: Vec = std::iter::once(0) - .chain(agent_attributions.iter().map(|v| v.len() as i32)) - .scan(0, |state, len| { - *state += len; - Some(*state) - }) - .collect(); - - let entity_builder = StringBuilder::new(); - let role_builder = StringBuilder::new(); - - let fields = - vec![Field::new("entity", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; - let field_builders = vec![ - Box::new(entity_builder) as Box, - Box::new(role_builder) as Box, - ]; - - let mut builder = StructBuilder::new(fields, field_builders); - - for agent_attribution in agent_attributions.into_iter().flatten() { - builder - .field_builder::(0) - .unwrap() - .append_value(agent_attribution.entity); - builder - .field_builder::(1) - .unwrap() - .append_option(agent_attribution.role.map(|r| r.to_string())); - - builder.append(true); - } - - let values_array = builder.finish(); - - let data_type = DataType::new_list(agent_attribution_struct(), false); - let offsets_buffer = Buffer::from(offsets.to_byte_slice()); - - let list_array = ListArray::from( - ArrayData::builder(data_type) - .add_child_data(values_array.to_data()) - .len(offsets.len() - 1) - .null_count(0) - .add_buffer(offsets_buffer) - .build()?, - ); - - Ok(list_array) + let offsets: Vec = std::iter::once(0) + .chain(agent_attributions.iter().map(|v| v.len() as i32)) + .scan(0, |state, len| { + *state += len; + Some(*state) + }) + .collect(); + + let entity_builder = StringBuilder::new(); + let role_builder = StringBuilder::new(); + + let fields = + vec![Field::new("entity", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; + let field_builders = vec![ + Box::new(entity_builder) as Box, + Box::new(role_builder) as Box, + ]; + + let mut builder = StructBuilder::new(fields, field_builders); + + for agent_attribution in agent_attributions.into_iter().flatten() { + builder + .field_builder::(0) + .unwrap() + .append_value(agent_attribution.entity); + builder + .field_builder::(1) + .unwrap() + .append_option(agent_attribution.role.map(|r| r.to_string())); + + builder.append(true); + } + + let values_array = builder.finish(); + + let data_type = DataType::new_list(agent_attribution_struct(), false); + let offsets_buffer = Buffer::from(offsets.to_byte_slice()); + + let list_array = ListArray::from( + ArrayData::builder(data_type) + .add_child_data(values_array.to_data()) + .len(offsets.len() - 1) + .null_count(0) + .add_buffer(offsets_buffer) + .build()?, + ); + + Ok(list_array) } #[tracing::instrument(skip(pool))] pub fn load_agents_by_type( - pool: &Pool>, - typ: &Option, - position: u64, - max_records: u64, -) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { - let mut connection = pool.get().map_err(ChronicleArrowError::PoolError)?; - - let agents_and_namespaces: Vec<(Agent, Namespace)> = match typ { - Some(typ_value) => agent::table - .inner_join(namespace::table.on(agent::namespace_id.eq(namespace::id))) - .filter(agent::domaintype.eq(typ_value.external_id_part())) - .order(agent::id) - .select((Agent::as_select(), Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load(&mut connection)?, - None => agent::table - .inner_join(namespace::table.on(agent::namespace_id.eq(namespace::id))) - .filter(agent::domaintype.is_null()) - .order(agent::id) - .select((Agent::as_select(), 
Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load(&mut connection)?, - }; - - let total_records = agents_and_namespaces.len() as u64; - - let (agents, namespaces): (Vec, Vec) = - agents_and_namespaces.into_iter().unzip(); - - let mut attributions_map: HashMap> = - Attribution::belonging_to(&agents) - .inner_join(entity::table.on(attribution::entity_id.eq(entity::id))) - .select((attribution::agent_id, attribution::role, entity::external_id)) - .load::<(i32, Role, String)>(&mut connection)? - .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap>, (id, role, external_id)| { - acc.entry(id).or_default().push(AgentAttributionRef { - entity: external_id, - role: Some(role.to_string()), - }); - acc - }, - ); - - let mut delegations_map: HashMap> = - Delegation::belonging_to(&agents) - .inner_join(activity::table.on(delegation::activity_id.eq(activity::id))) - .inner_join(agent::table.on(delegation::delegate_id.eq(agent::id))) - .select(( - delegation::responsible_id, - delegation::role, - activity::external_id, - agent::external_id, - )) - .load::<(i32, Role, String, String)>(&mut connection)? - .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap>, (id, role, activity, delegate)| { - acc.entry(id).or_default().push(ActedOnBehalfOfRef { - agent: delegate, - activity, - role: Some(role.to_string()), - }); - acc - }, - ); - - let mut agents_and_references = vec![]; - - for (agent, ns) in agents.into_iter().zip(namespaces) { - agents_and_references.push(AgentAndReferences { - id: agent.external_id, - namespace_name: ns.external_id, - namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), - attributes: Attributes::new( - agent.domaintype.map(DomaintypeId::from_external_id), - vec![], - ), - was_attributed_to: attributions_map.remove(&agent.id).unwrap_or_default(), - acted_on_behalf_of: delegations_map.remove(&agent.id).unwrap_or_default(), - }); - } - - Ok((agents_and_references.into_iter(), total_records, total_records)) + pool: &Pool>, + typ: &Option, + position: u64, + max_records: u64, +) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { + let mut connection = pool.get().map_err(ChronicleArrowError::PoolError)?; + + let agents_and_namespaces: Vec<(Agent, Namespace)> = match typ { + Some(typ_value) => agent::table + .inner_join(namespace::table.on(agent::namespace_id.eq(namespace::id))) + .filter(agent::domaintype.eq(typ_value.external_id_part())) + .order(agent::id) + .select((Agent::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load(&mut connection)?, + None => agent::table + .inner_join(namespace::table.on(agent::namespace_id.eq(namespace::id))) + .filter(agent::domaintype.is_null()) + .order(agent::id) + .select((Agent::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load(&mut connection)?, + }; + + let total_records = agents_and_namespaces.len() as u64; + + let (agents, namespaces): (Vec, Vec) = + agents_and_namespaces.into_iter().unzip(); + + let mut attributions_map: HashMap> = + Attribution::belonging_to(&agents) + .inner_join(entity::table.on(attribution::entity_id.eq(entity::id))) + .select((attribution::agent_id, attribution::role, entity::external_id)) + .load::<(i32, Role, String)>(&mut connection)? 
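// --- Editor's sketch, not part of the diff: the uuid round trip behind
// namespace_uuid, which is stored as a string, parsed back, and carried as
// [u8; 16]. Assumes the uuid crate with the v4 feature for the sample value.
use uuid::Uuid;

fn main() -> Result<(), uuid::Error> {
	let original = Uuid::new_v4();
	let bytes: [u8; 16] = Uuid::parse_str(&original.to_string())?.into_bytes();
	assert_eq!(Uuid::from_bytes(bytes), original);
	Ok(())
}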
+ .into_iter() + .fold( + HashMap::new(), + |mut acc: HashMap>, (id, role, external_id)| { + acc.entry(id).or_default().push(AgentAttributionRef { + entity: external_id, + role: Some(role.to_string()), + }); + acc + }, + ); + + let mut delegations_map: HashMap> = + Delegation::belonging_to(&agents) + .inner_join(activity::table.on(delegation::activity_id.eq(activity::id))) + .inner_join(agent::table.on(delegation::delegate_id.eq(agent::id))) + .select(( + delegation::responsible_id, + delegation::role, + activity::external_id, + agent::external_id, + )) + .load::<(i32, Role, String, String)>(&mut connection)? + .into_iter() + .fold( + HashMap::new(), + |mut acc: HashMap>, (id, role, activity, delegate)| { + acc.entry(id).or_default().push(ActedOnBehalfOfRef { + agent: delegate, + activity, + role: Some(role.to_string()), + }); + acc + }, + ); + + let mut agents_and_references = vec![]; + + for (agent, ns) in agents.into_iter().zip(namespaces) { + agents_and_references.push(AgentAndReferences { + id: agent.external_id, + namespace_name: ns.external_id, + namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), + attributes: Attributes::new( + agent.domaintype.map(DomaintypeId::from_external_id), + vec![], + ), + was_attributed_to: attributions_map.remove(&agent.id).unwrap_or_default(), + acted_on_behalf_of: delegations_map.remove(&agent.id).unwrap_or_default(), + }); + } + + Ok((agents_and_references.into_iter(), total_records, total_records)) } diff --git a/crates/chronicle-arrow/src/query/entity.rs b/crates/chronicle-arrow/src/query/entity.rs index bd3de62f4..49d962a4b 100644 --- a/crates/chronicle-arrow/src/query/entity.rs +++ b/crates/chronicle-arrow/src/query/entity.rs @@ -6,27 +6,27 @@ use arrow_buffer::{Buffer, ToByteSlice}; use arrow_data::ArrayData; use arrow_schema::{DataType, Field}; use diesel::{ - pg::PgConnection, - prelude::*, - r2d2::{ConnectionManager, Pool}, + pg::PgConnection, + prelude::*, + r2d2::{ConnectionManager, Pool}, }; use uuid::Uuid; use chronicle_persistence::{ - query::{Attribution, Derivation, Entity, Generation, Namespace}, - schema::{ - activity, agent, attribution, derivation, entity, entity_attribute, generation, namespace, - }, + query::{Attribution, Derivation, Entity, Generation, Namespace}, + schema::{ + activity, agent, attribution, derivation, entity, entity_attribute, generation, namespace, + }, }; use common::{ - attributes::{Attribute, Attributes}, - domain::PrimitiveType, - prov::{DomaintypeId, ExternalIdPart, operations::DerivationType}, + attributes::{Attribute, Attributes}, + domain::PrimitiveType, + prov::{operations::DerivationType, DomaintypeId, ExternalIdPart}, }; use crate::{ - ChronicleArrowError, - DomainTypeMeta, meta::{attribution_struct, derivation_struct}, + meta::{attribution_struct, derivation_struct}, + ChronicleArrowError, DomainTypeMeta, }; use super::vec_vec_string_to_list_array; @@ -35,446 +35,446 @@ use super::vec_vec_string_to_list_array; // may no longer be present in the domain definition #[tracing::instrument(skip(pool))] pub fn term_types( - pool: &Pool>, + pool: &Pool>, ) -> Result, ChronicleArrowError> { - let mut connection = pool.get()?; - let types = entity::table - .select(entity::domaintype) - .distinct() - .union(agent::table.select(agent::domaintype).distinct()) - .union(activity::table.select(activity::domaintype).distinct()) - .load::>(&mut connection)?; - - let mut unique_types = types.into_iter().collect::>(); - unique_types.sort(); - unique_types.dedup(); - - Ok(unique_types - .into_iter() - 
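
// term_types unions three per-table DISTINCT selects, then sorts and dedups
// client-side, because values distinct within each table can still collide
// across tables. The post-processing step in isolation:
fn unique_domain_types(mut types: Vec<Option<String>>) -> Vec<String> {
	types.sort();
	types.dedup();
	// NULL domaintypes (untyped records) are dropped by the flatten.
	types.into_iter().flatten().collect()
}
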
.filter_map(|x| x.map(DomaintypeId::from_external_id)) - .collect()) + let mut connection = pool.get()?; + let types = entity::table + .select(entity::domaintype) + .distinct() + .union(agent::table.select(agent::domaintype).distinct()) + .union(activity::table.select(activity::domaintype).distinct()) + .load::>(&mut connection)?; + + let mut unique_types = types.into_iter().collect::>(); + unique_types.sort(); + unique_types.dedup(); + + Ok(unique_types + .into_iter() + .filter_map(|x| x.map(DomaintypeId::from_external_id)) + .collect()) } pub fn entity_count_by_type( - pool: &Pool>, - typ: Vec<&str>, + pool: &Pool>, + typ: Vec<&str>, ) -> Result { - let mut connection = pool.get()?; - let count = entity::table - .filter(entity::domaintype.eq_any(typ)) - .count() - .get_result(&mut connection)?; - Ok(count) + let mut connection = pool.get()?; + let count = entity::table + .filter(entity::domaintype.eq_any(typ)) + .count() + .get_result(&mut connection)?; + Ok(count) } #[derive(Default, Debug)] pub struct DerivationRef { - pub source: String, - pub activity: String, + pub source: String, + pub activity: String, } #[derive(Default, Debug)] pub struct EntityAttributionRef { - pub agent: String, - pub role: Option, + pub agent: String, + pub role: Option, } #[derive(Default, Debug)] pub struct EntityAndReferences { - pub(crate) id: String, - pub(crate) namespace_name: String, - pub(crate) namespace_uuid: [u8; 16], - pub(crate) attributes: Attributes, - pub(crate) was_generated_by: Vec, - pub(crate) was_attributed_to: Vec, - pub(crate) was_derived_from: Vec, - pub(crate) had_primary_source: Vec, - pub(crate) was_quoted_from: Vec, - pub(crate) was_revision_of: Vec, + pub(crate) id: String, + pub(crate) namespace_name: String, + pub(crate) namespace_uuid: [u8; 16], + pub(crate) attributes: Attributes, + pub(crate) was_generated_by: Vec, + pub(crate) was_attributed_to: Vec, + pub(crate) was_derived_from: Vec, + pub(crate) had_primary_source: Vec, + pub(crate) was_quoted_from: Vec, + pub(crate) was_revision_of: Vec, } impl EntityAndReferences { - #[tracing::instrument(skip(items, meta))] - pub fn to_record_batch( - items: impl Iterator, - meta: &DomainTypeMeta, - ) -> Result { - let mut attributes_map: HashMap>)> = - HashMap::new(); - - for (attribute_name, primitive_type) in meta.attributes.iter() { - attributes_map.insert(attribute_name.clone(), (*primitive_type, vec![])); - } - - let mut id_vec = Vec::new(); - let mut namespace_name_vec = Vec::new(); - let mut namespace_uuid_vec = Vec::new(); - let mut was_generated_by_vec = Vec::new(); - let mut was_attributed_to_vec = Vec::new(); - let mut was_derived_from_vec = Vec::new(); - let mut had_primary_source_vec = Vec::new(); - let mut was_quoted_from_vec = Vec::new(); - let mut was_revision_of_vec = Vec::new(); - - for item in items { - id_vec.push(item.id); - namespace_name_vec.push(item.namespace_name); - namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); - was_generated_by_vec.push(item.was_generated_by); - was_attributed_to_vec.push(item.was_attributed_to); - was_derived_from_vec.push(item.was_derived_from); - had_primary_source_vec.push(item.had_primary_source); - was_quoted_from_vec.push(item.was_quoted_from); - was_revision_of_vec.push(item.was_revision_of); - for (key, (_primitive_type, values)) in attributes_map.iter_mut() { - if let Some(attribute) = item.attributes.get_attribute(key) { - values.push(Some(attribute.value.clone().into())); - } else { - values.push(None); - } - } - } - - let was_generated_by_array 
= vec_vec_string_to_list_array(was_generated_by_vec)?; - let was_attributed_to_array = attributions_to_list_array(was_attributed_to_vec)?; - let was_derived_from_array = derivations_to_list_array(was_derived_from_vec)?; - let had_primary_source_array = derivations_to_list_array(had_primary_source_vec)?; - let was_quoted_from_array = derivations_to_list_array(was_quoted_from_vec)?; - let was_revision_of_array = derivations_to_list_array(was_revision_of_vec)?; - - let mut fields = vec![ - ( - "namespace_name".to_string(), - Arc::new(StringArray::from(namespace_name_vec)) as Arc, - ), - ( - "namespace_uuid".to_string(), - Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, - ), - ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), - ]; - // Dynamically generate fields for attribute key/values based on their primitive type - for (key, (primitive_type, values)) in attributes_map { - tracing::trace!("Key: {}, Primitive Type: {:?}", key, primitive_type); - let array: Arc = match primitive_type { - PrimitiveType::String => { - tracing::debug!("Converting String attribute values for key: {}", key); - Arc::new(StringArray::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()), - )) as Arc - } - PrimitiveType::Int => { - tracing::debug!("Converting Int attribute values for key: {}", key); - Arc::new(Int64Array::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()), - )) as Arc - } - PrimitiveType::Bool => { - tracing::debug!("Converting Bool attribute values for key: {}", key); - Arc::new(BooleanArray::from_iter( - values.iter().map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()), - )) as Arc - } - _ => { - tracing::warn!("Unsupported attribute primitive type for key: {}", key); - continue; - } - }; - fields.push((key, array as Arc)); - } - - fields.extend(vec![ - ( - "was_generated_by".to_string(), - Arc::new(was_generated_by_array) as Arc, - ), - ( - "was_attributed_to".to_string(), - Arc::new(was_attributed_to_array) as Arc, - ), - ( - "was_derived_from".to_string(), - Arc::new(was_derived_from_array) as Arc, - ), - ( - "had_primary_source".to_string(), - Arc::new(had_primary_source_array) as Arc, - ), - ( - "was_quoted_from".to_string(), - Arc::new(was_quoted_from_array) as Arc, - ), - ( - "was_revision_of".to_string(), - Arc::new(was_revision_of_array) as Arc, - ), - ]); - - let hashed_fields = fields.into_iter().collect::>(); - - let mut columns = Vec::new(); - - for field in meta.schema.fields() { - let field_name = field.name(); - match hashed_fields.get(field_name) { - Some(array) => columns.push(array.clone()), - None => - return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), - } - } - - RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) - } + #[tracing::instrument(skip(items, meta))] + pub fn to_record_batch( + items: impl Iterator, + meta: &DomainTypeMeta, + ) -> Result { + let mut attributes_map: HashMap>)> = + HashMap::new(); + + for (attribute_name, primitive_type) in meta.attributes.iter() { + attributes_map.insert(attribute_name.clone(), (*primitive_type, vec![])); + } + + let mut id_vec = Vec::new(); + let mut namespace_name_vec = Vec::new(); + let mut namespace_uuid_vec = Vec::new(); + let mut was_generated_by_vec = Vec::new(); + let mut was_attributed_to_vec = Vec::new(); + let mut was_derived_from_vec = Vec::new(); + let mut had_primary_source_vec = Vec::new(); + let mut was_quoted_from_vec = Vec::new(); + let mut 
was_revision_of_vec = Vec::new(); + + for item in items { + id_vec.push(item.id); + namespace_name_vec.push(item.namespace_name); + namespace_uuid_vec.push(Uuid::from_bytes(item.namespace_uuid).to_string()); + was_generated_by_vec.push(item.was_generated_by); + was_attributed_to_vec.push(item.was_attributed_to); + was_derived_from_vec.push(item.was_derived_from); + had_primary_source_vec.push(item.had_primary_source); + was_quoted_from_vec.push(item.was_quoted_from); + was_revision_of_vec.push(item.was_revision_of); + for (key, (_primitive_type, values)) in attributes_map.iter_mut() { + if let Some(attribute) = item.attributes.get_attribute(key) { + values.push(Some(attribute.value.clone().into())); + } else { + values.push(None); + } + } + } + + let was_generated_by_array = vec_vec_string_to_list_array(was_generated_by_vec)?; + let was_attributed_to_array = attributions_to_list_array(was_attributed_to_vec)?; + let was_derived_from_array = derivations_to_list_array(was_derived_from_vec)?; + let had_primary_source_array = derivations_to_list_array(had_primary_source_vec)?; + let was_quoted_from_array = derivations_to_list_array(was_quoted_from_vec)?; + let was_revision_of_array = derivations_to_list_array(was_revision_of_vec)?; + + let mut fields = vec![ + ( + "namespace_name".to_string(), + Arc::new(StringArray::from(namespace_name_vec)) as Arc, + ), + ( + "namespace_uuid".to_string(), + Arc::new(StringArray::from(namespace_uuid_vec)) as Arc, + ), + ("id".to_string(), Arc::new(StringArray::from(id_vec)) as Arc), + ]; + // Dynamically generate fields for attribute key/values based on their primitive type + for (key, (primitive_type, values)) in attributes_map { + tracing::trace!("Key: {}, Primitive Type: {:?}", key, primitive_type); + let array: Arc = match primitive_type { + PrimitiveType::String => { + tracing::debug!("Converting String attribute values for key: {}", key); + Arc::new(StringArray::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_str()).unwrap_or_default()), + )) as Arc + }, + PrimitiveType::Int => { + tracing::debug!("Converting Int attribute values for key: {}", key); + Arc::new(Int64Array::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_i64()).unwrap_or_default()), + )) as Arc + }, + PrimitiveType::Bool => { + tracing::debug!("Converting Bool attribute values for key: {}", key); + Arc::new(BooleanArray::from_iter( + values.iter().map(|v| v.as_ref().map(|v| v.as_bool()).unwrap_or_default()), + )) as Arc + }, + _ => { + tracing::warn!("Unsupported attribute primitive type for key: {}", key); + continue; + }, + }; + fields.push((key, array as Arc)); + } + + fields.extend(vec![ + ( + "was_generated_by".to_string(), + Arc::new(was_generated_by_array) as Arc, + ), + ( + "was_attributed_to".to_string(), + Arc::new(was_attributed_to_array) as Arc, + ), + ( + "was_derived_from".to_string(), + Arc::new(was_derived_from_array) as Arc, + ), + ( + "had_primary_source".to_string(), + Arc::new(had_primary_source_array) as Arc, + ), + ( + "was_quoted_from".to_string(), + Arc::new(was_quoted_from_array) as Arc, + ), + ( + "was_revision_of".to_string(), + Arc::new(was_revision_of_array) as Arc, + ), + ]); + + let hashed_fields = fields.into_iter().collect::>(); + + let mut columns = Vec::new(); + + for field in meta.schema.fields() { + let field_name = field.name(); + match hashed_fields.get(field_name) { + Some(array) => columns.push(array.clone()), + None => + return Err(ChronicleArrowError::SchemaFieldNotFound(field_name.to_string())), + } + } + + 
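
// Sketch of the per-PrimitiveType column dispatch above, reduced to two
// variants; assumes the arrow-array crate, and the AttrValues enum is an
// illustrative stand-in for the (PrimitiveType, Vec<Option<...>>) pairs used
// in this function.
use std::sync::Arc;
use arrow_array::{ArrayRef, Int64Array, StringArray};

enum AttrValues {
	Text(Vec<Option<String>>),
	Int(Vec<Option<i64>>),
}

fn attribute_column(values: AttrValues) -> ArrayRef {
	match values {
		// Each branch yields a differently-typed Arrow array behind the same
		// Arc<dyn Array>, which is what RecordBatch::try_new consumes.
		AttrValues::Text(v) => Arc::new(StringArray::from(v)) as ArrayRef,
		AttrValues::Int(v) => Arc::new(Int64Array::from(v)) as ArrayRef,
	}
}
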
RecordBatch::try_new(meta.schema.clone(), columns).map_err(ChronicleArrowError::from) + } } fn derivations_to_list_array( - derivations: Vec>, + derivations: Vec>, ) -> Result { - let offsets: Vec = std::iter::once(0) - .chain(derivations.iter().map(|v| v.len() as i32)) - .scan(0, |state, len| { - *state += len; - Some(*state) - }) - .collect(); - - let fields = vec![ - Field::new("source", DataType::Utf8, false), - Field::new("activity", DataType::Utf8, false), - ]; - let field_builders = vec![ - Box::new(StringBuilder::new()) as Box, - Box::new(StringBuilder::new()) as Box, - ]; - - let mut builder = StructBuilder::new(fields, field_builders); - - for derivation in derivations.into_iter().flatten() { - builder - .field_builder::(0) - .unwrap() - .append_value(derivation.source); - builder - .field_builder::(1) - .unwrap() - .append_value(derivation.activity); - - builder.append(true) - } - - let values_array = builder.finish(); - - let data_type = DataType::new_list(derivation_struct(), false); - let offsets_buffer = Buffer::from(offsets.to_byte_slice()); - - let list_array = ListArray::from( - ArrayData::builder(data_type) - .add_child_data(values_array.to_data()) - .len(offsets.len() - 1) - .null_count(0) - .add_buffer(offsets_buffer) - .build()?, - ); - - Ok(list_array) + let offsets: Vec = std::iter::once(0) + .chain(derivations.iter().map(|v| v.len() as i32)) + .scan(0, |state, len| { + *state += len; + Some(*state) + }) + .collect(); + + let fields = vec![ + Field::new("source", DataType::Utf8, false), + Field::new("activity", DataType::Utf8, false), + ]; + let field_builders = vec![ + Box::new(StringBuilder::new()) as Box, + Box::new(StringBuilder::new()) as Box, + ]; + + let mut builder = StructBuilder::new(fields, field_builders); + + for derivation in derivations.into_iter().flatten() { + builder + .field_builder::(0) + .unwrap() + .append_value(derivation.source); + builder + .field_builder::(1) + .unwrap() + .append_value(derivation.activity); + + builder.append(true) + } + + let values_array = builder.finish(); + + let data_type = DataType::new_list(derivation_struct(), false); + let offsets_buffer = Buffer::from(offsets.to_byte_slice()); + + let list_array = ListArray::from( + ArrayData::builder(data_type) + .add_child_data(values_array.to_data()) + .len(offsets.len() - 1) + .null_count(0) + .add_buffer(offsets_buffer) + .build()?, + ); + + Ok(list_array) } fn attributions_to_list_array( - attributions: Vec>, + attributions: Vec>, ) -> Result { - let offsets: Vec = std::iter::once(0) - .chain(attributions.iter().map(|v| v.len() as i32)) - .scan(0, |state, len| { - *state += len; - Some(*state) - }) - .collect(); - - let agent_builder = StringBuilder::new(); - let role_builder = StringBuilder::new(); - - let fields = - vec![Field::new("agent", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; - let field_builders = vec![ - Box::new(agent_builder) as Box, - Box::new(role_builder) as Box, - ]; - - let mut builder = StructBuilder::new(fields, field_builders); - - for attribution in attributions.into_iter().flatten() { - builder - .field_builder::(0) - .unwrap() - .append_value(attribution.agent); - builder - .field_builder::(1) - .unwrap() - .append_option(attribution.role); - - builder.append(true) - } - - let values_array = builder.finish(); - - let data_type = DataType::new_list(attribution_struct(), false); - let offsets_buffer = Buffer::from(offsets.to_byte_slice()); - - let list_array = ListArray::from( - ArrayData::builder(data_type) - 
.add_child_data(values_array.to_data()) - .len(offsets.len() - 1) - .null_count(0) - .add_buffer(offsets_buffer) - .build()?, - ); - - Ok(list_array) + let offsets: Vec = std::iter::once(0) + .chain(attributions.iter().map(|v| v.len() as i32)) + .scan(0, |state, len| { + *state += len; + Some(*state) + }) + .collect(); + + let agent_builder = StringBuilder::new(); + let role_builder = StringBuilder::new(); + + let fields = + vec![Field::new("agent", DataType::Utf8, false), Field::new("role", DataType::Utf8, true)]; + let field_builders = vec![ + Box::new(agent_builder) as Box, + Box::new(role_builder) as Box, + ]; + + let mut builder = StructBuilder::new(fields, field_builders); + + for attribution in attributions.into_iter().flatten() { + builder + .field_builder::(0) + .unwrap() + .append_value(attribution.agent); + builder + .field_builder::(1) + .unwrap() + .append_option(attribution.role); + + builder.append(true) + } + + let values_array = builder.finish(); + + let data_type = DataType::new_list(attribution_struct(), false); + let offsets_buffer = Buffer::from(offsets.to_byte_slice()); + + let list_array = ListArray::from( + ArrayData::builder(data_type) + .add_child_data(values_array.to_data()) + .len(offsets.len() - 1) + .null_count(0) + .add_buffer(offsets_buffer) + .build()?, + ); + + Ok(list_array) } // Returns a tuple of an iterator over entities of the specified domain types and their relations, // the number of returned records and the total number of records #[tracing::instrument(skip(pool))] pub fn load_entities_by_type( - pool: &Pool>, - typ: &Option, - attributes: &Vec<(String, PrimitiveType)>, - position: u64, - max_records: u64, -) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { - let mut connection = pool.get()?; - - let mut entities_and_references = Vec::new(); - - let entities_and_namespaces: Vec<(Entity, Namespace)> = if let Some(typ_value) = typ { - entity::table - .inner_join(namespace::table.on(entity::namespace_id.eq(namespace::id))) - .filter(entity::domaintype.eq(typ_value.external_id_part())) - .order(entity::id) - .select((Entity::as_select(), Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load::<(Entity, Namespace)>(&mut connection)? - } else { - entity::table - .inner_join(namespace::table.on(entity::namespace_id.eq(namespace::id))) - .filter(entity::domaintype.is_null()) - .order(entity::id) - .select((Entity::as_select(), Namespace::as_select())) - .offset(position as i64) - .limit(max_records as i64) - .load::<(Entity, Namespace)>(&mut connection)? - }; - - let (entities, namespaces): (Vec, Vec) = - entities_and_namespaces.into_iter().unzip(); - - let entity_ids: Vec = entities.iter().map(|entity| entity.id).collect(); - let attribute_names: Vec = attributes.iter().map(|(name, _)| name.clone()).collect(); - - let loaded_attributes: Vec<(i32, String, serde_json::Value)> = entity_attribute::table - .filter(entity_attribute::entity_id.eq_any(&entity_ids)) - .filter(entity_attribute::typename.eq_any(&attribute_names)) - .select((entity_attribute::entity_id, entity_attribute::typename, entity_attribute::value)) - .load::<(i32, String, String)>(&mut connection)? 
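
// The derivation and attribution list columns above are assembled from raw
// offset buffers. For reference, arrow-array's ListBuilder can produce a
// similar list-of-strings column with less ceremony; a sketch, not the code
// path used in this file:
use arrow_array::builder::{ListBuilder, StringBuilder};
use arrow_array::ListArray;

fn ids_to_list_array(rows: Vec<Vec<String>>) -> ListArray {
	let mut builder = ListBuilder::new(StringBuilder::new());
	for row in rows {
		for value in row {
			builder.values().append_value(value);
		}
		builder.append(true); // close the current list slot
	}
	builder.finish()
}
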
- .into_iter() - .map(|(entity_id, typename, value)| { - let parsed_value: serde_json::Value = serde_json::from_str(&value).unwrap_or_default(); - (entity_id, typename, parsed_value) - }) - .collect(); - - let mut attributes_map: HashMap> = HashMap::new(); - for (entity_id, typename, value) in loaded_attributes { - let attribute = Attribute::new(&typename, value); - attributes_map.entry(entity_id).or_default().push(attribute); - } - - let fetched_records: u64 = entities.len() as u64; - // Load generations - let mut generation_map: HashMap> = Generation::belonging_to(&entities) - .inner_join(activity::table) - .select((generation::generated_entity_id, activity::external_id)) - .load::<(i32, String)>(&mut connection)? - .into_iter() - .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { - acc.entry(id).or_default().push(external_id); - acc - }); - - let mut attribution_map: HashMap> = Attribution::belonging_to(&entities) - .inner_join(agent::table) - .select((attribution::agent_id, agent::external_id, attribution::role.nullable())) - .load::<(i32, String, Option)>(&mut connection)? - .into_iter() - .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id, role)| { - acc.entry(id) - .or_default() - .push(EntityAttributionRef { agent: external_id, role }); - acc - }); - - let mut derivation_map: HashMap<(i32, DerivationType), Vec<_>> = - Derivation::belonging_to(&entities) - .inner_join(activity::table.on(derivation::activity_id.eq(activity::id))) - .inner_join(entity::table.on(derivation::used_entity_id.eq(entity::id))) - .select(( - derivation::used_entity_id, - activity::external_id, - entity::external_id, - derivation::typ, - )) - .load::<(i32, String, String, i32)>(&mut connection)? - .into_iter() - .map(|(entity_id, activity_external_id, entity_external_id, derivation_type)| { - DerivationType::try_from(derivation_type) - .map(|derivation_type| { - (entity_id, activity_external_id, entity_external_id, derivation_type) - }) - .map_err(|e| ChronicleArrowError::InvalidValue(e.to_string())) - }) - .collect::, ChronicleArrowError>>()? 
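
// derivation::typ is stored as an i32 and decoded with TryFrom, so an
// unknown discriminant becomes an InvalidValue error instead of a panic.
// The decoding pattern in miniature (the variant numbering here is
// illustrative, not Chronicle's actual mapping):
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum DerivationKind {
	None,
	Revision,
	Quotation,
	PrimarySource,
}

impl TryFrom<i32> for DerivationKind {
	type Error = String;

	fn try_from(value: i32) -> Result<Self, Self::Error> {
		match value {
			0 => Ok(Self::None),
			1 => Ok(Self::Revision),
			2 => Ok(Self::Quotation),
			3 => Ok(Self::PrimarySource),
			other => Err(format!("unrecognized derivation type {other}")),
		}
	}
}
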
- .into_iter() - .fold( - HashMap::new(), - |mut acc: HashMap<(i32, DerivationType), Vec<_>>, - (entity_id, activity_external_id, entity_external_id, derivation_type)| { - acc.entry((entity_id, derivation_type)).or_default().push(DerivationRef { - activity: activity_external_id, - source: entity_external_id, - }); - acc - }, - ); - - for (entity, ns) in entities.into_iter().zip(namespaces) { - let entity_id = entity.id; - entities_and_references.push(EntityAndReferences { - id: entity.external_id, - namespace_name: ns.external_id, - namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), - attributes: Attributes::new( - entity.domaintype.map(DomaintypeId::from_external_id), - attributes_map.remove(&entity_id).unwrap_or_default(), - ), - was_generated_by: generation_map.remove(&entity_id).unwrap_or_default(), - was_attributed_to: attribution_map.remove(&entity_id).unwrap_or_default(), - was_derived_from: derivation_map - .remove(&(entity_id, DerivationType::None)) - .unwrap_or_default(), - was_quoted_from: derivation_map - .remove(&(entity_id, DerivationType::Quotation)) - .unwrap_or_default(), - had_primary_source: derivation_map - .remove(&(entity_id, DerivationType::PrimarySource)) - .unwrap_or_default(), - was_revision_of: derivation_map - .remove(&(entity_id, DerivationType::Revision)) - .unwrap_or_default(), - }); - } - - tracing::debug!(?fetched_records); - - Ok((entities_and_references.into_iter(), fetched_records, fetched_records)) + pool: &Pool>, + typ: &Option, + attributes: &Vec<(String, PrimitiveType)>, + position: u64, + max_records: u64, +) -> Result<(impl Iterator, u64, u64), ChronicleArrowError> { + let mut connection = pool.get()?; + + let mut entities_and_references = Vec::new(); + + let entities_and_namespaces: Vec<(Entity, Namespace)> = if let Some(typ_value) = typ { + entity::table + .inner_join(namespace::table.on(entity::namespace_id.eq(namespace::id))) + .filter(entity::domaintype.eq(typ_value.external_id_part())) + .order(entity::id) + .select((Entity::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load::<(Entity, Namespace)>(&mut connection)? + } else { + entity::table + .inner_join(namespace::table.on(entity::namespace_id.eq(namespace::id))) + .filter(entity::domaintype.is_null()) + .order(entity::id) + .select((Entity::as_select(), Namespace::as_select())) + .offset(position as i64) + .limit(max_records as i64) + .load::<(Entity, Namespace)>(&mut connection)? + }; + + let (entities, namespaces): (Vec, Vec) = + entities_and_namespaces.into_iter().unzip(); + + let entity_ids: Vec = entities.iter().map(|entity| entity.id).collect(); + let attribute_names: Vec = attributes.iter().map(|(name, _)| name.clone()).collect(); + + let loaded_attributes: Vec<(i32, String, serde_json::Value)> = entity_attribute::table + .filter(entity_attribute::entity_id.eq_any(&entity_ids)) + .filter(entity_attribute::typename.eq_any(&attribute_names)) + .select((entity_attribute::entity_id, entity_attribute::typename, entity_attribute::value)) + .load::<(i32, String, String)>(&mut connection)? 
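
// Attribute values are stored as JSON strings; parsing with
// unwrap_or_default() means a malformed value degrades to
// serde_json::Value::Null rather than aborting the whole page load.
// Demonstrated in isolation:
fn parse_attribute_value(raw: &str) -> serde_json::Value {
	serde_json::from_str(raw).unwrap_or_default()
}

fn demo_parse() {
	assert_eq!(parse_attribute_value(r#"{"a": 1}"#)["a"], 1);
	assert!(parse_attribute_value("not json").is_null());
}
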
+ .into_iter() + .map(|(entity_id, typename, value)| { + let parsed_value: serde_json::Value = serde_json::from_str(&value).unwrap_or_default(); + (entity_id, typename, parsed_value) + }) + .collect(); + + let mut attributes_map: HashMap> = HashMap::new(); + for (entity_id, typename, value) in loaded_attributes { + let attribute = Attribute::new(&typename, value); + attributes_map.entry(entity_id).or_default().push(attribute); + } + + let fetched_records: u64 = entities.len() as u64; + // Load generations + let mut generation_map: HashMap> = Generation::belonging_to(&entities) + .inner_join(activity::table) + .select((generation::generated_entity_id, activity::external_id)) + .load::<(i32, String)>(&mut connection)? + .into_iter() + .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id)| { + acc.entry(id).or_default().push(external_id); + acc + }); + + let mut attribution_map: HashMap> = Attribution::belonging_to(&entities) + .inner_join(agent::table) + .select((attribution::agent_id, agent::external_id, attribution::role.nullable())) + .load::<(i32, String, Option)>(&mut connection)? + .into_iter() + .fold(HashMap::new(), |mut acc: HashMap>, (id, external_id, role)| { + acc.entry(id) + .or_default() + .push(EntityAttributionRef { agent: external_id, role }); + acc + }); + + let mut derivation_map: HashMap<(i32, DerivationType), Vec<_>> = + Derivation::belonging_to(&entities) + .inner_join(activity::table.on(derivation::activity_id.eq(activity::id))) + .inner_join(entity::table.on(derivation::used_entity_id.eq(entity::id))) + .select(( + derivation::used_entity_id, + activity::external_id, + entity::external_id, + derivation::typ, + )) + .load::<(i32, String, String, i32)>(&mut connection)? + .into_iter() + .map(|(entity_id, activity_external_id, entity_external_id, derivation_type)| { + DerivationType::try_from(derivation_type) + .map(|derivation_type| { + (entity_id, activity_external_id, entity_external_id, derivation_type) + }) + .map_err(|e| ChronicleArrowError::InvalidValue(e.to_string())) + }) + .collect::, ChronicleArrowError>>()? 
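
// Derivations here are bucketed under a composite (entity id, derivation
// kind) key so that each kind can later be drained into its own column with
// remove(&(id, kind)). The grouping shape, std-only, with u8 standing in for
// DerivationType:
use std::collections::HashMap;

fn group_derivations(rows: Vec<(i32, u8, String)>) -> HashMap<(i32, u8), Vec<String>> {
	rows.into_iter().fold(HashMap::new(), |mut acc, (id, kind, source)| {
		acc.entry((id, kind)).or_default().push(source);
		acc
	})
}
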
+ .into_iter() + .fold( + HashMap::new(), + |mut acc: HashMap<(i32, DerivationType), Vec<_>>, + (entity_id, activity_external_id, entity_external_id, derivation_type)| { + acc.entry((entity_id, derivation_type)).or_default().push(DerivationRef { + activity: activity_external_id, + source: entity_external_id, + }); + acc + }, + ); + + for (entity, ns) in entities.into_iter().zip(namespaces) { + let entity_id = entity.id; + entities_and_references.push(EntityAndReferences { + id: entity.external_id, + namespace_name: ns.external_id, + namespace_uuid: Uuid::parse_str(&ns.uuid)?.into_bytes(), + attributes: Attributes::new( + entity.domaintype.map(DomaintypeId::from_external_id), + attributes_map.remove(&entity_id).unwrap_or_default(), + ), + was_generated_by: generation_map.remove(&entity_id).unwrap_or_default(), + was_attributed_to: attribution_map.remove(&entity_id).unwrap_or_default(), + was_derived_from: derivation_map + .remove(&(entity_id, DerivationType::None)) + .unwrap_or_default(), + was_quoted_from: derivation_map + .remove(&(entity_id, DerivationType::Quotation)) + .unwrap_or_default(), + had_primary_source: derivation_map + .remove(&(entity_id, DerivationType::PrimarySource)) + .unwrap_or_default(), + was_revision_of: derivation_map + .remove(&(entity_id, DerivationType::Revision)) + .unwrap_or_default(), + }); + } + + tracing::debug!(?fetched_records); + + Ok((entities_and_references.into_iter(), fetched_records, fetched_records)) } diff --git a/crates/chronicle-arrow/src/query/mod.rs b/crates/chronicle-arrow/src/query/mod.rs index ddb573221..a762caa9f 100644 --- a/crates/chronicle-arrow/src/query/mod.rs +++ b/crates/chronicle-arrow/src/query/mod.rs @@ -17,29 +17,29 @@ mod entity; // For simple id only relations, we can just reuse this mapping fn vec_vec_string_to_list_array( - vec_vec_string: Vec>, + vec_vec_string: Vec>, ) -> Result { - let offsets: Vec = std::iter::once(0) - .chain(vec_vec_string.iter().map(|v| v.len() as i32)) - .scan(0, |state, len| { - *state += len; - Some(*state) - }) - .collect(); - let values: Vec = vec_vec_string.into_iter().flatten().collect(); + let offsets: Vec = std::iter::once(0) + .chain(vec_vec_string.iter().map(|v| v.len() as i32)) + .scan(0, |state, len| { + *state += len; + Some(*state) + }) + .collect(); + let values: Vec = vec_vec_string.into_iter().flatten().collect(); - let values_array = Arc::new(StringArray::from(values)) as Arc; - // Create an OffsetBuffer from the offsets - let offsets_buffer = Buffer::from(offsets.to_byte_slice()); - let data_type = DataType::new_list(DataType::Utf8, false); - let list_array = ListArray::from( - ArrayData::builder(data_type) - .add_child_data(values_array.to_data()) - .len(offsets.len() - 1) - .null_count(0) - .add_buffer(offsets_buffer) - .build()?, - ); + let values_array = Arc::new(StringArray::from(values)) as Arc; + // Create an OffsetBuffer from the offsets + let offsets_buffer = Buffer::from(offsets.to_byte_slice()); + let data_type = DataType::new_list(DataType::Utf8, false); + let list_array = ListArray::from( + ArrayData::builder(data_type) + .add_child_data(values_array.to_data()) + .len(offsets.len() - 1) + .null_count(0) + .add_buffer(offsets_buffer) + .build()?, + ); - Ok(list_array) + Ok(list_array) } diff --git a/crates/chronicle-data/src/cli.rs b/crates/chronicle-data/src/cli.rs index 256d9d624..3931f555a 100644 --- a/crates/chronicle-data/src/cli.rs +++ b/crates/chronicle-data/src/cli.rs @@ -3,33 +3,33 @@ use clap::{Parser, Subcommand}; #[derive(Parser)] #[clap(name = 
"chronicle-data", about = "CLI for Chronicle Data operations")] pub struct Cli { - #[arg(long, help = "The Chronicle server URL", global = true, required = true)] - pub chronicle: String, + #[arg(long, help = "The Chronicle server URL", global = true, required = true)] + pub chronicle: String, - #[arg(long, help = "Authentication token", global = true, required = false)] - pub auth: Option, + #[arg(long, help = "Authentication token", global = true, required = false)] + pub auth: Option, - #[clap(subcommand)] - pub command: Commands, + #[clap(subcommand)] + pub command: Commands, } #[derive(Subcommand)] pub enum Commands { - #[clap(about = "Describes the data schema and operations")] - Describe { - #[clap(subcommand)] - subcommand: DescribeSubcommands, - }, - #[clap(about = "Handles incoming data operations")] - In, - #[clap(about = "Handles outgoing data operations")] - Out, + #[clap(about = "Describes the data schema and operations")] + Describe { + #[clap(subcommand)] + subcommand: DescribeSubcommands, + }, + #[clap(about = "Handles incoming data operations")] + In, + #[clap(about = "Handles outgoing data operations")] + Out, } #[derive(Subcommand)] pub enum DescribeSubcommands { - #[clap(about = "Describes the data schema")] - Schema, - #[clap(about = "List the available flights")] - Flights, + #[clap(about = "Describes the data schema")] + Schema, + #[clap(about = "List the available flights")] + Flights, } diff --git a/crates/chronicle-data/src/main.rs b/crates/chronicle-data/src/main.rs index d831c7979..102d83dd0 100644 --- a/crates/chronicle-data/src/main.rs +++ b/crates/chronicle-data/src/main.rs @@ -1,80 +1,77 @@ -use arrow_flight::flight_service_client::FlightServiceClient; -use arrow_flight::FlightInfo; +use arrow_flight::{flight_service_client::FlightServiceClient, FlightInfo}; use arrow_schema::Schema; use clap::Parser; use cli::{Cli, Commands, DescribeSubcommands}; -use prettytable::{Cell, format, row, Row, Table}; +use prettytable::{format, row, Cell, Row, Table}; use tonic::transport::Channel; mod cli; async fn init_flight_client( - cli: &Cli, + cli: &Cli, ) -> Result, Box> { - let chronicle_url = &cli.chronicle; - let channel = Channel::from_shared(chronicle_url.clone())?.connect().await?; + let chronicle_url = &cli.chronicle; + let channel = Channel::from_shared(chronicle_url.clone())?.connect().await?; - Ok(FlightServiceClient::new(channel)) + Ok(FlightServiceClient::new(channel)) } fn format_schema_as_table(schema: &Schema) -> String { - let mut table = Table::new(); - table.add_row(row!["Field Name", "Data Type", "Nullable"]); - for field in schema.fields() { - table.add_row(Row::new(vec![ - Cell::new(field.name()), - Cell::new(&format!("{:?}", field.data_type())), - Cell::new(&format!("{}", field.is_nullable())), - ])); - } - table.to_string() + let mut table = Table::new(); + table.add_row(row!["Field Name", "Data Type", "Nullable"]); + for field in schema.fields() { + table.add_row(Row::new(vec![ + Cell::new(field.name()), + Cell::new(&format!("{:?}", field.data_type())), + Cell::new(&format!("{}", field.is_nullable())), + ])); + } + table.to_string() } fn format_flight_info_as_table(_flight_infos: Vec) -> String { - let mut table = Table::new(); - table.set_format(*format::consts::FORMAT_NO_LINESEP_WITH_TITLE); + let mut table = Table::new(); + table.set_format(*format::consts::FORMAT_NO_LINESEP_WITH_TITLE); - table.set_titles(row!["Descriptor", "Endpoints", "Summary"]); + table.set_titles(row!["Descriptor", "Endpoints", "Summary"]); - table.to_string() + 
table.to_string() } async fn list_flights( - client: &mut FlightServiceClient, + client: &mut FlightServiceClient, ) -> Result, Box> { - let request = tonic::Request::new(arrow_flight::Criteria::default()); - let response = client.list_flights(request).await?; + let request = tonic::Request::new(arrow_flight::Criteria::default()); + let response = client.list_flights(request).await?; - let mut flights_info = Vec::new(); - let mut stream = response.into_inner(); - while let Some(flight_info) = stream.message().await? { - flights_info.push(flight_info); - } + let mut flights_info = Vec::new(); + let mut stream = response.into_inner(); + while let Some(flight_info) = stream.message().await? { + flights_info.push(flight_info); + } - Ok(flights_info) + Ok(flights_info) } #[tokio::main] async fn main() { - let cli = Cli::parse(); - let _client = init_flight_client(&cli) - .await - .expect("Failed to initialize the Flight client"); + let cli = Cli::parse(); + let _client = init_flight_client(&cli).await.expect("Failed to initialize the Flight client"); - match &cli.command { - Commands::Describe { subcommand } => match subcommand { - DescribeSubcommands::Schema => { - println!("Describing the data schema..."); - } - DescribeSubcommands::Flights => { - println!("Listing available flights..."); - } - }, - Commands::In => { - println!("Handling incoming data operations..."); - } - Commands::Out => { - println!("Handling outgoing data operations..."); - } - } + match &cli.command { + Commands::Describe { subcommand } => match subcommand { + DescribeSubcommands::Schema => { + println!("Describing the data schema..."); + }, + DescribeSubcommands::Flights => { + println!("Listing available flights..."); + }, + }, + Commands::In => { + println!("Handling incoming data operations..."); + }, + Commands::Out => { + println!("Handling outgoing data operations..."); + }, + } } diff --git a/crates/chronicle-domain-lint/build.rs b/crates/chronicle-domain-lint/build.rs index 5a1d86bbe..afb2c9546 100644 --- a/crates/chronicle-domain-lint/build.rs +++ b/crates/chronicle-domain-lint/build.rs @@ -1,8 +1,8 @@ fn main() { - //Create a .VERSION file containing 'local' if it does not exist + //Create a .VERSION file containing 'local' if it does not exist - let version_file = std::path::Path::new("../../.VERSION"); - if !version_file.exists() { - std::fs::write(version_file, "local").expect("Unable to write file"); - } + let version_file = std::path::Path::new("../../.VERSION"); + if !version_file.exists() { + std::fs::write(version_file, "local").expect("Unable to write file"); + } } diff --git a/crates/chronicle-domain-lint/src/main.rs b/crates/chronicle-domain-lint/src/main.rs index ac3628b7f..35086d199 100644 --- a/crates/chronicle-domain-lint/src/main.rs +++ b/crates/chronicle-domain-lint/src/main.rs @@ -2,21 +2,21 @@ use chronicle::codegen::linter::check_files; use clap::{Arg, Command, ValueHint}; fn main() { - let version = env!("CARGO_PKG_VERSION"); - let cli = Command::new("chronicle-domain-lint") - .version(version) - .author("Blockchain Technology Partners") - .arg( - Arg::new("filenames") - .value_hint(ValueHint::FilePath) - .required(true) - .multiple_values(true) - .min_values(1) - .help("domain definition files for linting"), - ); + let version = env!("CARGO_PKG_VERSION"); + let cli = Command::new("chronicle-domain-lint") + .version(version) + .author("Blockchain Technology Partners") + .arg( + Arg::new("filenames") + .value_hint(ValueHint::FilePath) + .required(true) + .multiple_values(true) + 
+				.min_values(1)
+				.help("domain definition files for linting"),
+		);
 
-    let matches = cli.get_matches();
-    let filenames = matches.values_of("filenames").unwrap().collect();
-    check_files(filenames);
-    println!("successful: no domain definition errors detected");
+	let matches = cli.get_matches();
+	let filenames = matches.values_of("filenames").unwrap().collect();
+	check_files(filenames);
+	println!("successful: no domain definition errors detected");
 }
diff --git a/crates/chronicle-domain-test/src/test.rs b/crates/chronicle-domain-test/src/test.rs
index 10e6b7387..913a8db7a 100644
--- a/crates/chronicle-domain-test/src/test.rs
+++ b/crates/chronicle-domain-test/src/test.rs
@@ -122,7 +122,7 @@ mod test {
 	async fn test_schema_with_opa<'a>(
 		opa_executor: ExecutorContext,
 	) -> (Schema, TemporaryDatabase<'a>) {
-		chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty);
+		chronicle_telemetry::telemetry(chronicle_telemetry::ConsoleLogging::Pretty);
 
 		let secrets = ChronicleSigning::new(
 			chronicle_secret_names(),
diff --git a/crates/chronicle-persistence/src/cursor.rs b/crates/chronicle-persistence/src/cursor.rs
index 64f7224b8..e994eb3d4 100644
--- a/crates/chronicle-persistence/src/cursor.rs
+++ b/crates/chronicle-persistence/src/cursor.rs
@@ -1,9 +1,9 @@
 use diesel::{
-    pg::Pg,
-    prelude::*,
-    query_builder::*,
-    r2d2::{ConnectionManager, PooledConnection},
-    sql_types::BigInt,
+	pg::Pg,
+	prelude::*,
+	query_builder::*,
+	r2d2::{ConnectionManager, PooledConnection},
+	sql_types::BigInt,
 };
 
 type Conn = PooledConnection<ConnectionManager<PgConnection>>;
@@ -12,59 +12,59 @@ const DEFAULT_PAGE_SIZE: i32 = 10;
 
 #[derive(QueryId)]
 pub struct CursorPosition<T> {
-    query: T,
-    pub start: i64,
-    pub limit: i64,
+	query: T,
+	pub start: i64,
+	pub limit: i64,
 }
 
 pub trait Cursorize: Sized {
-    fn cursor(
-        self,
-        after: Option<i32>,
-        before: Option<i32>,
-        first: Option<usize>,
-        last: Option<usize>,
-    ) -> CursorPosition<Self>;
+	fn cursor(
+		self,
+		after: Option<i32>,
+		before: Option<i32>,
+		first: Option<usize>,
+		last: Option<usize>,
+	) -> CursorPosition<Self>;
 }
 
 impl<T> Cursorize for T {
-    fn cursor(
-        self,
-        after: Option<i32>,
-        before: Option<i32>,
-        first: Option<usize>,
-        last: Option<usize>,
-    ) -> CursorPosition<Self> {
-        let mut start = after.map(|after| after + 1).unwrap_or(0) as usize;
-        let mut end = before.unwrap_or(DEFAULT_PAGE_SIZE) as usize;
-        if let Some(first) = first {
-            end = start + first
-        }
-        if let Some(last) = last {
-            start = if last > end - start { end } else { end - last };
-        };
+	fn cursor(
+		self,
+		after: Option<i32>,
+		before: Option<i32>,
+		first: Option<usize>,
+		last: Option<usize>,
+	) -> CursorPosition<Self> {
+		let mut start = after.map(|after| after + 1).unwrap_or(0) as usize;
+		let mut end = before.unwrap_or(DEFAULT_PAGE_SIZE) as usize;
+		if let Some(first) = first {
+			end = start + first
+		}
+		if let Some(last) = last {
+			start = if last > end - start { end } else { end - last };
+		};
 
-        CursorPosition { query: self, start: start as _, limit: (end - start) as _ }
-    }
+		CursorPosition { query: self, start: start as _, limit: (end - start) as _ }
+	}
 }
 
 impl<T> QueryFragment<Pg> for CursorPosition<T>
-    where
-        T: QueryFragment<Pg>,
+where
+	T: QueryFragment<Pg>,
 {
-    fn walk_ast<'a>(&'a self, mut out: AstPass<'_, 'a, Pg>) -> QueryResult<()> {
-        out.push_sql("SELECT *, COUNT(*) OVER () FROM (");
-        self.query.walk_ast(out.reborrow())?;
-        out.push_sql(") t LIMIT ");
-        out.push_bind_param::<BigInt, _>(&(self.limit))?;
-        out.push_sql(" OFFSET ");
-        out.push_bind_param::<BigInt, _>(&self.start)?;
-        Ok(())
-    }
+	fn walk_ast<'a>(&'a self, mut out: AstPass<'_, 'a, Pg>) -> QueryResult<()> {
+		out.push_sql("SELECT *, COUNT(*) OVER () FROM (");
+		self.query.walk_ast(out.reborrow())?;
+		out.push_sql(") t LIMIT ");
+		out.push_bind_param::<BigInt, _>(&(self.limit))?;
+		out.push_sql(" OFFSET ");
+		out.push_bind_param::<BigInt, _>(&self.start)?;
+		Ok(())
+	}
 }
 
 impl<T: Query> Query for CursorPosition<T> {
-    type SqlType = (T::SqlType, BigInt);
+	type SqlType = (T::SqlType, BigInt);
 }
 
 impl<T> RunQueryDsl<Conn> for CursorPosition<T> {}
diff --git a/crates/chronicle-persistence/src/database.rs b/crates/chronicle-persistence/src/database.rs
index e2fe7859c..5f700cac7 100644
--- a/crates/chronicle-persistence/src/database.rs
+++ b/crates/chronicle-persistence/src/database.rs
@@ -5,26 +5,26 @@ use std::{fmt::Display, time::Duration};
 
 #[async_trait::async_trait]
 pub trait DatabaseConnector<X, E> {
-    async fn try_connect(&self) -> Result<(X, Pool<ConnectionManager<PgConnection>>), E>;
-    fn should_retry(&self, error: &E) -> bool;
+	async fn try_connect(&self) -> Result<(X, Pool<ConnectionManager<PgConnection>>), E>;
+	fn should_retry(&self, error: &E) -> bool;
 }
 
 pub async fn get_connection_with_retry<X, E: Display>(
-    connector: impl DatabaseConnector<X, E>,
+	connector: impl DatabaseConnector<X, E>,
 ) -> Result<(X, Pool<ConnectionManager<PgConnection>>), E> {
-    let mut i = 1;
-    let mut j = 1;
-    loop {
-        let connection = connector.try_connect().await;
-        if let Err(source) = &connection {
-            tracing::warn!("database connection failed: {source}");
-            if i < 20 && connector.should_retry(source) {
-                tracing::info!("waiting to retry database connection...");
-                std::thread::sleep(Duration::from_secs(i));
-                (i, j) = (i + j, i);
-                continue;
-            }
-        }
-        return connection;
-    }
+	let mut i = 1;
+	let mut j = 1;
+	loop {
+		let connection = connector.try_connect().await;
+		if let Err(source) = &connection {
+			tracing::warn!("database connection failed: {source}");
+			if i < 20 && connector.should_retry(source) {
+				tracing::info!("waiting to retry database connection...");
+				std::thread::sleep(Duration::from_secs(i));
+				(i, j) = (i + j, i);
+				continue;
+			}
+		}
+		return connection;
+	}
 }
diff --git a/crates/chronicle-persistence/src/lib.rs b/crates/chronicle-persistence/src/lib.rs
index 340eadc2b..60df88c3d 100644
--- a/crates/chronicle-persistence/src/lib.rs
+++ b/crates/chronicle-persistence/src/lib.rs
@@ -2,19 +2,19 @@ use std::{collections::BTreeMap, str::FromStr, sync::Arc, time::Duration};
 
 use chrono::{TimeZone, Utc};
 use common::{
-    attributes::Attribute,
-    prov::{
-        operations::DerivationType, Activity, ActivityId, Agent, AgentId, Association, Attribution,
-        ChronicleTransactionId, ChronicleTransactionIdError, Delegation, Derivation, DomaintypeId,
-        Entity, EntityId, ExternalId, ExternalIdPart, Generation, Namespace, NamespaceId,
-        ProvModel, Role, Usage,
-    },
+	attributes::Attribute,
+	prov::{
+		operations::DerivationType, Activity, ActivityId, Agent, AgentId, Association, Attribution,
+		ChronicleTransactionId, ChronicleTransactionIdError, Delegation, Derivation, DomaintypeId,
+		Entity, EntityId, ExternalId, ExternalIdPart, Generation, Namespace, NamespaceId,
+		ProvModel, Role, Usage,
+	},
 };
 use diesel::{
-    prelude::*,
-    r2d2::{ConnectionManager, Pool, PooledConnection},
-    PgConnection,
+	prelude::*,
+	r2d2::{ConnectionManager, Pool, PooledConnection},
+	PgConnection,
 };
 use diesel_migrations::{embed_migrations, EmbeddedMigrations};
 use protocol_substrate_chronicle::protocol::BlockId;
@@ -33,1271 +33,1275 @@ pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!();
 
 #[derive(Error, Debug)]
 pub enum StoreError {
-    #[error("Database operation failed: {0}")]
-    Db(
-        #[from]
-        #[source]
-        diesel::result::Error,
-    ),
-
-    #[error("Database connection failed (maybe check PGPASSWORD): {0}")]
-    DbConnection(
-        #[from]
-
#[source] - diesel::ConnectionError, - ), - - #[error("Database migration failed: {0}")] - DbMigration( - #[from] - #[source] - Box, - ), - - #[error("Connection pool error: {0}")] - DbPool( - #[from] - #[source] - r2d2::Error, - ), - - #[error("Infallible")] - Infallible(#[from] std::convert::Infallible), - - #[error( - "Integer returned from database was an unrecognized 'DerivationType' enum variant: {0}" - )] - InvalidDerivationTypeRecord(i32), - - #[error("Could not find namespace {0}")] - InvalidNamespace(NamespaceId), - - #[error("Unreadable Attribute: {0}")] - Json( - #[from] - #[source] - serde_json::Error, - ), - - #[error("Parse blockid: {0}")] - ParseBlockId( - #[from] - #[source] - protocol_substrate_chronicle::protocol::BlockIdError, - ), - - #[error("Invalid transaction ID: {0}")] - TransactionId( - #[from] - #[source] - ChronicleTransactionIdError, - ), - - #[error("Could not locate record in store")] - RecordNotFound, - - #[error("Invalid UUID: {0}")] - Uuid( - #[from] - #[source] - uuid::Error, - ), - - #[error("Serialization error: {0}")] - SerializationError(String), + #[error("Database operation failed: {0}")] + Db( + #[from] + #[source] + diesel::result::Error, + ), + + #[error("Database connection failed (maybe check PGPASSWORD): {0}")] + DbConnection( + #[from] + #[source] + diesel::ConnectionError, + ), + + #[error("Database migration failed: {0}")] + DbMigration( + #[from] + #[source] + Box, + ), + + #[error("Connection pool error: {0}")] + DbPool( + #[from] + #[source] + r2d2::Error, + ), + + #[error("Infallible")] + Infallible(#[from] std::convert::Infallible), + + #[error( + "Integer returned from database was an unrecognized 'DerivationType' enum variant: {0}" + )] + InvalidDerivationTypeRecord(i32), + + #[error("Could not find namespace {0}")] + InvalidNamespace(NamespaceId), + + #[error("Unreadable Attribute: {0}")] + Json( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Parse blockid: {0}")] + ParseBlockId( + #[from] + #[source] + protocol_substrate_chronicle::protocol::BlockIdError, + ), + + #[error("Invalid transaction ID: {0}")] + TransactionId( + #[from] + #[source] + ChronicleTransactionIdError, + ), + + #[error("Could not locate record in store")] + RecordNotFound, + + #[error("Invalid UUID: {0}")] + Uuid( + #[from] + #[source] + uuid::Error, + ), + + #[error("Serialization error: {0}")] + SerializationError(String), } #[derive(Debug)] pub struct ConnectionOptions { - pub enable_wal: bool, - pub enable_foreign_keys: bool, - pub busy_timeout: Option, + pub enable_wal: bool, + pub enable_foreign_keys: bool, + pub busy_timeout: Option, } #[derive(Clone)] pub struct Store { - pool: Pool>, + pool: Pool>, } - type Generations = Vec; - type Usages = Vec; - type WasInformedBys = Vec; - type Associations = Vec<(String, String)>; +type Generations = Vec; +type Usages = Vec; +type WasInformedBys = Vec; +type Associations = Vec<(String, String)>; impl Store { - - - #[instrument(name = "Bind namespace", skip(self))] - pub fn namespace_binding(&self, external_id: &str, uuid: Uuid) -> Result<(), StoreError> { - use schema::namespace::dsl; - - let uuid = uuid.to_string(); - self.connection()?.build_transaction().run(|conn| { - diesel::insert_into(dsl::namespace) - .values((dsl::external_id.eq(external_id), dsl::uuid.eq(&uuid))) - .on_conflict(dsl::external_id) - .do_update() - .set(dsl::uuid.eq(&uuid)) - .execute(conn) - })?; - - Ok(()) - } - - /// Fetch the activity record for the IRI - fn activity_by_activity_external_id_and_namespace( - &self, - 
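
// StoreError above leans on thiserror: #[error(...)] supplies the Display
// message and #[from]/#[source] wire up automatic conversions and error
// chaining. The same shape reduced to two variants (illustrative; assumes
// the thiserror, diesel, and uuid crates already used by this file):
use thiserror::Error;

#[derive(Error, Debug)]
pub enum MiniStoreError {
	#[error("Database operation failed: {0}")]
	Db(
		#[from]
		#[source]
		diesel::result::Error,
	),

	#[error("Invalid UUID: {0}")]
	Uuid(
		#[from]
		#[source]
		uuid::Error,
	),
}
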
connection: &mut PgConnection, - external_id: &ExternalId, - namespace_id: &NamespaceId, - ) -> Result { - let (_namespaceid, nsid) = - self.namespace_by_external_id(connection, namespace_id.external_id_part())?; - use schema::activity::dsl; - - Ok(schema::activity::table - .filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid))) - .first::(connection)?) - } - - /// Fetch the entity record for the IRI - fn entity_by_entity_external_id_and_namespace( - &self, - connection: &mut PgConnection, - external_id: &ExternalId, - namespace_id: &NamespaceId, - ) -> Result { - let (_, ns_id) = - self.namespace_by_external_id(connection, namespace_id.external_id_part())?; - use schema::entity::dsl; - - Ok(schema::entity::table - .filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(ns_id))) - .first::(connection)?) - } - - /// Fetch the agent record for the IRI - pub fn agent_by_agent_external_id_and_namespace( - &self, - connection: &mut PgConnection, - external_id: &ExternalId, - namespace_id: &NamespaceId, - ) -> Result { - let (_namespaceid, nsid) = - self.namespace_by_external_id(connection, namespace_id.external_id_part())?; - use schema::agent::dsl; - - Ok(schema::agent::table - .filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid))) - .first::(connection)?) - } - - /// Apply an activity to persistent storage, name + namespace are a key, so we update times + - /// domaintype on conflict - #[instrument(level = "trace", skip(self, connection), ret(Debug))] - fn apply_activity( - &self, - connection: &mut PgConnection, - Activity { - ref external_id, namespace_id, started, ended, domaintype_id, attributes, .. - }: &Activity, - ) -> Result<(), StoreError> { - use schema::activity as dsl; - let (_, nsid) = - self.namespace_by_external_id(connection, namespace_id.external_id_part())?; - - let existing = self - .activity_by_activity_external_id_and_namespace(connection, external_id, namespace_id) - .ok(); - - let resolved_domain_type = - domaintype_id.as_ref().map(|x| x.external_id_part().clone()).or_else(|| { - existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from)) - }); - - let resolved_started = started - .map(|x| x.naive_utc()) - .or_else(|| existing.as_ref().and_then(|x| x.started)); - - let resolved_ended = - ended.map(|x| x.naive_utc()).or_else(|| existing.as_ref().and_then(|x| x.ended)); - - diesel::insert_into(schema::activity::table) - .values(( - dsl::external_id.eq(external_id), - dsl::namespace_id.eq(nsid), - dsl::started.eq(started.map(|t| t.naive_utc())), - dsl::ended.eq(ended.map(|t| t.naive_utc())), - dsl::domaintype.eq(domaintype_id.as_ref().map(|x| x.external_id_part())), - )) - .on_conflict((dsl::external_id, dsl::namespace_id)) - .do_update() - .set(( - dsl::domaintype.eq(resolved_domain_type), - dsl::started.eq(resolved_started), - dsl::ended.eq(resolved_ended), - )) - .execute(connection)?; - - let query::Activity { id, .. } = self.activity_by_activity_external_id_and_namespace( - connection, - external_id, - namespace_id, - )?; - - diesel::insert_into(schema::activity_attribute::table) - .values( - attributes - .iter() - .map(|Attribute { typ, value, .. 
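
// The resolved_* bindings above implement "incoming value wins, stored value
// is the fallback": an operation arriving without a domaintype or timestamps
// must not erase ones recorded earlier. The rule in miniature:
fn resolve<T>(incoming: Option<T>, existing: Option<T>) -> Option<T> {
	incoming.or(existing)
}

fn demo_resolve() {
	assert_eq!(resolve(Some(2), Some(1)), Some(2)); // update
	assert_eq!(resolve(None, Some(1)), Some(1)); // keep the stored value
	assert_eq!(resolve::<i32>(None, None), None);
}
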
}| query::ActivityAttribute { - activity_id: id, - typename: typ.to_owned(), - value: value.to_string(), - }) - .collect::>(), - ) - .on_conflict_do_nothing() - .execute(connection)?; - - Ok(()) - } - - /// Apply an agent to persistent storage, external_id + namespace are a key, so we update - /// publickey + domaintype on conflict current is a special case, only relevant to local CLI - /// context. A possibly improved design would be to store this in another table given its scope - #[instrument(level = "trace", skip(self, connection), ret(Debug))] - fn apply_agent( - &self, - connection: &mut PgConnection, - Agent { ref external_id, namespaceid, domaintypeid, attributes, .. }: &Agent, - ) -> Result<(), StoreError> { - use schema::agent::dsl; - let (_, nsid) = - self.namespace_by_external_id(connection, namespaceid.external_id_part())?; - - let existing = self - .agent_by_agent_external_id_and_namespace(connection, external_id, namespaceid) - .ok(); - - let resolved_domain_type = - domaintypeid.as_ref().map(|x| x.external_id_part().clone()).or_else(|| { - existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from)) - }); - - diesel::insert_into(schema::agent::table) - .values(( - dsl::external_id.eq(external_id), - dsl::namespace_id.eq(nsid), - dsl::current.eq(0), - dsl::domaintype.eq(domaintypeid.as_ref().map(|x| x.external_id_part())), - )) - .on_conflict((dsl::namespace_id, dsl::external_id)) - .do_update() - .set(dsl::domaintype.eq(resolved_domain_type)) - .execute(connection)?; - - let query::Agent { id, .. } = - self.agent_by_agent_external_id_and_namespace(connection, external_id, namespaceid)?; - - diesel::insert_into(schema::agent_attribute::table) - .values( - attributes - .iter() - .map(|Attribute { typ, value, .. }| query::AgentAttribute { - agent_id: id, - typename: typ.to_owned(), - value: value.to_string(), - }) - .collect::>(), - ) - .on_conflict_do_nothing() - .execute(connection)?; - - Ok(()) - } - - #[instrument(level = "trace", skip(self, connection), ret(Debug))] - fn apply_entity( - &self, - connection: &mut PgConnection, - Entity { namespace_id, external_id, domaintypeid, attributes, .. }: &Entity, - ) -> Result<(), StoreError> { - use schema::entity::dsl; - let (_, nsid) = - self.namespace_by_external_id(connection, namespace_id.external_id_part())?; - - let existing = self - .entity_by_entity_external_id_and_namespace(connection, external_id, namespace_id) - .ok(); - - let resolved_domain_type = - domaintypeid.as_ref().map(|x| x.external_id_part().clone()).or_else(|| { - existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from)) - }); - - diesel::insert_into(schema::entity::table) - .values(( - dsl::external_id.eq(&external_id), - dsl::namespace_id.eq(nsid), - dsl::domaintype.eq(domaintypeid.as_ref().map(|x| x.external_id_part())), - )) - .on_conflict((dsl::namespace_id, dsl::external_id)) - .do_update() - .set(dsl::domaintype.eq(resolved_domain_type)) - .execute(connection)?; - - let query::Entity { id, .. } = - self.entity_by_entity_external_id_and_namespace(connection, external_id, namespace_id)?; - - diesel::insert_into(schema::entity_attribute::table) - .values( - attributes - .iter() - .map(|Attribute { typ, value, .. 
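
// apply_agent (like apply_activity and apply_entity) is an upsert: insert,
// and on hitting the (namespace_id, external_id) natural key, update only
// the mutable columns. A reduced sketch assuming diesel's Postgres backend;
// the table definition here is illustrative, not Chronicle's schema module:
use diesel::{pg::PgConnection, prelude::*};

diesel::table! {
	demo_agent (id) {
		id -> Integer,
		external_id -> Text,
		namespace_id -> Integer,
		domaintype -> Nullable<Text>,
	}
}

fn upsert_agent(
	conn: &mut PgConnection,
	name: &str,
	ns: i32,
	typ: Option<String>,
) -> QueryResult<usize> {
	use demo_agent::dsl;
	diesel::insert_into(demo_agent::table)
		.values((dsl::external_id.eq(name), dsl::namespace_id.eq(ns)))
		// On conflict with the natural key, refresh the mutable column
		// instead of failing the insert.
		.on_conflict((dsl::namespace_id, dsl::external_id))
		.do_update()
		.set(dsl::domaintype.eq(typ))
		.execute(conn)
}
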
}| query::EntityAttribute { - entity_id: id, - typename: typ.to_owned(), - value: value.to_string(), - }) - .collect::>(), - ) - .on_conflict_do_nothing() - .execute(connection)?; - - Ok(()) - } - - fn apply_model( - &self, - connection: &mut PgConnection, - model: &ProvModel, - ) -> Result<(), StoreError> { - for (_, ns) in model.namespaces.iter() { - self.apply_namespace(connection, ns)? - } - for (_, agent) in model.agents.iter() { - self.apply_agent(connection, agent)? - } - for (_, activity) in model.activities.iter() { - self.apply_activity(connection, activity)? - } - for (_, entity) in model.entities.iter() { - self.apply_entity(connection, entity)? - } - - for ((namespaceid, _), association) in model.association.iter() { - for association in association.iter() { - self.apply_was_associated_with(connection, namespaceid, association)?; - } - } - - for ((namespaceid, _), usage) in model.usage.iter() { - for usage in usage.iter() { - self.apply_used(connection, namespaceid, usage)?; - } - } - - for ((namespaceid, activity_id), was_informed_by) in model.was_informed_by.iter() { - for (_, informing_activity_id) in was_informed_by.iter() { - self.apply_was_informed_by( - connection, - namespaceid, - activity_id, - informing_activity_id, - )?; - } - } - - for ((namespaceid, _), generation) in model.generation.iter() { - for generation in generation.iter() { - self.apply_was_generated_by(connection, namespaceid, generation)?; - } - } - - for ((namespaceid, _), derivation) in model.derivation.iter() { - for derivation in derivation.iter() { - self.apply_derivation(connection, namespaceid, derivation)?; - } - } - - for ((namespaceid, _), delegation) in model.delegation.iter() { - for delegation in delegation.iter() { - self.apply_delegation(connection, namespaceid, delegation)?; - } - } - - for ((namespace_id, _), attribution) in model.attribution.iter() { - for attribution in attribution.iter() { - self.apply_was_attributed_to(connection, namespace_id, attribution)?; - } - } - - Ok(()) - } - - #[instrument(level = "trace", skip(self, connection), ret(Debug))] - fn apply_namespace( - &self, - connection: &mut PgConnection, - Namespace { ref external_id, ref uuid, .. }: &Namespace, - ) -> Result<(), StoreError> { - use schema::namespace::dsl; - diesel::insert_into(schema::namespace::table) - .values((dsl::external_id.eq(external_id), dsl::uuid.eq(hex::encode(uuid)))) - .on_conflict_do_nothing() - .execute(connection)?; - - Ok(()) - } - - pub fn apply_prov(&self, prov: &ProvModel) -> Result<(), StoreError> { - self.connection()? 
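
// apply_prov here wraps the whole apply_model call in a single database
// transaction, so a failure in any apply_* step rolls back every prior
// write. The skeleton of that pattern with diesel's transaction builder:
use diesel::{pg::PgConnection, prelude::*};

fn apply_all_or_nothing(conn: &mut PgConnection) -> QueryResult<()> {
	conn.build_transaction().run(|_conn| {
		// ...apply namespaces, agents, activities, entities, relations...
		// Returning Err here aborts and rolls back the whole batch.
		Ok(())
	})
}
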
-            .build_transaction()
-            .run(|connection| self.apply_model(connection, prov))?;
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    fn apply_used(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &NamespaceId,
-        usage: &Usage,
-    ) -> Result<(), StoreError> {
-        let storedactivity = self.activity_by_activity_external_id_and_namespace(
-            connection,
-            usage.activity_id.external_id_part(),
-            namespace,
-        )?;
-
-        let storedentity = self.entity_by_entity_external_id_and_namespace(
-            connection,
-            usage.entity_id.external_id_part(),
-            namespace,
-        )?;
-
-        use schema::usage::dsl as link;
-        diesel::insert_into(schema::usage::table)
-            .values((
-                &link::activity_id.eq(storedactivity.id),
-                &link::entity_id.eq(storedentity.id),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    fn apply_was_informed_by(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &NamespaceId,
-        activity_id: &ActivityId,
-        informing_activity_id: &ActivityId,
-    ) -> Result<(), StoreError> {
-        let storedactivity = self.activity_by_activity_external_id_and_namespace(
-            connection,
-            activity_id.external_id_part(),
-            namespace,
-        )?;
-
-        let storedinformingactivity = self.activity_by_activity_external_id_and_namespace(
-            connection,
-            informing_activity_id.external_id_part(),
-            namespace,
-        )?;
-
-        use schema::wasinformedby::dsl as link;
-        diesel::insert_into(schema::wasinformedby::table)
-            .values((
-                &link::activity_id.eq(storedactivity.id),
-                &link::informing_activity_id.eq(storedinformingactivity.id),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip(self, connection))]
-    fn apply_was_associated_with(
-        &self,
-        connection: &mut PgConnection,
-        namespaceid: &common::prov::NamespaceId,
-        association: &Association,
-    ) -> Result<(), StoreError> {
-        let storedactivity = self.activity_by_activity_external_id_and_namespace(
-            connection,
-            association.activity_id.external_id_part(),
-            namespaceid,
-        )?;
-
-        let storedagent = self.agent_by_agent_external_id_and_namespace(
-            connection,
-            association.agent_id.external_id_part(),
-            namespaceid,
-        )?;
-
-        use schema::association::dsl as asoc;
-        let no_role = common::prov::Role("".to_string());
-        diesel::insert_into(schema::association::table)
-            .values((
-                &asoc::activity_id.eq(storedactivity.id),
-                &asoc::agent_id.eq(storedagent.id),
-                &asoc::role.eq(association.role.as_ref().unwrap_or(&no_role)),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip(self, connection, namespace))]
-    fn apply_delegation(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &common::prov::NamespaceId,
-        delegation: &Delegation,
-    ) -> Result<(), StoreError> {
-        let responsible = self.agent_by_agent_external_id_and_namespace(
-            connection,
-            delegation.responsible_id.external_id_part(),
-            namespace,
-        )?;
-
-        let delegate = self.agent_by_agent_external_id_and_namespace(
-            connection,
-            delegation.delegate_id.external_id_part(),
-            namespace,
-        )?;
-
-        let activity = {
-            if let Some(ref activity_id) = delegation.activity_id {
-                Some(
-                    self.activity_by_activity_external_id_and_namespace(
-                        connection,
-                        activity_id.external_id_part(),
-                        namespace,
-                    )?
-                    .id,
-                )
-            } else {
-                None
-            }
-        };
-
-        use schema::delegation::dsl as link;
-        let no_role = common::prov::Role("".to_string());
-        diesel::insert_into(schema::delegation::table)
-            .values((
-                &link::responsible_id.eq(responsible.id),
-                &link::delegate_id.eq(delegate.id),
-                &link::activity_id.eq(activity.unwrap_or(-1)),
-                &link::role.eq(delegation.role.as_ref().unwrap_or(&no_role)),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip(self, connection, namespace))]
-    fn apply_derivation(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &common::prov::NamespaceId,
-        derivation: &Derivation,
-    ) -> Result<(), StoreError> {
-        let stored_generated = self.entity_by_entity_external_id_and_namespace(
-            connection,
-            derivation.generated_id.external_id_part(),
-            namespace,
-        )?;
-
-        let stored_used = self.entity_by_entity_external_id_and_namespace(
-            connection,
-            derivation.used_id.external_id_part(),
-            namespace,
-        )?;
-
-        let stored_activity = derivation
-            .activity_id
-            .as_ref()
-            .map(|activity_id| {
-                self.activity_by_activity_external_id_and_namespace(
-                    connection,
-                    activity_id.external_id_part(),
-                    namespace,
-                )
-            })
-            .transpose()?;
-
-        use schema::derivation::dsl as link;
-        diesel::insert_into(schema::derivation::table)
-            .values((
-                &link::used_entity_id.eq(stored_used.id),
-                &link::generated_entity_id.eq(stored_generated.id),
-                &link::typ.eq(derivation.typ),
-                &link::activity_id.eq(stored_activity.map_or(-1, |activity| activity.id)),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    fn apply_was_generated_by(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &common::prov::NamespaceId,
-        generation: &Generation,
-    ) -> Result<(), StoreError> {
-        let storedactivity = self.activity_by_activity_external_id_and_namespace(
-            connection,
-            generation.activity_id.external_id_part(),
-            namespace,
-        )?;
-
-        let storedentity = self.entity_by_entity_external_id_and_namespace(
-            connection,
-            generation.generated_id.external_id_part(),
-            namespace,
-        )?;
-
-        use schema::generation::dsl as link;
-        diesel::insert_into(schema::generation::table)
-            .values((
-                &link::activity_id.eq(storedactivity.id),
-                &link::generated_entity_id.eq(storedentity.id),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip(self, connection))]
-    fn apply_was_attributed_to(
-        &self,
-        connection: &mut PgConnection,
-        namespace_id: &common::prov::NamespaceId,
-        attribution: &Attribution,
-    ) -> Result<(), StoreError> {
-        let stored_entity = self.entity_by_entity_external_id_and_namespace(
-            connection,
-            attribution.entity_id.external_id_part(),
-            namespace_id,
-        )?;
-
-        let stored_agent = self.agent_by_agent_external_id_and_namespace(
-            connection,
-            attribution.agent_id.external_id_part(),
-            namespace_id,
-        )?;
-
-        use schema::attribution::dsl as attr;
-        let no_role = common::prov::Role("".to_string());
-        diesel::insert_into(schema::attribution::table)
-            .values((
-                &attr::entity_id.eq(stored_entity.id),
-                &attr::agent_id.eq(stored_agent.id),
-                &attr::role.eq(attribution.role.as_ref().unwrap_or(&no_role)),
-            ))
-            .on_conflict_do_nothing()
-            .execute(connection)?;
-
-        Ok(())
-    }
-
-    pub fn connection(
-        &self,
-    ) -> Result<PooledConnection<ConnectionManager<PgConnection>>, StoreError> {
-        self.pool.get().map_err(StoreError::DbPool)
-    }
-
-    #[instrument(skip_all)]
-    pub fn get_current_agent(
-        &self,
-        connection: &mut PgConnection,
-    ) -> Result<query::Agent, StoreError> {
-        use schema::agent::dsl;
-        Ok(schema::agent::table
-            .filter(dsl::current.ne(0))
-            .first::<query::Agent>(connection)?)
-    }
-
-    /// Get the last fully synchronized offset
-    #[instrument(skip_all)]
-    pub fn get_last_block_id(&self) -> Result<Option<BlockId>, StoreError> {
-        use schema::ledgersync::dsl;
-        self.connection()?.build_transaction().run(|connection| {
-            let block_id_and_tx = schema::ledgersync::table
-                .order_by(dsl::sync_time)
-                .select((dsl::bc_offset, dsl::tx_id))
-                .first::<(Option<String>, String)>(connection)
-                .map_err(StoreError::from)?;
-
-            if let Some(block_id) = block_id_and_tx.0 {
-                Ok(Some(BlockId::try_from(&*block_id)?))
-            } else {
-                Ok(None)
-            }
-        })
-    }
-
-    #[instrument(skip_all)]
-    pub fn namespace_by_external_id(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &ExternalId,
-    ) -> Result<(NamespaceId, i32), StoreError> {
-        use self::schema::namespace::dsl;
-
-        let ns = dsl::namespace
-            .filter(dsl::external_id.eq(namespace))
-            .select((dsl::id, dsl::external_id, dsl::uuid))
-            .first::<(i32, String, String)>(connection)
-            .optional()?
-            .ok_or(StoreError::RecordNotFound {})?;
-
-        Ok((NamespaceId::from_external_id(ns.1, Uuid::from_str(&ns.2)?), ns.0))
-    }
-
-    #[instrument(skip_all)]
-    pub fn new(pool: Pool<ConnectionManager<PgConnection>>) -> Result<Self, StoreError> {
-        Ok(Store { pool })
-    }
-
-    #[instrument(skip_all)]
-    pub fn populate_prov_model_for_agent(
-        &self,
-        agent: query::Agent,
-        namespaceid: &NamespaceId,
-        model: &mut ProvModel,
-        connection: &mut PgConnection,
-    ) -> Result<(), StoreError> {
-        debug!(?agent, "Map agent to prov");
-
-        let attributes = schema::agent_attribute::table
-            .filter(schema::agent_attribute::agent_id.eq(&agent.id))
-            .load::<query::AgentAttribute>(connection)?;
-
-        let agentid: AgentId = AgentId::from_external_id(&agent.external_id);
-        model.agents.insert(
-            (namespaceid.clone(), agentid.clone()),
-            Agent {
-                id: agentid,
-                namespaceid: namespaceid.clone(),
-                external_id: ExternalId::from(&agent.external_id),
-                domaintypeid: agent.domaintype.map(DomaintypeId::from_external_id),
-                attributes: attributes
-                    .iter()
-                    .map(|attr| {
-                        serde_json::from_str(&attr.value)
-                            .map_err(|e| StoreError::SerializationError(e.to_string()))
-                            .map(|value| Attribute { typ: attr.typename.clone(), value })
-                    })
-                    .collect::<Result<Vec<_>, StoreError>>()?,
-            }
-            .into(),
-        );
-
-        for (responsible, activity, role) in schema::delegation::table
-            .filter(schema::delegation::delegate_id.eq(agent.id))
-            .inner_join(
-                schema::agent::table.on(schema::delegation::responsible_id.eq(schema::agent::id)),
-            )
-            .inner_join(
-                schema::activity::table
-                    .on(schema::delegation::activity_id.eq(schema::activity::id)),
-            )
-            .order(schema::agent::external_id)
-            .select((
-                schema::agent::external_id,
-                schema::activity::external_id,
-                schema::delegation::role,
-            ))
-            .load::<(String, String, String)>(connection)?
-        {
-            model.qualified_delegation(
-                namespaceid,
-                &AgentId::from_external_id(responsible),
-                &AgentId::from_external_id(&agent.external_id),
-                {
-                    if activity.contains("hidden entry for Option None") {
-                        None
-                    } else {
-                        Some(ActivityId::from_external_id(activity))
-                    }
-                },
-                {
-                    if role.is_empty() {
-                        None
-                    } else {
-                        Some(Role(role))
-                    }
-                },
-            );
-        }
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    pub fn populate_prov_model_for_activity(
-        &self,
-        activity: query::Activity,
-        namespaceid: &NamespaceId,
-        model: &mut ProvModel,
-        connection: &mut PgConnection,
-    ) -> Result<(), StoreError> {
-        use diesel::prelude::*;
-        use schema::{
-            activity_attribute::dsl as activity_attr_dsl,
-            generation::dsl as generation_dsl,
-            usage::dsl as usage_dsl,
-            wasinformedby::dsl as wasinformedby_dsl,
-            association::dsl as association_dsl,
-            entity::dsl as entity_dsl,
-            agent::dsl as agent_dsl,
-            activity::dsl as activity_dsl,
-        };
-
-        let attributes = activity_attr_dsl::activity_attribute
-            .filter(activity_attr_dsl::activity_id.eq(&activity.id))
-            .load::<query::ActivityAttribute>(connection)?;
-
-        let id: ActivityId = ActivityId::from_external_id(&activity.external_id);
-        model.activities.insert(
-            (namespaceid.clone(), id.clone()),
-            Activity {
-                id: id.clone(),
-                namespace_id: namespaceid.clone(),
-                external_id: activity.external_id.into(),
-                started: activity.started.map(|x| Utc.from_utc_datetime(&x).into()),
-                ended: activity.ended.map(|x| Utc.from_utc_datetime(&x).into()),
-                domaintype_id: activity.domaintype.map(DomaintypeId::from_external_id),
-                attributes: attributes
-                    .iter()
-                    .map(|attr| {
-                        serde_json::from_str(&attr.value)
-                            .map_err(|e| StoreError::SerializationError(e.to_string()))
-                            .map(|value| Attribute { typ: attr.typename.clone(), value })
-                    })
-                    .collect::<Result<Vec<_>, StoreError>>()?,
-            }
-            .into(),
-        );
-
-        let (generations, usages, wasinformedbys, associations): (Generations, Usages, WasInformedBys, Associations) = (
-            generation_dsl::generation
-                .filter(generation_dsl::activity_id.eq(activity.id))
-                .order(generation_dsl::activity_id.asc())
-                .inner_join(entity_dsl::entity)
-                .select(entity_dsl::external_id)
-                .load::<String>(connection)?,
-            usage_dsl::usage
-                .filter(usage_dsl::activity_id.eq(activity.id))
-                .order(usage_dsl::activity_id.asc())
-                .inner_join(entity_dsl::entity)
-                .select(entity_dsl::external_id)
-                .load::<String>(connection)?,
-            wasinformedby_dsl::wasinformedby
-                .filter(wasinformedby_dsl::activity_id.eq(activity.id))
-                .inner_join(activity_dsl::activity.on(wasinformedby_dsl::informing_activity_id.eq(activity_dsl::id)))
-                .select(activity_dsl::external_id)
-                .load::<String>(connection)?,
-            association_dsl::association
-                .filter(association_dsl::activity_id.eq(activity.id))
-                .order(association_dsl::activity_id.asc())
-                .inner_join(agent_dsl::agent)
-                .select((agent_dsl::external_id, association_dsl::role))
-                .load::<(String, String)>(connection)?
-        );
-
-        for generation in generations {
-            model.was_generated_by(
-                namespaceid.clone(),
-                &EntityId::from_external_id(generation),
-                &id,
-            );
-        }
-
-        for used in usages {
-            model.used(namespaceid.clone(), &id, &EntityId::from_external_id(used));
-        }
-
-        for wasinformedby in wasinformedbys {
-            model.was_informed_by(
-                namespaceid.clone(),
-                &id,
-                &ActivityId::from_external_id(wasinformedby),
-            );
-        }
-
-        for (agent, role) in associations {
-            model.qualified_association(namespaceid, &id, &AgentId::from_external_id(agent), {
-                if role.is_empty() {
-                    None
-                } else {
-                    Some(Role(role))
-                }
-            });
-        }
-
-        debug!(populate_entity = %model.summarize());
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    pub fn populate_prov_model_for_entity(
-        &self,
-        entity: query::Entity,
-        namespace_id: &NamespaceId,
-        model: &mut ProvModel,
-        connection: &mut PgConnection,
-    ) -> Result<(), StoreError> {
-        let query::Entity { id, namespace_id: _, domaintype, external_id } = entity;
-
-        let entity_id = EntityId::from_external_id(&external_id);
-
-        for (agent, role) in schema::attribution::table
-            .filter(schema::attribution::entity_id.eq(&id))
-            .order(schema::attribution::entity_id.asc())
-            .inner_join(schema::agent::table)
-            .select((schema::agent::external_id, schema::attribution::role))
-            .load::<(String, String)>(connection)?
-        {
-            model.qualified_attribution(
-                namespace_id,
-                &entity_id,
-                &AgentId::from_external_id(agent),
-                {
-                    if role.is_empty() {
-                        None
-                    } else {
-                        Some(Role(role))
-                    }
-                },
-            );
-        }
-
-        let attributes = schema::entity_attribute::table
-            .filter(schema::entity_attribute::entity_id.eq(&id))
-            .load::<query::EntityAttribute>(connection)?;
-
-        model.entities.insert(
-            (namespace_id.clone(), entity_id.clone()),
-            Entity {
-                id: entity_id.clone(),
-                namespace_id: namespace_id.clone(),
-                external_id: external_id.into(),
-                domaintypeid: domaintype.map(DomaintypeId::from_external_id),
-                attributes: attributes
-                    .iter()
-                    .map(|attr| {
-                        serde_json::from_str(&attr.value)
-                            .map_err(|e| StoreError::SerializationError(e.to_string()))
-                            .map(|value| Attribute { typ: attr.typename.clone(), value })
-                    })
-                    .collect::<Result<Vec<_>, StoreError>>()?,
-            }
-            .into(),
-        );
-
-        for (activity_id, activity_external_id, used_entity_id, typ) in schema::derivation::table
-            .filter(schema::derivation::generated_entity_id.eq(&id))
-            .order(schema::derivation::generated_entity_id.asc())
-            .inner_join(
-                schema::activity::table
-                    .on(schema::derivation::activity_id.eq(schema::activity::id)),
-            )
-            .inner_join(
-                schema::entity::table.on(schema::derivation::used_entity_id.eq(schema::entity::id)),
-            )
-            .select((
-                schema::derivation::activity_id,
-                schema::activity::external_id,
-                schema::entity::external_id,
-                schema::derivation::typ,
-            ))
-            .load::<(i32, String, String, i32)>(connection)?
-        {
-            let typ = DerivationType::try_from(typ)
-                .map_err(|_| StoreError::InvalidDerivationTypeRecord(typ))?;
-
-            model.was_derived_from(
-                namespace_id.clone(),
-                typ,
-                EntityId::from_external_id(used_entity_id),
-                entity_id.clone(),
-                {
-                    match activity_id {
-                        -1 => None,
-                        _ => Some(ActivityId::from_external_id(activity_external_id)),
-                    }
-                },
-            );
-        }
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    pub fn load_prov_model_for_namespace(
-        &self,
-        connection: &mut PgConnection,
-        namespace: &NamespaceId,
-    ) -> Result<ProvModel, StoreError> {
-        let mut model = ProvModel::default();
-        let (namespaceid, nsid) =
-            self.namespace_by_external_id(connection, namespace.external_id_part())?;
-
-        let agents = schema::agent::table
-            .filter(schema::agent::namespace_id.eq(&nsid))
-            .load::<query::Agent>(connection)?;
-
-        for agent in agents {
-            self.populate_prov_model_for_agent(agent, &namespaceid, &mut model, connection)?;
-        }
-
-        let activities = schema::activity::table
-            .filter(schema::activity::namespace_id.eq(nsid))
-            .load::<query::Activity>(connection)?;
-
-        for activity in activities {
-            self.populate_prov_model_for_activity(activity, &namespaceid, &mut model, connection)?;
-        }
-
-        let entities = schema::entity::table
-            .filter(schema::entity::namespace_id.eq(nsid))
-            .load::<query::Entity>(connection)?;
-
-        for entity in entities {
-            self.populate_prov_model_for_entity(entity, &namespaceid, &mut model, connection)?;
-        }
-
-        Ok(model)
-    }
-
-    /// Set the last fully synchronized offset
-    #[instrument(skip(self), level = "info")]
-    pub fn set_last_block_id(
-        &self,
-        block_id: &BlockId,
-        tx_id: ChronicleTransactionId,
-    ) -> Result<(), StoreError> {
-        use schema::ledgersync as dsl;
-
-        Ok(self.connection()?.build_transaction().run(|connection| {
-            diesel::insert_into(dsl::table)
-                .values((
-                    dsl::bc_offset.eq(block_id.to_string()),
-                    dsl::tx_id.eq(&*tx_id.to_string()),
-                    (dsl::sync_time.eq(Utc::now().naive_utc())),
-                ))
-                .on_conflict(dsl::tx_id)
-                .do_update()
-                .set(dsl::sync_time.eq(Utc::now().naive_utc()))
-                .execute(connection)
-                .map(|_| ())
-        })?)
-    }
-
-    #[instrument(skip_all)]
-    pub fn apply_use_agent(
-        &self,
-        connection: &mut PgConnection,
-        external_id: &ExternalId,
-        namespace: &ExternalId,
-    ) -> Result<(), StoreError> {
-        let (_, nsid) = self.namespace_by_external_id(connection, namespace)?;
-        use schema::agent::dsl;
-
-        diesel::update(schema::agent::table.filter(dsl::current.ne(0)))
-            .set(dsl::current.eq(0))
-            .execute(connection)?;
-
-        diesel::update(
-            schema::agent::table
-                .filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid))),
-        )
-        .set(dsl::current.eq(1))
-        .execute(connection)?;
-
-        Ok(())
-    }
-
-    #[instrument(skip_all)]
-    pub fn prov_model_for_agent_id(
-        &self,
-        connection: &mut PgConnection,
-        id: &AgentId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        let agent = schema::agent::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::agent::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Agent::as_select())
-            .first(connection)?;
-
-        let namespace = self.namespace_by_external_id(connection, ns)?.0;
-
-        let mut model = ProvModel::default();
-        self.populate_prov_model_for_agent(agent, &namespace, &mut model, connection)?;
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn apply_prov_model_for_agent_id(
-        &self,
-        connection: &mut PgConnection,
-        mut model: ProvModel,
-        id: &AgentId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        if let Some(agent) = schema::agent::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::agent::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Agent::as_select())
-            .first(connection)
-            .optional()?
-        {
-            let namespace = self.namespace_by_external_id(connection, ns)?.0;
-            self.populate_prov_model_for_agent(agent, &namespace, &mut model, connection)?;
-        }
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn prov_model_for_activity_id(
-        &self,
-        connection: &mut PgConnection,
-        id: &ActivityId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        let activity = schema::activity::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::activity::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Activity::as_select())
-            .first(connection)?;
-
-        let namespace = self.namespace_by_external_id(connection, ns)?.0;
-
-        let mut model = ProvModel::default();
-        self.populate_prov_model_for_activity(activity, &namespace, &mut model, connection)?;
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn apply_prov_model_for_activity_id(
-        &self,
-        connection: &mut PgConnection,
-        mut model: ProvModel,
-        id: &ActivityId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        if let Some(activity) = schema::activity::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::activity::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Activity::as_select())
-            .first(connection)
-            .optional()?
-        {
-            let namespace = self.namespace_by_external_id(connection, ns)?.0;
-            self.populate_prov_model_for_activity(activity, &namespace, &mut model, connection)?;
-        }
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn prov_model_for_entity_id(
-        &self,
-        connection: &mut PgConnection,
-        id: &EntityId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        let entity = schema::entity::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::entity::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Entity::as_select())
-            .first(connection)?;
-
-        let namespace = self.namespace_by_external_id(connection, ns)?.0;
-
-        let mut model = ProvModel::default();
-        self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn apply_prov_model_for_entity_id(
-        &self,
-        connection: &mut PgConnection,
-        mut model: ProvModel,
-        id: &EntityId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        if let Some(entity) = schema::entity::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::entity::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Entity::as_select())
-            .first(connection)
-            .optional()?
-        {
-            let namespace = self.namespace_by_external_id(connection, ns)?.0;
-            self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
-        }
-        Ok(model)
-    }
-
-    #[instrument(skip_all)]
-    pub fn prov_model_for_usage(
-        &self,
-        connection: &mut PgConnection,
-        mut model: ProvModel,
-        id: &EntityId,
-        activity_id: &ActivityId,
-        ns: &ExternalId,
-    ) -> Result<ProvModel, StoreError> {
-        if let Some(entity) = schema::entity::table
-            .inner_join(schema::namespace::dsl::namespace)
-            .filter(schema::entity::external_id.eq(id.external_id_part()))
-            .filter(schema::namespace::external_id.eq(ns))
-            .select(query::Entity::as_select())
-            .first(connection)
-            .optional()?
-        {
-            if let Some(activity) = schema::activity::table
-                .inner_join(schema::namespace::dsl::namespace)
-                .filter(schema::activity::external_id.eq(id.external_id_part()))
-                .filter(schema::namespace::external_id.eq(ns))
-                .select(query::Activity::as_select())
-                .first(connection)
-                .optional()?
-            {
-                let namespace = self.namespace_by_external_id(connection, ns)?.0;
-                for used in schema::usage::table
-                    .filter(schema::usage::activity_id.eq(activity.id))
-                    .order(schema::usage::activity_id.asc())
-                    .inner_join(schema::entity::table)
-                    .select(schema::entity::external_id)
-                    .load::<String>(connection)?
-                {
-                    model.used(namespace.clone(), activity_id, &EntityId::from_external_id(used));
-                }
-                self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
-                self.populate_prov_model_for_activity(activity, &namespace, &mut model, connection)?;
-            }
-        }
-        Ok(model)
-    }
+	#[instrument(name = "Bind namespace", skip(self))]
+	pub fn namespace_binding(&self, external_id: &str, uuid: Uuid) -> Result<(), StoreError> {
+		use schema::namespace::dsl;
+
+		let uuid = uuid.to_string();
+		self.connection()?.build_transaction().run(|conn| {
+			diesel::insert_into(dsl::namespace)
+				.values((dsl::external_id.eq(external_id), dsl::uuid.eq(&uuid)))
+				.on_conflict(dsl::external_id)
+				.do_update()
+				.set(dsl::uuid.eq(&uuid))
+				.execute(conn)
+		})?;
+
+		Ok(())
+	}
+
+	/// Fetch the activity record for the IRI
+	fn activity_by_activity_external_id_and_namespace(
+		&self,
+		connection: &mut PgConnection,
+		external_id: &ExternalId,
+		namespace_id: &NamespaceId,
+	) -> Result<query::Activity, StoreError> {
+		let (_namespaceid, nsid) =
+			self.namespace_by_external_id(connection, namespace_id.external_id_part())?;
+		use schema::activity::dsl;
+
+		Ok(schema::activity::table
+			.filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid)))
+			.first::<query::Activity>(connection)?)
+	}
+
+	/// Fetch the entity record for the IRI
+	fn entity_by_entity_external_id_and_namespace(
+		&self,
+		connection: &mut PgConnection,
+		external_id: &ExternalId,
+		namespace_id: &NamespaceId,
+	) -> Result<query::Entity, StoreError> {
+		let (_, ns_id) =
+			self.namespace_by_external_id(connection, namespace_id.external_id_part())?;
+		use schema::entity::dsl;
+
+		Ok(schema::entity::table
+			.filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(ns_id)))
+			.first::<query::Entity>(connection)?)
+	}
+
+	/// Fetch the agent record for the IRI
+	pub fn agent_by_agent_external_id_and_namespace(
+		&self,
+		connection: &mut PgConnection,
+		external_id: &ExternalId,
+		namespace_id: &NamespaceId,
+	) -> Result<query::Agent, StoreError> {
+		let (_namespaceid, nsid) =
+			self.namespace_by_external_id(connection, namespace_id.external_id_part())?;
+		use schema::agent::dsl;
+
+		Ok(schema::agent::table
+			.filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid)))
+			.first::<query::Agent>(connection)?)
+	}
+
+	/// Apply an activity to persistent storage, name + namespace are a key, so we update times +
+	/// domaintype on conflict
+	#[instrument(level = "trace", skip(self, connection), ret(Debug))]
+	fn apply_activity(
+		&self,
+		connection: &mut PgConnection,
+		Activity {
+			ref external_id, namespace_id, started, ended, domaintype_id, attributes, ..
+		}: &Activity,
+	) -> Result<(), StoreError> {
+		use schema::activity as dsl;
+		let (_, nsid) =
+			self.namespace_by_external_id(connection, namespace_id.external_id_part())?;
+
+		let existing = self
+			.activity_by_activity_external_id_and_namespace(connection, external_id, namespace_id)
+			.ok();
+
+		let resolved_domain_type =
+			domaintype_id.as_ref().map(|x| x.external_id_part().clone()).or_else(|| {
+				existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from))
+			});
+
+		let resolved_started = started
+			.map(|x| x.naive_utc())
+			.or_else(|| existing.as_ref().and_then(|x| x.started));
+
+		let resolved_ended =
+			ended.map(|x| x.naive_utc()).or_else(|| existing.as_ref().and_then(|x| x.ended));
+
+		diesel::insert_into(schema::activity::table)
+			.values((
+				dsl::external_id.eq(external_id),
+				dsl::namespace_id.eq(nsid),
+				dsl::started.eq(started.map(|t| t.naive_utc())),
+				dsl::ended.eq(ended.map(|t| t.naive_utc())),
+				dsl::domaintype.eq(domaintype_id.as_ref().map(|x| x.external_id_part())),
+			))
+			.on_conflict((dsl::external_id, dsl::namespace_id))
+			.do_update()
+			.set((
+				dsl::domaintype.eq(resolved_domain_type),
+				dsl::started.eq(resolved_started),
+				dsl::ended.eq(resolved_ended),
+			))
+			.execute(connection)?;
+
+		let query::Activity { id, .. } = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			external_id,
+			namespace_id,
+		)?;
+
+		diesel::insert_into(schema::activity_attribute::table)
+			.values(
+				attributes
+					.iter()
+					.map(|Attribute { typ, value, .. }| query::ActivityAttribute {
+						activity_id: id,
+						typename: typ.to_owned(),
+						value: value.to_string(),
+					})
+					.collect::<Vec<_>>(),
+			)
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	/// Apply an agent to persistent storage, external_id + namespace are a key, so we update
+	/// publickey + domaintype on conflict current is a special case, only relevant to local CLI
+	/// context. A possibly improved design would be to store this in another table given its scope
+	#[instrument(level = "trace", skip(self, connection), ret(Debug))]
+	fn apply_agent(
+		&self,
+		connection: &mut PgConnection,
+		Agent { ref external_id, namespaceid, domaintypeid, attributes, .. }: &Agent,
+	) -> Result<(), StoreError> {
+		use schema::agent::dsl;
+		let (_, nsid) =
+			self.namespace_by_external_id(connection, namespaceid.external_id_part())?;
+
+		let existing = self
+			.agent_by_agent_external_id_and_namespace(connection, external_id, namespaceid)
+			.ok();
+
+		let resolved_domain_type =
+			domaintypeid.as_ref().map(|x| x.external_id_part().clone()).or_else(|| {
+				existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from))
+			});
+
+		diesel::insert_into(schema::agent::table)
+			.values((
+				dsl::external_id.eq(external_id),
+				dsl::namespace_id.eq(nsid),
+				dsl::current.eq(0),
+				dsl::domaintype.eq(domaintypeid.as_ref().map(|x| x.external_id_part())),
+			))
+			.on_conflict((dsl::namespace_id, dsl::external_id))
+			.do_update()
+			.set(dsl::domaintype.eq(resolved_domain_type))
+			.execute(connection)?;
+
+		let query::Agent { id, .. } =
+			self.agent_by_agent_external_id_and_namespace(connection, external_id, namespaceid)?;
+
+		diesel::insert_into(schema::agent_attribute::table)
+			.values(
+				attributes
+					.iter()
+					.map(|Attribute { typ, value, .. }| query::AgentAttribute {
+						agent_id: id,
+						typename: typ.to_owned(),
+						value: value.to_string(),
+					})
+					.collect::<Vec<_>>(),
+			)
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(level = "trace", skip(self, connection), ret(Debug))]
+	fn apply_entity(
+		&self,
+		connection: &mut PgConnection,
+		Entity { namespace_id, external_id, domaintypeid, attributes, .. }: &Entity,
+	) -> Result<(), StoreError> {
+		use schema::entity::dsl;
+		let (_, nsid) =
+			self.namespace_by_external_id(connection, namespace_id.external_id_part())?;
+
+		let existing = self
+			.entity_by_entity_external_id_and_namespace(connection, external_id, namespace_id)
+			.ok();
+
+		let resolved_domain_type =
+			domaintypeid.as_ref().map(|x| x.external_id_part().clone()).or_else(|| {
+				existing.as_ref().and_then(|x| x.domaintype.as_ref().map(ExternalId::from))
+			});
+
+		diesel::insert_into(schema::entity::table)
+			.values((
+				dsl::external_id.eq(&external_id),
+				dsl::namespace_id.eq(nsid),
+				dsl::domaintype.eq(domaintypeid.as_ref().map(|x| x.external_id_part())),
+			))
+			.on_conflict((dsl::namespace_id, dsl::external_id))
+			.do_update()
+			.set(dsl::domaintype.eq(resolved_domain_type))
+			.execute(connection)?;
+
+		let query::Entity { id, .. } =
+			self.entity_by_entity_external_id_and_namespace(connection, external_id, namespace_id)?;
+
+		diesel::insert_into(schema::entity_attribute::table)
+			.values(
+				attributes
+					.iter()
+					.map(|Attribute { typ, value, .. }| query::EntityAttribute {
+						entity_id: id,
+						typename: typ.to_owned(),
+						value: value.to_string(),
+					})
+					.collect::<Vec<_>>(),
+			)
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	fn apply_model(
+		&self,
+		connection: &mut PgConnection,
+		model: &ProvModel,
+	) -> Result<(), StoreError> {
+		for (_, ns) in model.namespaces.iter() {
+			self.apply_namespace(connection, ns)?
+		}
+		for (_, agent) in model.agents.iter() {
+			self.apply_agent(connection, agent)?
+		}
+		for (_, activity) in model.activities.iter() {
+			self.apply_activity(connection, activity)?
+		}
+		for (_, entity) in model.entities.iter() {
+			self.apply_entity(connection, entity)?
+		}
+
+		for ((namespaceid, _), association) in model.association.iter() {
+			for association in association.iter() {
+				self.apply_was_associated_with(connection, namespaceid, association)?;
+			}
+		}
+
+		for ((namespaceid, _), usage) in model.usage.iter() {
+			for usage in usage.iter() {
+				self.apply_used(connection, namespaceid, usage)?;
+			}
+		}
+
+		for ((namespaceid, activity_id), was_informed_by) in model.was_informed_by.iter() {
+			for (_, informing_activity_id) in was_informed_by.iter() {
+				self.apply_was_informed_by(
+					connection,
+					namespaceid,
+					activity_id,
+					informing_activity_id,
+				)?;
+			}
+		}
+
+		for ((namespaceid, _), generation) in model.generation.iter() {
+			for generation in generation.iter() {
+				self.apply_was_generated_by(connection, namespaceid, generation)?;
+			}
+		}
+
+		for ((namespaceid, _), derivation) in model.derivation.iter() {
+			for derivation in derivation.iter() {
+				self.apply_derivation(connection, namespaceid, derivation)?;
+			}
+		}
+
+		for ((namespaceid, _), delegation) in model.delegation.iter() {
+			for delegation in delegation.iter() {
+				self.apply_delegation(connection, namespaceid, delegation)?;
+			}
+		}
+
+		for ((namespace_id, _), attribution) in model.attribution.iter() {
+			for attribution in attribution.iter() {
+				self.apply_was_attributed_to(connection, namespace_id, attribution)?;
+			}
+		}
+
+		Ok(())
+	}
+
+	#[instrument(level = "trace", skip(self, connection), ret(Debug))]
+	fn apply_namespace(
+		&self,
+		connection: &mut PgConnection,
+		Namespace { ref external_id, ref uuid, .. }: &Namespace,
+	) -> Result<(), StoreError> {
+		use schema::namespace::dsl;
+		diesel::insert_into(schema::namespace::table)
+			.values((dsl::external_id.eq(external_id), dsl::uuid.eq(hex::encode(uuid))))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	pub fn apply_prov(&self, prov: &ProvModel) -> Result<(), StoreError> {
+		self.connection()?
+			.build_transaction()
+			.run(|connection| self.apply_model(connection, prov))?;
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	fn apply_used(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &NamespaceId,
+		usage: &Usage,
+	) -> Result<(), StoreError> {
+		let storedactivity = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			usage.activity_id.external_id_part(),
+			namespace,
+		)?;
+
+		let storedentity = self.entity_by_entity_external_id_and_namespace(
+			connection,
+			usage.entity_id.external_id_part(),
+			namespace,
+		)?;
+
+		use schema::usage::dsl as link;
+		diesel::insert_into(schema::usage::table)
+			.values((
+				&link::activity_id.eq(storedactivity.id),
+				&link::entity_id.eq(storedentity.id),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	fn apply_was_informed_by(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &NamespaceId,
+		activity_id: &ActivityId,
+		informing_activity_id: &ActivityId,
+	) -> Result<(), StoreError> {
+		let storedactivity = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			activity_id.external_id_part(),
+			namespace,
+		)?;
+
+		let storedinformingactivity = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			informing_activity_id.external_id_part(),
+			namespace,
+		)?;
+
+		use schema::wasinformedby::dsl as link;
+		diesel::insert_into(schema::wasinformedby::table)
+			.values((
+				&link::activity_id.eq(storedactivity.id),
+				&link::informing_activity_id.eq(storedinformingactivity.id),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip(self, connection))]
+	fn apply_was_associated_with(
+		&self,
+		connection: &mut PgConnection,
+		namespaceid: &common::prov::NamespaceId,
+		association: &Association,
+	) -> Result<(), StoreError> {
+		let storedactivity = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			association.activity_id.external_id_part(),
+			namespaceid,
+		)?;
+
+		let storedagent = self.agent_by_agent_external_id_and_namespace(
+			connection,
+			association.agent_id.external_id_part(),
+			namespaceid,
+		)?;
+
+		use schema::association::dsl as asoc;
+		let no_role = common::prov::Role("".to_string());
+		diesel::insert_into(schema::association::table)
+			.values((
+				&asoc::activity_id.eq(storedactivity.id),
+				&asoc::agent_id.eq(storedagent.id),
+				&asoc::role.eq(association.role.as_ref().unwrap_or(&no_role)),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip(self, connection, namespace))]
+	fn apply_delegation(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &common::prov::NamespaceId,
+		delegation: &Delegation,
+	) -> Result<(), StoreError> {
+		let responsible = self.agent_by_agent_external_id_and_namespace(
+			connection,
+			delegation.responsible_id.external_id_part(),
+			namespace,
+		)?;
+
+		let delegate = self.agent_by_agent_external_id_and_namespace(
+			connection,
+			delegation.delegate_id.external_id_part(),
+			namespace,
+		)?;
+
+		let activity = {
+			if let Some(ref activity_id) = delegation.activity_id {
+				Some(
+					self.activity_by_activity_external_id_and_namespace(
+						connection,
+						activity_id.external_id_part(),
+						namespace,
+					)?
+					.id,
+				)
+			} else {
+				None
+			}
+		};
+
+		use schema::delegation::dsl as link;
+		let no_role = common::prov::Role("".to_string());
+		diesel::insert_into(schema::delegation::table)
+			.values((
+				&link::responsible_id.eq(responsible.id),
+				&link::delegate_id.eq(delegate.id),
+				&link::activity_id.eq(activity.unwrap_or(-1)),
+				&link::role.eq(delegation.role.as_ref().unwrap_or(&no_role)),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip(self, connection, namespace))]
+	fn apply_derivation(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &common::prov::NamespaceId,
+		derivation: &Derivation,
+	) -> Result<(), StoreError> {
+		let stored_generated = self.entity_by_entity_external_id_and_namespace(
+			connection,
+			derivation.generated_id.external_id_part(),
+			namespace,
+		)?;
+
+		let stored_used = self.entity_by_entity_external_id_and_namespace(
+			connection,
+			derivation.used_id.external_id_part(),
+			namespace,
+		)?;
+
+		let stored_activity = derivation
+			.activity_id
+			.as_ref()
+			.map(|activity_id| {
+				self.activity_by_activity_external_id_and_namespace(
+					connection,
+					activity_id.external_id_part(),
+					namespace,
+				)
+			})
+			.transpose()?;
+
+		use schema::derivation::dsl as link;
+		diesel::insert_into(schema::derivation::table)
+			.values((
+				&link::used_entity_id.eq(stored_used.id),
+				&link::generated_entity_id.eq(stored_generated.id),
+				&link::typ.eq(derivation.typ),
+				&link::activity_id.eq(stored_activity.map_or(-1, |activity| activity.id)),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	fn apply_was_generated_by(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &common::prov::NamespaceId,
+		generation: &Generation,
+	) -> Result<(), StoreError> {
+		let storedactivity = self.activity_by_activity_external_id_and_namespace(
+			connection,
+			generation.activity_id.external_id_part(),
+			namespace,
+		)?;
+
+		let storedentity = self.entity_by_entity_external_id_and_namespace(
+			connection,
+			generation.generated_id.external_id_part(),
+			namespace,
+		)?;
+
+		use schema::generation::dsl as link;
+		diesel::insert_into(schema::generation::table)
+			.values((
+				&link::activity_id.eq(storedactivity.id),
+				&link::generated_entity_id.eq(storedentity.id),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip(self, connection))]
+	fn apply_was_attributed_to(
+		&self,
+		connection: &mut PgConnection,
+		namespace_id: &common::prov::NamespaceId,
+		attribution: &Attribution,
+	) -> Result<(), StoreError> {
+		let stored_entity = self.entity_by_entity_external_id_and_namespace(
+			connection,
+			attribution.entity_id.external_id_part(),
+			namespace_id,
+		)?;
+
+		let stored_agent = self.agent_by_agent_external_id_and_namespace(
+			connection,
+			attribution.agent_id.external_id_part(),
+			namespace_id,
+		)?;
+
+		use schema::attribution::dsl as attr;
+		let no_role = common::prov::Role("".to_string());
+		diesel::insert_into(schema::attribution::table)
+			.values((
+				&attr::entity_id.eq(stored_entity.id),
+				&attr::agent_id.eq(stored_agent.id),
+				&attr::role.eq(attribution.role.as_ref().unwrap_or(&no_role)),
+			))
+			.on_conflict_do_nothing()
+			.execute(connection)?;
+
+		Ok(())
+	}
+
+	pub fn connection(
+		&self,
+	) -> Result<PooledConnection<ConnectionManager<PgConnection>>, StoreError> {
+		self.pool.get().map_err(StoreError::DbPool)
+	}
+
+	#[instrument(skip_all)]
+	pub fn get_current_agent(
+		&self,
+		connection: &mut PgConnection,
+	) -> Result<query::Agent, StoreError> {
+		use schema::agent::dsl;
+		Ok(schema::agent::table
+			.filter(dsl::current.ne(0))
+			.first::<query::Agent>(connection)?)
+	}
+
+	/// Get the last fully synchronized offset
+	#[instrument(skip_all)]
+	pub fn get_last_block_id(&self) -> Result<Option<BlockId>, StoreError> {
+		use schema::ledgersync::dsl;
+		self.connection()?.build_transaction().run(|connection| {
+			let block_id_and_tx = schema::ledgersync::table
+				.order_by(dsl::sync_time)
+				.select((dsl::bc_offset, dsl::tx_id))
+				.first::<(Option<String>, String)>(connection)
+				.map_err(StoreError::from)?;
+
+			if let Some(block_id) = block_id_and_tx.0 {
+				Ok(Some(BlockId::try_from(&*block_id)?))
+			} else {
+				Ok(None)
+			}
+		})
+	}
+
+	#[instrument(skip_all)]
+	pub fn namespace_by_external_id(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &ExternalId,
+	) -> Result<(NamespaceId, i32), StoreError> {
+		use self::schema::namespace::dsl;
+
+		let ns = dsl::namespace
+			.filter(dsl::external_id.eq(namespace))
+			.select((dsl::id, dsl::external_id, dsl::uuid))
+			.first::<(i32, String, String)>(connection)
+			.optional()?
+			.ok_or(StoreError::RecordNotFound {})?;
+
+		Ok((NamespaceId::from_external_id(ns.1, Uuid::from_str(&ns.2)?), ns.0))
+	}
+
+	#[instrument(skip_all)]
+	pub fn new(pool: Pool<ConnectionManager<PgConnection>>) -> Result<Self, StoreError> {
+		Ok(Store { pool })
+	}
+
+	#[instrument(skip_all)]
+	pub fn populate_prov_model_for_agent(
+		&self,
+		agent: query::Agent,
+		namespaceid: &NamespaceId,
+		model: &mut ProvModel,
+		connection: &mut PgConnection,
+	) -> Result<(), StoreError> {
+		debug!(?agent, "Map agent to prov");
+
+		let attributes = schema::agent_attribute::table
+			.filter(schema::agent_attribute::agent_id.eq(&agent.id))
+			.load::<query::AgentAttribute>(connection)?;
+
+		let agentid: AgentId = AgentId::from_external_id(&agent.external_id);
+		model.agents.insert(
+			(namespaceid.clone(), agentid.clone()),
+			Agent {
+				id: agentid,
+				namespaceid: namespaceid.clone(),
+				external_id: ExternalId::from(&agent.external_id),
+				domaintypeid: agent.domaintype.map(DomaintypeId::from_external_id),
+				attributes: attributes
+					.iter()
+					.map(|attr| {
+						serde_json::from_str(&attr.value)
+							.map_err(|e| StoreError::SerializationError(e.to_string()))
+							.map(|value| Attribute { typ: attr.typename.clone(), value })
+					})
+					.collect::<Result<Vec<_>, StoreError>>()?,
+			}
+			.into(),
+		);
+
+		for (responsible, activity, role) in schema::delegation::table
+			.filter(schema::delegation::delegate_id.eq(agent.id))
+			.inner_join(
+				schema::agent::table.on(schema::delegation::responsible_id.eq(schema::agent::id)),
+			)
+			.inner_join(
+				schema::activity::table
+					.on(schema::delegation::activity_id.eq(schema::activity::id)),
+			)
+			.order(schema::agent::external_id)
+			.select((
+				schema::agent::external_id,
+				schema::activity::external_id,
+				schema::delegation::role,
+			))
+			.load::<(String, String, String)>(connection)?
+		{
+			model.qualified_delegation(
+				namespaceid,
+				&AgentId::from_external_id(responsible),
+				&AgentId::from_external_id(&agent.external_id),
+				{
+					if activity.contains("hidden entry for Option None") {
+						None
+					} else {
+						Some(ActivityId::from_external_id(activity))
+					}
+				},
+				{
+					if role.is_empty() {
+						None
+					} else {
+						Some(Role(role))
+					}
+				},
+			);
+		}
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	pub fn populate_prov_model_for_activity(
+		&self,
+		activity: query::Activity,
+		namespaceid: &NamespaceId,
+		model: &mut ProvModel,
+		connection: &mut PgConnection,
+	) -> Result<(), StoreError> {
+		use diesel::prelude::*;
+		use schema::{
+			activity::dsl as activity_dsl, activity_attribute::dsl as activity_attr_dsl,
+			agent::dsl as agent_dsl, association::dsl as association_dsl,
+			entity::dsl as entity_dsl, generation::dsl as generation_dsl, usage::dsl as usage_dsl,
+			wasinformedby::dsl as wasinformedby_dsl,
+		};
+
+		let attributes = activity_attr_dsl::activity_attribute
+			.filter(activity_attr_dsl::activity_id.eq(&activity.id))
+			.load::<query::ActivityAttribute>(connection)?;
+
+		let id: ActivityId = ActivityId::from_external_id(&activity.external_id);
+		model.activities.insert(
+			(namespaceid.clone(), id.clone()),
+			Activity {
+				id: id.clone(),
+				namespace_id: namespaceid.clone(),
+				external_id: activity.external_id.into(),
+				started: activity.started.map(|x| Utc.from_utc_datetime(&x).into()),
+				ended: activity.ended.map(|x| Utc.from_utc_datetime(&x).into()),
+				domaintype_id: activity.domaintype.map(DomaintypeId::from_external_id),
+				attributes: attributes
+					.iter()
+					.map(|attr| {
+						serde_json::from_str(&attr.value)
+							.map_err(|e| StoreError::SerializationError(e.to_string()))
+							.map(|value| Attribute { typ: attr.typename.clone(), value })
+					})
+					.collect::<Result<Vec<_>, StoreError>>()?,
+			}
+			.into(),
+		);
+
+		let (generations, usages, wasinformedbys, associations): (
+			Generations,
+			Usages,
+			WasInformedBys,
+			Associations,
+		) = (
+			generation_dsl::generation
+				.filter(generation_dsl::activity_id.eq(activity.id))
+				.order(generation_dsl::activity_id.asc())
+				.inner_join(entity_dsl::entity)
+				.select(entity_dsl::external_id)
+				.load::<String>(connection)?,
+			usage_dsl::usage
+				.filter(usage_dsl::activity_id.eq(activity.id))
+				.order(usage_dsl::activity_id.asc())
+				.inner_join(entity_dsl::entity)
+				.select(entity_dsl::external_id)
+				.load::<String>(connection)?,
+			wasinformedby_dsl::wasinformedby
+				.filter(wasinformedby_dsl::activity_id.eq(activity.id))
+				.inner_join(
+					activity_dsl::activity
+						.on(wasinformedby_dsl::informing_activity_id.eq(activity_dsl::id)),
+				)
+				.select(activity_dsl::external_id)
+				.load::<String>(connection)?,
+			association_dsl::association
+				.filter(association_dsl::activity_id.eq(activity.id))
+				.order(association_dsl::activity_id.asc())
+				.inner_join(agent_dsl::agent)
+				.select((agent_dsl::external_id, association_dsl::role))
+				.load::<(String, String)>(connection)?,
+		);
+
+		for generation in generations {
+			model.was_generated_by(
+				namespaceid.clone(),
+				&EntityId::from_external_id(generation),
+				&id,
+			);
+		}
+
+		for used in usages {
+			model.used(namespaceid.clone(), &id, &EntityId::from_external_id(used));
+		}
+
+		for wasinformedby in wasinformedbys {
+			model.was_informed_by(
+				namespaceid.clone(),
+				&id,
+				&ActivityId::from_external_id(wasinformedby),
+			);
+		}
+
+		for (agent, role) in associations {
+			model.qualified_association(namespaceid, &id, &AgentId::from_external_id(agent), {
+				if role.is_empty() {
+					None
+				} else {
+					Some(Role(role))
+				}
+			});
+		}
+
+		debug!(populate_entity = %model.summarize());
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	pub fn populate_prov_model_for_entity(
+		&self,
+		entity: query::Entity,
+		namespace_id: &NamespaceId,
+		model: &mut ProvModel,
+		connection: &mut PgConnection,
+	) -> Result<(), StoreError> {
+		let query::Entity { id, namespace_id: _, domaintype, external_id } = entity;
+
+		let entity_id = EntityId::from_external_id(&external_id);
+
+		for (agent, role) in schema::attribution::table
+			.filter(schema::attribution::entity_id.eq(&id))
+			.order(schema::attribution::entity_id.asc())
+			.inner_join(schema::agent::table)
+			.select((schema::agent::external_id, schema::attribution::role))
+			.load::<(String, String)>(connection)?
+		{
+			model.qualified_attribution(
+				namespace_id,
+				&entity_id,
+				&AgentId::from_external_id(agent),
+				{
+					if role.is_empty() {
+						None
+					} else {
+						Some(Role(role))
+					}
+				},
+			);
+		}
+
+		let attributes = schema::entity_attribute::table
+			.filter(schema::entity_attribute::entity_id.eq(&id))
+			.load::<query::EntityAttribute>(connection)?;
+
+		model.entities.insert(
+			(namespace_id.clone(), entity_id.clone()),
+			Entity {
+				id: entity_id.clone(),
+				namespace_id: namespace_id.clone(),
+				external_id: external_id.into(),
+				domaintypeid: domaintype.map(DomaintypeId::from_external_id),
+				attributes: attributes
+					.iter()
+					.map(|attr| {
+						serde_json::from_str(&attr.value)
+							.map_err(|e| StoreError::SerializationError(e.to_string()))
+							.map(|value| Attribute { typ: attr.typename.clone(), value })
+					})
+					.collect::<Result<Vec<_>, StoreError>>()?,
+			}
+			.into(),
+		);
+
+		for (activity_id, activity_external_id, used_entity_id, typ) in schema::derivation::table
+			.filter(schema::derivation::generated_entity_id.eq(&id))
+			.order(schema::derivation::generated_entity_id.asc())
+			.inner_join(
+				schema::activity::table
+					.on(schema::derivation::activity_id.eq(schema::activity::id)),
+			)
+			.inner_join(
+				schema::entity::table.on(schema::derivation::used_entity_id.eq(schema::entity::id)),
+			)
+			.select((
+				schema::derivation::activity_id,
+				schema::activity::external_id,
+				schema::entity::external_id,
+				schema::derivation::typ,
+			))
+			.load::<(i32, String, String, i32)>(connection)?
+		{
+			let typ = DerivationType::try_from(typ)
+				.map_err(|_| StoreError::InvalidDerivationTypeRecord(typ))?;
+
+			model.was_derived_from(
+				namespace_id.clone(),
+				typ,
+				EntityId::from_external_id(used_entity_id),
+				entity_id.clone(),
+				{
+					match activity_id {
+						-1 => None,
+						_ => Some(ActivityId::from_external_id(activity_external_id)),
+					}
+				},
+			);
+		}
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	pub fn load_prov_model_for_namespace(
+		&self,
+		connection: &mut PgConnection,
+		namespace: &NamespaceId,
+	) -> Result<ProvModel, StoreError> {
+		let mut model = ProvModel::default();
+		let (namespaceid, nsid) =
+			self.namespace_by_external_id(connection, namespace.external_id_part())?;
+
+		let agents = schema::agent::table
+			.filter(schema::agent::namespace_id.eq(&nsid))
+			.load::<query::Agent>(connection)?;
+
+		for agent in agents {
+			self.populate_prov_model_for_agent(agent, &namespaceid, &mut model, connection)?;
+		}
+
+		let activities = schema::activity::table
+			.filter(schema::activity::namespace_id.eq(nsid))
+			.load::<query::Activity>(connection)?;
+
+		for activity in activities {
+			self.populate_prov_model_for_activity(activity, &namespaceid, &mut model, connection)?;
+		}
+
+		let entities = schema::entity::table
+			.filter(schema::entity::namespace_id.eq(nsid))
+			.load::<query::Entity>(connection)?;
+
+		for entity in entities {
+			self.populate_prov_model_for_entity(entity, &namespaceid, &mut model, connection)?;
+		}
+
+		Ok(model)
+	}
+
+	/// Set the last fully synchronized offset
+	#[instrument(skip(self), level = "info")]
+	pub fn set_last_block_id(
+		&self,
+		block_id: &BlockId,
+		tx_id: ChronicleTransactionId,
+	) -> Result<(), StoreError> {
+		use schema::ledgersync as dsl;
+
+		Ok(self.connection()?.build_transaction().run(|connection| {
+			diesel::insert_into(dsl::table)
+				.values((
+					dsl::bc_offset.eq(block_id.to_string()),
+					dsl::tx_id.eq(&*tx_id.to_string()),
+					(dsl::sync_time.eq(Utc::now().naive_utc())),
+				))
+				.on_conflict(dsl::tx_id)
+				.do_update()
+				.set(dsl::sync_time.eq(Utc::now().naive_utc()))
+				.execute(connection)
+				.map(|_| ())
+		})?)
+	}
+
+	#[instrument(skip_all)]
+	pub fn apply_use_agent(
+		&self,
+		connection: &mut PgConnection,
+		external_id: &ExternalId,
+		namespace: &ExternalId,
+	) -> Result<(), StoreError> {
+		let (_, nsid) = self.namespace_by_external_id(connection, namespace)?;
+		use schema::agent::dsl;
+
+		diesel::update(schema::agent::table.filter(dsl::current.ne(0)))
+			.set(dsl::current.eq(0))
+			.execute(connection)?;
+
+		diesel::update(
+			schema::agent::table
+				.filter(dsl::external_id.eq(external_id).and(dsl::namespace_id.eq(nsid))),
+		)
+		.set(dsl::current.eq(1))
+		.execute(connection)?;
+
+		Ok(())
+	}
+
+	#[instrument(skip_all)]
+	pub fn prov_model_for_agent_id(
+		&self,
+		connection: &mut PgConnection,
+		id: &AgentId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		let agent = schema::agent::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::agent::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Agent::as_select())
+			.first(connection)?;
+
+		let namespace = self.namespace_by_external_id(connection, ns)?.0;
+
+		let mut model = ProvModel::default();
+		self.populate_prov_model_for_agent(agent, &namespace, &mut model, connection)?;
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn apply_prov_model_for_agent_id(
+		&self,
+		connection: &mut PgConnection,
+		mut model: ProvModel,
+		id: &AgentId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		if let Some(agent) = schema::agent::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::agent::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Agent::as_select())
+			.first(connection)
+			.optional()?
+		{
+			let namespace = self.namespace_by_external_id(connection, ns)?.0;
+			self.populate_prov_model_for_agent(agent, &namespace, &mut model, connection)?;
+		}
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn prov_model_for_activity_id(
+		&self,
+		connection: &mut PgConnection,
+		id: &ActivityId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		let activity = schema::activity::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::activity::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Activity::as_select())
+			.first(connection)?;
+
+		let namespace = self.namespace_by_external_id(connection, ns)?.0;
+
+		let mut model = ProvModel::default();
+		self.populate_prov_model_for_activity(activity, &namespace, &mut model, connection)?;
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn apply_prov_model_for_activity_id(
+		&self,
+		connection: &mut PgConnection,
+		mut model: ProvModel,
+		id: &ActivityId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		if let Some(activity) = schema::activity::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::activity::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Activity::as_select())
+			.first(connection)
+			.optional()?
+		{
+			let namespace = self.namespace_by_external_id(connection, ns)?.0;
+			self.populate_prov_model_for_activity(activity, &namespace, &mut model, connection)?;
+		}
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn prov_model_for_entity_id(
+		&self,
+		connection: &mut PgConnection,
+		id: &EntityId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		let entity = schema::entity::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::entity::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Entity::as_select())
+			.first(connection)?;
+
+		let namespace = self.namespace_by_external_id(connection, ns)?.0;
+
+		let mut model = ProvModel::default();
+		self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn apply_prov_model_for_entity_id(
+		&self,
+		connection: &mut PgConnection,
+		mut model: ProvModel,
+		id: &EntityId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		if let Some(entity) = schema::entity::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::entity::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Entity::as_select())
+			.first(connection)
+			.optional()?
+		{
+			let namespace = self.namespace_by_external_id(connection, ns)?.0;
+			self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
+		}
+		Ok(model)
+	}
+
+	#[instrument(skip_all)]
+	pub fn prov_model_for_usage(
+		&self,
+		connection: &mut PgConnection,
+		mut model: ProvModel,
+		id: &EntityId,
+		activity_id: &ActivityId,
+		ns: &ExternalId,
+	) -> Result<ProvModel, StoreError> {
+		if let Some(entity) = schema::entity::table
+			.inner_join(schema::namespace::dsl::namespace)
+			.filter(schema::entity::external_id.eq(id.external_id_part()))
+			.filter(schema::namespace::external_id.eq(ns))
+			.select(query::Entity::as_select())
+			.first(connection)
+			.optional()?
+		{
+			if let Some(activity) = schema::activity::table
+				.inner_join(schema::namespace::dsl::namespace)
+				.filter(schema::activity::external_id.eq(id.external_id_part()))
+				.filter(schema::namespace::external_id.eq(ns))
+				.select(query::Activity::as_select())
+				.first(connection)
+				.optional()?
+			{
+				let namespace = self.namespace_by_external_id(connection, ns)?.0;
+				for used in schema::usage::table
+					.filter(schema::usage::activity_id.eq(activity.id))
+					.order(schema::usage::activity_id.asc())
+					.inner_join(schema::entity::table)
+					.select(schema::entity::external_id)
+					.load::<String>(connection)?
+				{
+					model.used(namespace.clone(), activity_id, &EntityId::from_external_id(used));
+				}
+				self.populate_prov_model_for_entity(entity, &namespace, &mut model, connection)?;
+				self.populate_prov_model_for_activity(
+					activity, &namespace, &mut model, connection,
+				)?;
+			}
+		}
+		Ok(model)
+	}
 }
diff --git a/crates/chronicle-persistence/src/query.rs b/crates/chronicle-persistence/src/query.rs
index 8b53edfc4..5b5de60ea 100644
--- a/crates/chronicle-persistence/src/query.rs
+++ b/crates/chronicle-persistence/src/query.rs
@@ -5,62 +5,62 @@ use diesel::prelude::*;
 #[derive(Queryable, Selectable)]
 #[diesel(table_name = namespace)]
 pub struct Namespace {
-    pub external_id: String,
-    pub uuid: String,
+	pub external_id: String,
+	pub uuid: String,
 }
 
 #[derive(Queryable)]
 pub struct LedgerSync {
-    pub bc_offset: String,
-    pub sync_time: Option<NaiveDateTime>,
+	pub bc_offset: String,
+	pub sync_time: Option<NaiveDateTime>,
 }
 
 #[derive(Insertable)]
 #[diesel(table_name = namespace)]
 pub struct NewNamespace<'a> {
-    pub external_id: &'a str,
-    pub uuid: &'a str,
+	pub external_id: &'a str,
+	pub uuid: &'a str,
 }
 
 #[derive(Insertable)]
 #[diesel(table_name = ledgersync)]
 pub struct NewOffset<'a> {
-    pub bc_offset: &'a str,
-    pub sync_time: Option<NaiveDateTime>,
+	pub bc_offset: &'a str,
+	pub sync_time: Option<NaiveDateTime>,
 }
 
 #[derive(Insertable, Queryable, Selectable)]
 #[diesel(table_name = entity_attribute)]
 pub struct EntityAttribute {
-    pub entity_id: i32,
-    pub typename: String,
-    pub value: String,
+	pub entity_id: i32,
+	pub typename: String,
+	pub value: String,
 }
 
 #[derive(Insertable, Queryable, Selectable)]
 #[diesel(table_name = activity_attribute)]
 pub struct ActivityAttribute {
-    pub activity_id: i32,
-    pub typename: String,
-    pub value: String,
+	pub activity_id: i32,
+	pub typename: String,
+	pub value: String,
 }
 
 #[derive(Insertable, Queryable, Selectable)]
 #[diesel(table_name = agent_attribute)]
 pub struct AgentAttribute {
-    pub agent_id: i32,
-    pub typename: String,
-    pub value: String,
+	pub agent_id: i32,
+	pub typename: String,
+	pub value: String,
 }
 
 #[derive(Insertable)]
 #[diesel(table_name = activity)]
 pub struct NewActivity<'a> {
-    pub external_id: &'a str,
-    pub namespace_id: i32,
-    pub started: Option<NaiveDateTime>,
-    pub ended: Option<NaiveDateTime>,
-    pub domaintype: Option<&'a str>,
+	pub external_id: &'a str,
+	pub namespace_id: i32,
+	pub started: Option<NaiveDateTime>,
+	pub ended: Option<NaiveDateTime>,
+	pub domaintype: Option<&'a str>,
 }
 
 #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)]
@@ -71,12 +71,12 @@ pub struct NewActivity<'a> {
 #[diesel(belongs_to(Usage, foreign_key = id))]
 #[diesel(table_name = agent)]
 pub struct Agent {
-    pub id: i32,
-    pub external_id: String,
-    pub namespace_id: i32,
-    pub domaintype: Option<String>,
-    pub current: i32,
-    pub identity_id: Option<i32>,
+	pub id: i32,
+	pub external_id: String,
+	pub namespace_id: i32,
+	pub domaintype: Option<String>,
+	pub current: i32,
+	pub identity_id: Option<i32>,
 }
 
 #[derive(Debug, Queryable, Selectable, Identifiable, PartialEq)]
@@ -84,12 +84,12 @@ pub struct Agent {
 #[diesel(belongs_to(Generation))]
 #[diesel(table_name = activity)]
 pub struct Activity {
-    pub id: i32,
-    pub external_id: String,
-    pub namespace_id: i32,
-    pub domaintype: Option<String>,
-    pub started: Option<NaiveDateTime>,
-    pub ended: Option<NaiveDateTime>,
+	pub id: i32,
+	pub external_id: String,
+	pub namespace_id: i32,
+	pub domaintype: Option<String>,
+	pub started: Option<NaiveDateTime>,
+	pub ended: Option<NaiveDateTime>,
 }
 
 #[derive(Debug, Queryable, Identifiable, Associations, Selectable)]
@@ -99,10 +99,10 @@ pub struct Activity {
 #[diesel(belongs_to(Derivation, foreign_key = id))]
 #[diesel(table_name = entity)]
struct Entity { - pub id: i32, - pub external_id: String, - pub namespace_id: i32, - pub domaintype: Option, + pub id: i32, + pub external_id: String, + pub namespace_id: i32, + pub domaintype: Option, } #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)] @@ -110,8 +110,8 @@ pub struct Entity { #[diesel(primary_key(activity_id, informing_activity_id))] #[diesel(belongs_to(Activity, foreign_key = activity_id, foreign_key = informing_activity_id))] pub struct WasInformedBy { - activity_id: i32, - informing_activity_id: i32, + activity_id: i32, + informing_activity_id: i32, } #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)] @@ -120,8 +120,8 @@ pub struct WasInformedBy { #[diesel(belongs_to(Activity))] #[diesel(belongs_to(Entity, foreign_key = generated_entity_id))] pub struct Generation { - activity_id: i32, - generated_entity_id: i32, + activity_id: i32, + generated_entity_id: i32, } #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)] @@ -130,8 +130,8 @@ pub struct Generation { #[diesel(belongs_to(Activity))] #[diesel(belongs_to(Entity))] pub struct Usage { - activity_id: i32, - entity_id: i32, + activity_id: i32, + entity_id: i32, } #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)] @@ -140,9 +140,9 @@ pub struct Usage { #[diesel(belongs_to(Activity))] #[diesel(primary_key(agent_id, activity_id, role))] pub struct Association { - agent_id: i32, - activity_id: i32, - role: String, + agent_id: i32, + activity_id: i32, + role: String, } #[derive(Debug, Queryable, Selectable, Associations, Identifiable, PartialEq)] @@ -151,9 +151,9 @@ pub struct Association { #[diesel(belongs_to(Agent))] #[diesel(belongs_to(Entity))] pub struct Attribution { - agent_id: i32, - entity_id: i32, - role: String, + agent_id: i32, + entity_id: i32, + role: String, } #[derive(Debug, Queryable, Selectable, Associations, Identifiable, PartialEq)] @@ -162,10 +162,10 @@ pub struct Attribution { #[diesel(belongs_to(Activity))] #[diesel(primary_key(delegate_id, responsible_id, activity_id, role))] pub struct Delegation { - delegate_id: i32, - responsible_id: i32, - activity_id: i32, - role: String, + delegate_id: i32, + responsible_id: i32, + activity_id: i32, + role: String, } #[derive(Debug, Queryable, Selectable, Identifiable, Associations, PartialEq)] @@ -174,17 +174,17 @@ pub struct Delegation { #[diesel(belongs_to(Entity, foreign_key = generated_entity_id, foreign_key = used_entity_id))] #[diesel(primary_key(activity_id, used_entity_id, generated_entity_id, typ))] pub struct Derivation { - activity_id: i32, - used_entity_id: i32, - generated_entity_id: i32, - typ: i32, + activity_id: i32, + used_entity_id: i32, + generated_entity_id: i32, + typ: i32, } #[derive(Insertable, Queryable, Selectable)] #[diesel(table_name = agent)] pub struct NewAgent<'a> { - pub external_id: &'a str, - pub namespace_id: i32, - pub current: i32, - pub domaintype: Option<&'a str>, + pub external_id: &'a str, + pub namespace_id: i32, + pub current: i32, + pub domaintype: Option<&'a str>, } diff --git a/crates/chronicle-persistence/src/queryable.rs b/crates/chronicle-persistence/src/queryable.rs index 370cd7d87..9dfc97806 100644 --- a/crates/chronicle-persistence/src/queryable.rs +++ b/crates/chronicle-persistence/src/queryable.rs @@ -5,39 +5,39 @@ use diesel::{Queryable, Selectable}; #[derive(Default, Queryable, Selectable, SimpleObject)] #[diesel(table_name = crate::schema::agent)] pub struct Agent { - pub id: i32, - pub 
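These row structs exist so queries can lean on Diesel's `Selectable` machinery instead of hand-written column lists. A minimal sketch of the intended pattern, assuming the same `schema` module used by the store code earlier in this diff:

```rust
use diesel::prelude::*;

// Load every entity row in a namespace, typed as the `Entity` struct
// above; `as_select()` keeps the column list in sync with the struct
// fields at compile time.
fn entities_in_namespace(
    connection: &mut PgConnection,
    ns_id: i32,
) -> QueryResult<Vec<Entity>> {
    schema::entity::table
        .filter(schema::entity::namespace_id.eq(ns_id))
        .select(Entity::as_select())
        .load(connection)
}
```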
diff --git a/crates/chronicle-persistence/src/queryable.rs b/crates/chronicle-persistence/src/queryable.rs
index 370cd7d87..9dfc97806 100644
--- a/crates/chronicle-persistence/src/queryable.rs
+++ b/crates/chronicle-persistence/src/queryable.rs
@@ -5,39 +5,39 @@ use diesel::{Queryable, Selectable};
#[derive(Default, Queryable, Selectable, SimpleObject)]
#[diesel(table_name = crate::schema::agent)]
pub struct Agent {
- pub id: i32,
- pub external_id: String,
- pub namespace_id: i32,
- pub domaintype: Option<String>,
- pub current: i32,
- pub identity_id: Option<i32>,
+ pub id: i32,
+ pub external_id: String,
+ pub namespace_id: i32,
+ pub domaintype: Option<String>,
+ pub current: i32,
+ pub identity_id: Option<i32>,
}

#[derive(Default, Queryable, Selectable, SimpleObject)]
#[diesel(table_name = crate::schema::activity)]
pub struct Activity {
- pub id: i32,
- pub external_id: String,
- pub namespace_id: i32,
- pub domaintype: Option<String>,
- pub started: Option<NaiveDateTime>,
- pub ended: Option<NaiveDateTime>,
+ pub id: i32,
+ pub external_id: String,
+ pub namespace_id: i32,
+ pub domaintype: Option<String>,
+ pub started: Option<NaiveDateTime>,
+ pub ended: Option<NaiveDateTime>,
}

#[derive(Queryable, Selectable, SimpleObject)]
#[diesel(table_name = crate::schema::entity)]
pub struct Entity {
- pub id: i32,
- pub external_id: String,
- pub namespace_id: i32,
- pub domaintype: Option<String>,
+ pub id: i32,
+ pub external_id: String,
+ pub namespace_id: i32,
+ pub domaintype: Option<String>,
}

#[derive(Default, Queryable)]
pub struct Namespace {
- _id: i32,
- uuid: String,
- external_id: String,
+ _id: i32,
+ uuid: String,
+ external_id: String,
}

#[Object]
@@ -47,11 +47,11 @@ pub struct Namespace {
/// In order to work on the same namespace discrete Chronicle instances must share
/// the uuid part.
impl Namespace {
- async fn external_id(&self) -> &str {
- &self.external_id
- }
+ async fn external_id(&self) -> &str {
+ &self.external_id
+ }

- async fn uuid(&self) -> &str {
- &self.uuid
- }
+ async fn uuid(&self) -> &str {
+ &self.uuid
+ }
}
diff --git a/crates/chronicle-signing/src/embedded_secret_manager_source.rs b/crates/chronicle-signing/src/embedded_secret_manager_source.rs
index 9e57ad1ea..fce3024a0 100644
--- a/crates/chronicle-signing/src/embedded_secret_manager_source.rs
+++ b/crates/chronicle-signing/src/embedded_secret_manager_source.rs
@@ -1,13 +1,11 @@
use async_trait::async_trait;
-use k256::{
- SecretKey,
-};
+use k256::SecretKey;
use rand::{rngs::StdRng, SeedableRng};
use secret_vault::{Secret, SecretMetadata, SecretVaultRef, SecretVaultResult, SecretsSource};
use secret_vault_value::SecretValue;
use std::{
- collections::{BTreeMap, HashMap},
- sync::Arc,
+ collections::{BTreeMap, HashMap},
+ sync::Arc,
};
use tokio::sync::Mutex;
use tracing::debug;
@@ -15,60 +13,60 @@ use tracing::debug;
use crate::SecretError;

pub struct EmbeddedSecretManagerSource {
- secrets: Arc<Mutex<HashMap<SecretVaultRef, Vec<u8>>>>,
- seeds: BTreeMap<String, [u8; 32]>,
+ secrets: Arc<Mutex<HashMap<SecretVaultRef, Vec<u8>>>>,
+ seeds: BTreeMap<String, [u8; 32]>,
}

impl EmbeddedSecretManagerSource {
- pub fn new() -> Self {
- Self { secrets: Arc::new(Mutex::new(HashMap::new())), seeds: BTreeMap::default() }
- }
+ pub fn new() -> Self {
+ Self { secrets: Arc::new(Mutex::new(HashMap::new())), seeds: BTreeMap::default() }
+ }

- pub fn new_seeded(seeds: BTreeMap<String, [u8; 32]>) -> Self {
- Self { secrets: Arc::new(Mutex::new(HashMap::new())), seeds }
- }
+ pub fn new_seeded(seeds: BTreeMap<String, [u8; 32]>) -> Self {
+ Self { secrets: Arc::new(Mutex::new(HashMap::new())), seeds }
+ }
}
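A hedged sketch of the two construction paths for the embedded source; the seed value is illustrative, the seed-map type follows the declaration above, and any name absent from the map falls back to fresh OS entropy in `new_signing_key` below:

```rust
use std::collections::BTreeMap;

// Deterministic keys for testing: seed `chronicle-pk` explicitly and
// let every other secret name be generated from entropy on demand.
fn test_source() -> EmbeddedSecretManagerSource {
    let mut seeds: BTreeMap<String, [u8; 32]> = BTreeMap::new();
    seeds.insert("chronicle-pk".to_string(), [7u8; 32]); // illustrative seed bytes
    EmbeddedSecretManagerSource::new_seeded(seeds)
}
```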
fn new_signing_key(name: &str, seeds: &BTreeMap<String, [u8; 32]>) -> Result<String, SecretError> {
- let secret = if let Some(seed) = seeds.get(name) {
- SecretKey::from_be_bytes(seed).map_err(|_| SecretError::BadSeed)?
- } else {
- SecretKey::random(StdRng::from_entropy())
- };
- let secret_bytes = secret.to_be_bytes();
- let hex_encoded = format!("0x{}", hex::encode(secret_bytes));
+ let secret = if let Some(seed) = seeds.get(name) {
+ SecretKey::from_be_bytes(seed).map_err(|_| SecretError::BadSeed)?
+ } else {
+ SecretKey::random(StdRng::from_entropy())
+ };
+ let secret_bytes = secret.to_be_bytes();
+ let hex_encoded = format!("0x{}", hex::encode(secret_bytes));

- Ok(hex_encoded)
+ Ok(hex_encoded)
}

#[async_trait]
impl SecretsSource for EmbeddedSecretManagerSource {
- fn name(&self) -> String {
- "EmbeddedSecretManager".to_string()
- }
+ fn name(&self) -> String {
+ "EmbeddedSecretManager".to_string()
+ }

- // Simply create and cache a new signing key for each novel reference
- async fn get_secrets(
- &self,
- references: &[SecretVaultRef],
- ) -> SecretVaultResult<HashMap<SecretVaultRef, Secret>> {
- debug!(get_secrets=?references, "Getting secrets from embedded source");
+ // Simply create and cache a new signing key for each novel reference
+ async fn get_secrets(
+ &self,
+ references: &[SecretVaultRef],
+ ) -> SecretVaultResult<HashMap<SecretVaultRef, Secret>> {
+ debug!(get_secrets=?references, "Getting secrets from embedded source");

- let mut result_map: HashMap<SecretVaultRef, Secret> = HashMap::new();
- let mut secrets = self.secrets.lock().await;
- for secret_ref in references.iter() {
- let secret = secrets.entry(secret_ref.clone()).or_insert_with(|| {
- let secret =
- new_signing_key(secret_ref.key.secret_name.as_ref(), &self.seeds).unwrap();
- secret.into_bytes()
- });
+ let mut result_map: HashMap<SecretVaultRef, Secret> = HashMap::new();
+ let mut secrets = self.secrets.lock().await;
+ for secret_ref in references.iter() {
+ let secret = secrets.entry(secret_ref.clone()).or_insert_with(|| {
+ let secret =
+ new_signing_key(secret_ref.key.secret_name.as_ref(), &self.seeds).unwrap();
+ secret.into_bytes()
+ });

- let secret_value = SecretValue::from(secret);
- let metadata = SecretMetadata::create_from_ref(secret_ref);
+ let secret_value = SecretValue::from(secret);
+ let metadata = SecretMetadata::create_from_ref(secret_ref);

- result_map.insert(secret_ref.clone(), Secret::new(secret_value, metadata));
- }
+ result_map.insert(secret_ref.clone(), Secret::new(secret_value, metadata));
+ }

- Ok(result_map)
- }
+ Ok(result_map)
+ }
}
diff --git a/crates/chronicle-signing/src/lib.rs b/crates/chronicle-signing/src/lib.rs
index 51fa2817a..dae5f7831 100644
--- a/crates/chronicle-signing/src/lib.rs
+++ b/crates/chronicle-signing/src/lib.rs
@@ -1,17 +1,15 @@
-use k256::{
- ecdsa::{
- signature::{Signer, Verifier},
- Signature, SigningKey, VerifyingKey,
- }
+use k256::ecdsa::{
+ signature::{Signer, Verifier},
+ Signature, SigningKey, VerifyingKey,
};
use secret_vault::{
- errors::SecretVaultError, FilesSource, FilesSourceOptions, MultipleSecretsSources, SecretName,
- SecretNamespace, SecretVaultBuilder, SecretVaultRef, SecretVaultView,
+ errors::SecretVaultError, FilesSource, FilesSourceOptions, MultipleSecretsSources, SecretName,
+ SecretNamespace, SecretVaultBuilder, SecretVaultRef, SecretVaultView,
};
use std::{
- collections::BTreeMap,
- path::{Path, PathBuf},
- sync::Arc,
+ collections::BTreeMap,
+ path::{Path, PathBuf},
+ sync::Arc,
};
use thiserror::Error;
use tracing::instrument;
@@ -29,551 +27,558 @@ pub static OPA_PK: &str = "opa-pk";
#[derive(Error, Debug)]
pub enum SecretError {
- #[error("Invalid public key")]
- InvalidPublicKey,
- #[error("Invalid private key")]
- InvalidPrivateKey,
+ #[error("No public key found")]
+ NoPublicKeyFound,
+ #[error("No private key found")]
+ NoPrivateKeyFound,
+ #[error("Decoding failure")]
+ DecodingFailure,
+
+ #[error("Vault {source}")]
+ SecretVault {
+ #[from]
+ #[source]
+ source: SecretVaultError,
+ },
+
+ #[error("Bad BIP39 seed")]
+ BadSeed,
}

pub enum ChronicleSecretsOptions {
- // Connect to hashicorp vault for secrets
- Vault(vault_secret_manager_source::VaultSecretManagerSourceOptions),
- // Generate secrets from entropy in memory on demand
- Embedded,
-
- //Seed secrets with name using a map of secret name to BIP39 seed phrase
- Seeded(BTreeMap<String, [u8; 32]>),
- //Filesystem based keys
- Filesystem(PathBuf),
+ // Connect to hashicorp vault for secrets
+ Vault(vault_secret_manager_source::VaultSecretManagerSourceOptions),
+ // Generate secrets from entropy in memory on demand
+ Embedded,
+
+ //Seed secrets with name using a map of secret name to BIP39 seed phrase
+ Seeded(BTreeMap<String, [u8; 32]>),
+ //Filesystem based keys
+ Filesystem(PathBuf),
}

impl ChronicleSecretsOptions {
- // Get secrets from Hashicorp vault
- pub fn stored_in_vault(
- vault_url: &Url,
- token: &str,
- mount_path: &str,
- ) -> ChronicleSecretsOptions {
- ChronicleSecretsOptions::Vault(
- vault_secret_manager_source::VaultSecretManagerSourceOptions::new(
- vault_url.clone(),
- token,
- mount_path,
- ),
- )
- }
-
- // Load secrets from filesystem at path
- pub fn stored_at_path(path: &Path) -> ChronicleSecretsOptions {
- ChronicleSecretsOptions::Filesystem(path.to_owned())
- }
-
- // Generate secrets in memory on demand
- pub fn generate_in_memory() -> ChronicleSecretsOptions {
- ChronicleSecretsOptions::Embedded
- }
-
- // Use supplied seeds, or fall back to entropy
- pub fn seeded(seeds: BTreeMap<String, [u8; 32]>) -> ChronicleSecretsOptions {
- ChronicleSecretsOptions::Seeded(seeds)
- }
+ // Get secrets from Hashicorp vault
+ pub fn stored_in_vault(
+ vault_url: &Url,
+ token: &str,
+ mount_path: &str,
+ ) -> ChronicleSecretsOptions {
+ ChronicleSecretsOptions::Vault(
+ vault_secret_manager_source::VaultSecretManagerSourceOptions::new(
+ vault_url.clone(),
+ token,
+ mount_path,
+ ),
+ )
+ }
+
+ // Load secrets from filesystem at path
+ pub fn stored_at_path(path: &Path) -> ChronicleSecretsOptions {
+ ChronicleSecretsOptions::Filesystem(path.to_owned())
+ }
+
+ // Generate secrets in memory on demand
+ pub fn generate_in_memory() -> ChronicleSecretsOptions {
+ ChronicleSecretsOptions::Embedded
+ }
+
+ // Use supplied seeds, or fall back to entropy
+ pub fn seeded(seeds: BTreeMap<String, [u8; 32]>) -> ChronicleSecretsOptions {
+ ChronicleSecretsOptions::Seeded(seeds)
+ }
}

#[derive(Clone)]
pub struct ChronicleSigning {
- vault: Arc<tokio::sync::Mutex<Box<dyn SecretVaultView + Send + Sync>>>,
+ vault: Arc<tokio::sync::Mutex<Box<dyn SecretVaultView + Send + Sync>>>,
}

impl core::fmt::Debug for ChronicleSigning {
- fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
- f.debug_struct("ChronicleSecrets").finish()
- }
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("ChronicleSecrets").finish()
+ }
}

impl ChronicleSigning {
- pub async fn new(
- // Secrets are namespace / name pairs
- required_secret_names: Vec<(String, String)>,
- // Secret stores are namespaced
- options: Vec<(String, ChronicleSecretsOptions)>,
- ) -> Result<Self, SecretError> {
- let mut multi_source = MultipleSecretsSources::new();
- let required_secret_refs: Vec<_> = required_secret_names
- .into_iter()
- .map(|(namespace, name)| {
- SecretVaultRef::new(SecretName::new(name))
- .with_namespace(SecretNamespace::new(namespace))
- })
- .collect();
-
- for options in options {
- match options {
- (namespace, ChronicleSecretsOptions::Embedded) => {
- let source = embedded_secret_manager_source::EmbeddedSecretManagerSource::new();
- multi_source =
- multi_source.add_source(&SecretNamespace::new(namespace), source);
- }
- (namespace, ChronicleSecretsOptions::Seeded(seeds)) => {
- let source =
- embedded_secret_manager_source::EmbeddedSecretManagerSource::new_seeded(
- seeds,
- );
- multi_source =
- multi_source.add_source(&SecretNamespace::new(namespace), source);
- }
- (namespace, ChronicleSecretsOptions::Vault(options)) => {
- let source =
- vault_secret_manager_source::VaultSecretManagerSource::with_options(
- options,
- )
- .await?;
- multi_source =
- multi_source.add_source(&SecretNamespace::new(namespace), source);
- }
- (namespace, ChronicleSecretsOptions::Filesystem(path)) => {
- let source = FilesSource::with_options(FilesSourceOptions {
- root_path: Some(path.into_boxed_path()),
- });
- multi_source =
- multi_source.add_source(&SecretNamespace::new(namespace), source);
- }
- }
- }
-
- let vault = SecretVaultBuilder::with_source(multi_source)
- .with_secret_refs(required_secret_refs.iter().collect())
- .build()?;
-
- vault.refresh().await?;
- Ok(Self { vault: Arc::new(tokio::sync::Mutex::new(Box::new(vault.viewer()))) })
- }
+ pub async fn new(
+ // Secrets are namespace / name pairs
+ required_secret_names: Vec<(String, String)>,
+ // Secret stores are namespaced
+ options: Vec<(String, ChronicleSecretsOptions)>,
+ ) -> Result<Self, SecretError> {
+ let mut multi_source = MultipleSecretsSources::new();
+ let required_secret_refs: Vec<_> = required_secret_names
+ .into_iter()
+ .map(|(namespace, name)| {
+ SecretVaultRef::new(SecretName::new(name))
+ .with_namespace(SecretNamespace::new(namespace))
+ })
+ .collect();
+
+ for options in options {
+ match options {
+ (namespace, ChronicleSecretsOptions::Embedded) => {
+ let source = embedded_secret_manager_source::EmbeddedSecretManagerSource::new();
+ multi_source =
+ multi_source.add_source(&SecretNamespace::new(namespace), source);
+ },
+ (namespace, ChronicleSecretsOptions::Seeded(seeds)) => {
+ let source =
+ embedded_secret_manager_source::EmbeddedSecretManagerSource::new_seeded(
+ seeds,
+ );
+ multi_source =
+ multi_source.add_source(&SecretNamespace::new(namespace), source);
+ },
+ (namespace, ChronicleSecretsOptions::Vault(options)) => {
+ let source =
+ vault_secret_manager_source::VaultSecretManagerSource::with_options(
+ options,
+ )
+ .await?;
+ multi_source =
+ multi_source.add_source(&SecretNamespace::new(namespace), source);
+ },
+ (namespace, ChronicleSecretsOptions::Filesystem(path)) => {
+ let source = FilesSource::with_options(FilesSourceOptions {
+ root_path: Some(path.into_boxed_path()),
+ });
+ multi_source =
+ multi_source.add_source(&SecretNamespace::new(namespace), source);
+ },
+ }
+ }
+
+ let vault = SecretVaultBuilder::with_source(multi_source)
+ .with_secret_refs(required_secret_refs.iter().collect())
+ .build()?;
+
+ vault.refresh().await?;
+ Ok(Self { vault: Arc::new(tokio::sync::Mutex::new(Box::new(vault.viewer()))) })
+ }
}
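Construction in practice looks like the following sketch; the namespace constants and the `chronicle_secret_names` helper are the ones defined later in this file, and both required namespaces are mapped to in-memory secrets, as the tests below also do. For production, `stored_in_vault` or `stored_at_path` would replace `generate_in_memory`:

```rust
// Hedged usage sketch: build a signing context whose secrets live in memory.
async fn in_memory_signing() -> Result<ChronicleSigning, SecretError> {
    ChronicleSigning::new(
        chronicle_secret_names(),
        vec![
            (CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
            (BATCHER_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
        ],
    )
    .await
}
```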
#[async_trait::async_trait]
pub trait WithSecret {
- async fn with_signing_key<F, T>(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ f: F,
+ ) -> Result<T, SecretError>
+ where
+ F: Fn(SigningKey) -> T,
+ F: Send,
+ T: Send;
+ async fn with_verifying_key<F, T>(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ f: F,
+ ) -> Result<T, SecretError>
+ where
+ F: Fn(VerifyingKey) -> T,
+ F: Send,
+ T: Send;
+
+ async fn verifying_key(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ ) -> Result<VerifyingKey, SecretError>;
}

#[async_trait::async_trait]
pub trait OwnedSecret {
- async fn copy_signing_key(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- ) -> Result<SigningKey, SecretError>;
+ async fn copy_signing_key(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ ) -> Result<SigningKey, SecretError>;
}

#[async_trait::async_trait]
impl<T: WithSecret + Send + Sync> OwnedSecret for T {
- async fn copy_signing_key(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- ) -> Result<SigningKey, SecretError> {
- let secret =
- WithSecret::with_signing_key(self, secret_namespace, secret_name, |secret| secret)
- .await?;
-
- Ok(secret)
- }
+ async fn copy_signing_key(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ ) -> Result<SigningKey, SecretError> {
+ let secret =
+ WithSecret::with_signing_key(self, secret_namespace, secret_name, |secret| secret)
+ .await?;
+
+ Ok(secret)
+ }
}

#[async_trait::async_trait]
impl WithSecret for ChronicleSigning {
- async fn with_signing_key<F, T>(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- f: F,
- ) -> Result<T, SecretError>
- where
- F: Fn(SigningKey) -> T,
- F: Send,
- T: Send,
- {
- let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
- .with_namespace(secret_namespace.into());
- let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
-
- let signing_result = secret.value.exposed_in_as_str(|secret| {
- (
- // Convert hex encoded seed to SigningKey
- hex::decode(secret.trim_start_matches("0x")).map_err(|_| SecretError::DecodingFailure).and_then(
- |secret| SigningKey::from_bytes(&secret)
- .map_err(|_| SecretError::InvalidPrivateKey))
- .map(&f),
- secret
- )
- });
-
- Ok(signing_result?)
- }
-
- async fn with_verifying_key<F, T>(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- f: F,
- ) -> Result<T, SecretError>
- where
- F: Fn(VerifyingKey) -> T,
- F: Send,
- T: Send,
- {
- let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
- .with_namespace(secret_namespace.into());
- let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
-
- let signing_result = secret.value.exposed_in_as_str(|secret| {
- (
- // Convert hex encoded seed to SigningKey
- hex::decode(secret.trim_start_matches("0x")).map_err(|_| SecretError::DecodingFailure).and_then(
- |secret| SigningKey::from_bytes(&secret)
- .map_err(|_| SecretError::InvalidPrivateKey))
- .map(|signing_key| f(signing_key.verifying_key())),
- secret
- )
- });
-
- Ok(signing_result?)
- }
-
- async fn verifying_key(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- ) -> Result<VerifyingKey, SecretError> {
- let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
- .with_namespace(secret_namespace.into());
- let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
-
- let key = secret.value.exposed_in_as_str(|secret| {
- (
- // Convert hex encoded seed to SigningKey
- hex::decode(secret.trim_start_matches("0x")).map_err(|_| SecretError::DecodingFailure).and_then(
- |decoded_secret| SigningKey::from_bytes(&decoded_secret)
- .map_err(|_| SecretError::InvalidPrivateKey))
- .map(|signing_key| signing_key.verifying_key()),
- secret
- )
- });
-
- Ok(key?)
- }
+ async fn with_signing_key<F, T>(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ f: F,
+ ) -> Result<T, SecretError>
+ where
+ F: Fn(SigningKey) -> T,
+ F: Send,
+ T: Send,
+ {
+ let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
+ .with_namespace(secret_namespace.into());
+ let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
+
+ let signing_result = secret.value.exposed_in_as_str(|secret| {
+ (
+ // Convert hex encoded seed to SigningKey
+ hex::decode(secret.trim_start_matches("0x"))
+ .map_err(|_| SecretError::DecodingFailure)
+ .and_then(|secret| {
+ SigningKey::from_bytes(&secret).map_err(|_| SecretError::InvalidPrivateKey)
+ })
+ .map(&f),
+ secret,
+ )
+ });
+
+ Ok(signing_result?)
+ }
+
+ async fn with_verifying_key<F, T>(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ f: F,
+ ) -> Result<T, SecretError>
+ where
+ F: Fn(VerifyingKey) -> T,
+ F: Send,
+ T: Send,
+ {
+ let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
+ .with_namespace(secret_namespace.into());
+ let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
+
+ let signing_result = secret.value.exposed_in_as_str(|secret| {
+ (
+ // Convert hex encoded seed to SigningKey
+ hex::decode(secret.trim_start_matches("0x"))
+ .map_err(|_| SecretError::DecodingFailure)
+ .and_then(|secret| {
+ SigningKey::from_bytes(&secret).map_err(|_| SecretError::InvalidPrivateKey)
+ })
+ .map(|signing_key| f(signing_key.verifying_key())),
+ secret,
+ )
+ });
+
+ Ok(signing_result?)
+ }
+
+ async fn verifying_key(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ ) -> Result<VerifyingKey, SecretError> {
+ let secret_ref = SecretVaultRef::new(SecretName::new(secret_name.to_owned()))
+ .with_namespace(secret_namespace.into());
+ let secret = self.vault.lock().await.require_secret_by_ref(&secret_ref).await?;
+
+ let key = secret.value.exposed_in_as_str(|secret| {
+ (
+ // Convert hex encoded seed to SigningKey
+ hex::decode(secret.trim_start_matches("0x"))
+ .map_err(|_| SecretError::DecodingFailure)
+ .and_then(|decoded_secret| {
+ SigningKey::from_bytes(&decoded_secret)
+ .map_err(|_| SecretError::InvalidPrivateKey)
+ })
+ .map(|signing_key| signing_key.verifying_key()),
+ secret,
+ )
+ });
+
+ Ok(key?)
+ }
}

/// Trait for signing with a key known by chronicle
#[async_trait::async_trait]
pub trait ChronicleSigner {
- /// Sign data with a known key and return a signature
- async fn sign(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- data: &[u8],
- ) -> Result<Signature, SecretError>;
-
- /// Verify a signature with a known key
- async fn verify(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- data: &[u8],
- signature: &[u8],
- ) -> Result<bool, SecretError>;
+ /// Sign data with a known key and return a signature
+ async fn sign(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ data: &[u8],
+ ) -> Result<Signature, SecretError>;
+
+ /// Verify a signature with a known key
+ async fn verify(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ data: &[u8],
+ signature: &[u8],
+ ) -> Result<bool, SecretError>;
}

#[async_trait::async_trait]
impl<T: WithSecret + Send + Sync> ChronicleSigner for T {
- /// Sign data with the chronicle key and return a signature
- async fn sign(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- data: &[u8],
- ) -> Result<Signature, SecretError> {
- self.with_signing_key(secret_namespace, secret_name, |signing_key| {
- let s: Signature = signing_key.sign(data);
- s
- })
- .await
- }
-
- /// Verify a signature with the chronicle key
- async fn verify(
- &self,
- secret_namespace: &str,
- secret_name: &str,
- data: &[u8],
- signature: &[u8],
- ) -> Result<bool, SecretError> {
- self.with_verifying_key(secret_namespace, secret_name, |verifying_key| {
- let signature: Signature = k256::ecdsa::signature::Signature::from_bytes(signature)
- .map_err(|_| SecretError::InvalidPublicKey)?;
-
- Ok(verifying_key.verify(data, &signature).is_ok())
- })
- .await?
- }
+ /// Sign data with the chronicle key and return a signature
+ async fn sign(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ data: &[u8],
+ ) -> Result<Signature, SecretError> {
+ self.with_signing_key(secret_namespace, secret_name, |signing_key| {
+ let s: Signature = signing_key.sign(data);
+ s
+ })
+ .await
+ }
+
+ /// Verify a signature with the chronicle key
+ async fn verify(
+ &self,
+ secret_namespace: &str,
+ secret_name: &str,
+ data: &[u8],
+ signature: &[u8],
+ ) -> Result<bool, SecretError> {
+ self.with_verifying_key(secret_namespace, secret_name, |verifying_key| {
+ let signature: Signature = k256::ecdsa::signature::Signature::from_bytes(signature)
+ .map_err(|_| SecretError::InvalidPublicKey)?;
+
+ Ok(verifying_key.verify(data, &signature).is_ok())
+ })
+ .await?
+ }
}
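A round trip through the blanket impl above, assuming a `ChronicleSigning` built as sketched earlier; `to_vec()` on the signature mirrors the conversion the batcher impl below already uses:

```rust
// Sign a payload under the chronicle namespace, then check it.
async fn round_trip(secrets: &ChronicleSigning) -> Result<bool, SecretError> {
    let payload = b"example payload";
    let sig = secrets.sign(CHRONICLE_NAMESPACE, CHRONICLE_PK, payload).await?;
    // `verify` returns Ok(false) on a mismatched payload rather than an error.
    secrets.verify(CHRONICLE_NAMESPACE, CHRONICLE_PK, payload, &sig.to_vec()).await
}
```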
/// Trait for signing with a key known by the batcher
#[async_trait::async_trait]
pub trait BatcherKnownKeyNamesSigner {
- /// Sign data with the batcher key and return a signature in low-s form, as this
- /// is required by sawtooth for batcher signatures
- async fn batcher_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;
+ /// Sign data with the batcher key and return a signature in low-s form, as this
+ /// is required by sawtooth for batcher signatures
+ async fn batcher_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;

- /// Verify a signature with the batcher key
- async fn batcher_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;
+ /// Verify a signature with the batcher key
+ async fn batcher_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;

- /// Get the verifying key for the batcher key
- async fn batcher_verifying(&self) -> Result<VerifyingKey, SecretError>;
+ /// Get the verifying key for the batcher key
+ async fn batcher_verifying(&self) -> Result<VerifyingKey, SecretError>;
}

/// Trait for signing with a key known by chronicle
#[async_trait::async_trait]
pub trait ChronicleKnownKeyNamesSigner {
- /// Sign data with the chronicle key and return a signature
- async fn chronicle_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;
+ /// Sign data with the chronicle key and return a signature
+ async fn chronicle_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;

- /// Verify a signature with the chronicle key
- async fn chronicle_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;
+ /// Verify a signature with the chronicle key
+ async fn chronicle_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;

- /// Get the verifying key for the chronicle key
- async fn chronicle_verifying(&self) -> Result<VerifyingKey, SecretError>;
+ /// Get the verifying key for the chronicle key
+ async fn chronicle_verifying(&self) -> Result<VerifyingKey, SecretError>;
}

/// Trait for signing with a key known by OPA
#[async_trait::async_trait]
pub trait OpaKnownKeyNamesSigner {
- /// Sign data with the OPA key and return a signature
- async fn opa_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;
+ /// Sign data with the OPA key and return a signature
+ async fn opa_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError>;

- /// Verify a signature with the OPA key
- async fn opa_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;
+ /// Verify a signature with the OPA key
+ async fn opa_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError>;

- /// Get the verifying key for the OPA key
- async fn opa_verifying(&self) -> Result<VerifyingKey, SecretError>;
+ /// Get the verifying key for the OPA key
+ async fn opa_verifying(&self) -> Result<VerifyingKey, SecretError>;
}

#[async_trait::async_trait]
impl<T: ChronicleSigner + WithSecret + Send + Sync> BatcherKnownKeyNamesSigner for T {
- // Sign with the batcher key and return a signature in low-s form, as this
- // is required by sawtooth for batcher signatures
- #[instrument(skip(self, data), level = "trace", name = "batcher_sign", fields(
+ // Sign with the batcher key and return a signature in low-s form, as this
+ // is required by sawtooth for batcher signatures
+ #[instrument(skip(self, data), level = "trace", name = "batcher_sign", fields(
namespace = BATCHER_NAMESPACE,
pk = BATCHER_PK
))]
- async fn batcher_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
- let s = self.sign(BATCHER_NAMESPACE, BATCHER_PK, data).await?;
+ async fn batcher_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
+ let s = self.sign(BATCHER_NAMESPACE, BATCHER_PK, data).await?;

- let s = s.normalize_s().unwrap_or(s);
+ let s = s.normalize_s().unwrap_or(s);

- Ok(s.to_vec())
- }
+ Ok(s.to_vec())
+ }

- #[instrument(skip(self, data, signature), level = "trace", name = "batcher_verify", fields(
+ #[instrument(skip(self, data, signature), level = "trace", name = "batcher_verify", fields(
namespace = BATCHER_NAMESPACE,
pk = BATCHER_PK
))]
- async fn batcher_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError> {
- self.verify(BATCHER_NAMESPACE, BATCHER_PK, data, signature).await
- }
+ async fn batcher_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError> {
+ self.verify(BATCHER_NAMESPACE, BATCHER_PK, data, signature).await
+ }

- #[instrument(skip(self), level = "trace", name = "batcher_verifying", fields(
+ #[instrument(skip(self), level = "trace", name = "batcher_verifying", fields(
namespace = BATCHER_NAMESPACE,
pk = BATCHER_PK
))]
- async fn batcher_verifying(&self) -> Result<VerifyingKey, SecretError> {
- self.verifying_key(BATCHER_NAMESPACE, BATCHER_PK).await
- }
+ async fn batcher_verifying(&self) -> Result<VerifyingKey, SecretError> {
+ self.verifying_key(BATCHER_NAMESPACE, BATCHER_PK).await
+ }
}

#[async_trait::async_trait]
impl<T: ChronicleSigner + WithSecret + Send + Sync> ChronicleKnownKeyNamesSigner for T {
- #[instrument(skip(self, data), level = "trace", name = "chronicle_sign", fields(
+ #[instrument(skip(self, data), level = "trace", name = "chronicle_sign", fields(
namespace = CHRONICLE_NAMESPACE,
pk = CHRONICLE_PK
))]
- async fn chronicle_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
- Ok(self.sign(CHRONICLE_NAMESPACE, CHRONICLE_PK, data).await?.to_vec())
- }
+ async fn chronicle_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
+ Ok(self.sign(CHRONICLE_NAMESPACE, CHRONICLE_PK, data).await?.to_vec())
+ }

- #[instrument(skip(self, data, signature), level = "trace", name = "chronicle_verify", fields(
+ #[instrument(skip(self, data, signature), level = "trace", name = "chronicle_verify", fields(
namespace = CHRONICLE_NAMESPACE,
pk = CHRONICLE_PK
))]
- async fn chronicle_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError> {
- self.verify(CHRONICLE_NAMESPACE, CHRONICLE_PK, data, signature).await
- }
+ async fn chronicle_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError> {
+ self.verify(CHRONICLE_NAMESPACE, CHRONICLE_PK, data, signature).await
+ }

- #[instrument(skip(self), level = "trace", name = "chronicle_verifying", fields(
+ #[instrument(skip(self), level = "trace", name = "chronicle_verifying", fields(
namespace = CHRONICLE_NAMESPACE,
pk = CHRONICLE_PK
))]
- async fn chronicle_verifying(&self) -> Result<VerifyingKey, SecretError> {
- self.verifying_key(CHRONICLE_NAMESPACE, CHRONICLE_PK).await
- }
+ async fn chronicle_verifying(&self) -> Result<VerifyingKey, SecretError> {
+ self.verifying_key(CHRONICLE_NAMESPACE, CHRONICLE_PK).await
+ }
}

#[async_trait::async_trait]
impl<T: ChronicleSigner + WithSecret + Send + Sync> OpaKnownKeyNamesSigner for T {
- #[instrument(skip(self), level = "trace", name = "opa_sign", fields(
+ #[instrument(skip(self), level = "trace", name = "opa_sign", fields(
namespace = OPA_NAMESPACE,
pk = OPA_PK
))]
- async fn opa_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
- let s = self.sign(OPA_NAMESPACE, OPA_PK, data).await?;
+ async fn opa_sign(&self, data: &[u8]) -> Result<Vec<u8>, SecretError> {
+ let s = self.sign(OPA_NAMESPACE, OPA_PK, data).await?;

- let s = s.normalize_s().unwrap_or(s);
+ let s = s.normalize_s().unwrap_or(s);

- Ok(s.to_vec())
- }
+ Ok(s.to_vec())
+ }

- #[instrument(skip(self, data, signature), level = "trace", name = "opa_verify", fields(
+ #[instrument(skip(self, data, signature), level = "trace", name = "opa_verify", fields(
namespace = OPA_NAMESPACE,
pk = OPA_PK
))]
- async fn opa_verify(&self, data: &[u8], signature: &[u8]) -> Result<bool, SecretError> {
- self.verify(OPA_NAMESPACE, OPA_PK, data, signature).await
+ }

- #[instrument(skip(self), level = "trace", name = "opa_verifying", fields(
+ #[instrument(skip(self), level = "trace", name = "opa_verifying", fields(
namespace = OPA_NAMESPACE,
pk = OPA_PK
))]
- async fn opa_verifying(&self) -> Result<VerifyingKey, SecretError> {
- self.verifying_key(OPA_NAMESPACE, OPA_PK).await
- }
+ async fn opa_verifying(&self) -> Result<VerifyingKey, SecretError> {
+ self.verifying_key(OPA_NAMESPACE, OPA_PK).await
+ }
}

pub fn chronicle_secret_names() -> Vec<(String, String)> {
- vec![
- (CHRONICLE_NAMESPACE.to_string(), CHRONICLE_PK.to_string()),
- (BATCHER_NAMESPACE.to_string(), BATCHER_PK.to_string()),
- ]
+ vec![
+ (CHRONICLE_NAMESPACE.to_string(), CHRONICLE_PK.to_string()),
+ (BATCHER_NAMESPACE.to_string(), BATCHER_PK.to_string()),
+ ]
}

pub fn opa_secret_names() -> Vec<(String, String)> {
- vec![
- (OPA_NAMESPACE.to_string(), OPA_PK.to_string()),
- (BATCHER_NAMESPACE.to_string(), BATCHER_PK.to_string()),
- ]
+ vec![
+ (OPA_NAMESPACE.to_string(), OPA_PK.to_string()),
+ (BATCHER_NAMESPACE.to_string(), BATCHER_PK.to_string()),
+ ]
}

#[cfg(test)]
mod tests {
- use super::*;
- use k256::schnorr::signature::Signature;
-
- #[tokio::test]
- async fn embedded_keys() {
- let secrets = ChronicleSigning::new(
- chronicle_secret_names(),
- vec![(CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::Embedded)],
- )
- .await
- .unwrap();
-
- secrets
- .with_signing_key(CHRONICLE_NAMESPACE, "chronicle-pk", |signing_key| {
- assert_eq!(signing_key.to_bytes().len(), 32, "Signing key should be 32 bytes");
+ })
+ .await
+ .unwrap();
+
+ secrets
+ .with_verifying_key(CHRONICLE_NAMESPACE, "chronicle-pk", |verifying_key| {
+ assert_eq!(verifying_key.to_bytes().len(), 33, "Verifying key should be 33 bytes");
+ })
+ .await
+ .unwrap();
+
+ let sig = secrets
+ .sign(CHRONICLE_NAMESPACE, "chronicle-pk", "hello world".as_bytes())
+ .await
+ .unwrap();
+
+ assert!(secrets
+ .verify(CHRONICLE_NAMESPACE, "chronicle-pk", "hello world".as_bytes(), sig.as_bytes())
+ .await
+ .unwrap());
+
+ assert!(!secrets
+ .verify(CHRONICLE_NAMESPACE, "chronicle-pk", "boom".as_bytes(), sig.as_bytes())
+ .await
+ .unwrap());
+ }
+
+ #[tokio::test]
+ async fn vault_keys() {
+ let secrets = ChronicleSigning::new(
+ chronicle_secret_names(),
+ vec![(CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::Embedded)],
+ )
+ .await
+ .unwrap();
+
+ secrets
+ .with_signing_key(CHRONICLE_NAMESPACE, "chronicle-pk", |signing_key| {
+ assert_eq!(signing_key.to_bytes().len(), 32, "Signing key should be 32 bytes");
+ })
+ .await
+ .unwrap();
+
+ secrets
+ .with_verifying_key(CHRONICLE_NAMESPACE, "chronicle-pk", |verifying_key| {
+ assert_eq!(verifying_key.to_bytes().len(), 33, "Verifying key should be 33 bytes");
+ })
+ .await
+ .unwrap();
+
+ let sig = secrets
+ .sign(CHRONICLE_NAMESPACE, "chronicle-pk", "hello world".as_bytes())
+ .await
+ .unwrap();
+
+ assert!(secrets
+ .verify(CHRONICLE_NAMESPACE, "chronicle-pk", "hello world".as_bytes(), sig.as_bytes())
+ .await
+ .unwrap());
+
+ assert!(!secrets
+ .verify(CHRONICLE_NAMESPACE, "chronicle-pk", "boom".as_bytes(), sig.as_bytes())
+ .await
+ .unwrap());
+ }
}
diff --git a/crates/chronicle-signing/src/vault_secret_manager_source.rs b/crates/chronicle-signing/src/vault_secret_manager_source.rs
index 897c27f47..fca9c5259 100644
--- a/crates/chronicle-signing/src/vault_secret_manager_source.rs
+++ b/crates/chronicle-signing/src/vault_secret_manager_source.rs
@@ -2,120 +2,120 @@ use std::{collections::HashMap, sync::Arc};
use async_trait::*;
use secret_vault::{
- errors::{SecretVaultError, SecretVaultErrorPublicGenericDetails, SecretsSourceError},
- Secret, SecretMetadata, SecretVaultRef, SecretVaultResult, SecretsSource,
+ errors::{SecretVaultError, SecretVaultErrorPublicGenericDetails, SecretsSourceError},
+ Secret, SecretMetadata, SecretVaultRef, SecretVaultResult, SecretsSource,
};
use secret_vault_value::SecretValue;
use tokio::sync::Mutex;
use tracing::*;
use url::Url;
use vaultrs::{
- client::{VaultClient, VaultClientSettingsBuilder},
- kv2,
+ client::{VaultClient, VaultClientSettingsBuilder},
+ kv2,
};

#[derive(Debug, Clone, Eq, PartialEq)]
pub struct VaultSecretManagerSourceOptions {
- pub vault_url: Url,
- pub token: String,
- pub mount_path: String,
+ pub vault_url: Url,
+ pub token: String,
+ pub mount_path: String,
}

impl VaultSecretManagerSourceOptions {
- pub fn new(vault_url: Url, token: &str, mount_path: &str) -> Self {
- VaultSecretManagerSourceOptions {
- vault_url,
- token: token.to_owned(),
- mount_path: mount_path.to_owned(),
- }
- }
+ pub fn new(vault_url: Url, token: &str, mount_path: &str) -> Self {
+ VaultSecretManagerSourceOptions {
+ vault_url,
+ token: token.to_owned(),
+ mount_path: mount_path.to_owned(),
+ }
+ }
}

#[derive(Clone)]
pub struct VaultSecretManagerSource {
- options: VaultSecretManagerSourceOptions,
- client: Arc<Mutex<VaultClient>>,
+ options: VaultSecretManagerSourceOptions,
+ client: Arc<Mutex<VaultClient>>,
}
impl VaultSecretManagerSource {
- pub async fn with_options(options: VaultSecretManagerSourceOptions) -> SecretVaultResult<Self> {
- Ok(VaultSecretManagerSource {
- options: options.clone(),
- client: Arc::new(Mutex::new(
- VaultClient::new(
- VaultClientSettingsBuilder::default()
- .address(options.vault_url.as_str())
- .token(options.token)
- .build()
- .unwrap(),
- )
- .map_err(|e| {
- SecretVaultError::SecretsSourceError(
- SecretsSourceError::new(
- SecretVaultErrorPublicGenericDetails::new(format!("{}", e)),
- format!("Vault error: {}", e),
- )
- .with_root_cause(Box::new(e)),
- )
- })?,
- )),
- })
- }
+ pub async fn with_options(options: VaultSecretManagerSourceOptions) -> SecretVaultResult<Self> {
+ Ok(VaultSecretManagerSource {
+ options: options.clone(),
+ client: Arc::new(Mutex::new(
+ VaultClient::new(
+ VaultClientSettingsBuilder::default()
+ .address(options.vault_url.as_str())
+ .token(options.token)
+ .build()
+ .unwrap(),
+ )
+ .map_err(|e| {
+ SecretVaultError::SecretsSourceError(
+ SecretsSourceError::new(
+ SecretVaultErrorPublicGenericDetails::new(format!("{}", e)),
+ format!("Vault error: {}", e),
+ )
+ .with_root_cause(Box::new(e)),
+ )
+ })?,
+ )),
+ })
+ }
}

#[async_trait]
impl SecretsSource for VaultSecretManagerSource {
- fn name(&self) -> String {
- "HashiVaultSecretManager".to_string()
- }
+ fn name(&self) -> String {
+ "HashiVaultSecretManager".to_string()
+ }

- async fn get_secrets(
- &self,
- references: &[SecretVaultRef],
- ) -> SecretVaultResult<HashMap<SecretVaultRef, Secret>> {
- let mut result_map: HashMap<SecretVaultRef, Secret> = HashMap::new();
- let client = &*self.client.lock().await;
+ async fn get_secrets(
+ &self,
+ references: &[SecretVaultRef],
+ ) -> SecretVaultResult<HashMap<SecretVaultRef, Secret>> {
+ let mut result_map: HashMap<SecretVaultRef, Secret> = HashMap::new();
+ let client = &*self.client.lock().await;

- let mut results = vec![];
- for secret_ref in references {
- results.push((
- secret_ref.clone(),
- kv2::read(client, &self.options.mount_path, secret_ref.key.secret_name.as_ref())
- .await,
- ));
- }
+ let mut results = vec![];
+ for secret_ref in references {
+ results.push((
+ secret_ref.clone(),
+ kv2::read(client, &self.options.mount_path, secret_ref.key.secret_name.as_ref())
+ .await,
+ ));
+ }

- for (secret_ref, result) in results {
- match result {
- Ok(vault_secret) => {
- let metadata = SecretMetadata::create_from_ref(&secret_ref);
- result_map.insert(
- secret_ref.clone(),
- Secret::new(SecretValue::new(vault_secret), metadata),
- );
- }
- Err(err) => {
- error!(
+ for (secret_ref, result) in results {
+ match result {
+ Ok(vault_secret) => {
+ let metadata = SecretMetadata::create_from_ref(&secret_ref);
+ result_map.insert(
+ secret_ref.clone(),
+ Secret::new(SecretValue::new(vault_secret), metadata),
+ );
+ },
+ Err(err) => {
+ error!(
"Unable to read secret or secret version {}:{}/{:?}: {}.",
self.options.mount_path, &secret_ref.key.secret_name,
&secret_ref.key.secret_version, err
);
- return Err(SecretVaultError::SecretsSourceError(SecretsSourceError::new(
- SecretVaultErrorPublicGenericDetails::new(format!(
- "Unable to read secret or secret version {}/{:?}: {}.",
- self.options.mount_path, &secret_ref.key.secret_name, err
- )),
- format!(
- "Unable to read secret or secret version {}/{:?}: {}.",
- self.options.mount_path, &secret_ref.key.secret_name, err
- ),
- )));
- }
- }
- }
+ return Err(SecretVaultError::SecretsSourceError(SecretsSourceError::new(
+ SecretVaultErrorPublicGenericDetails::new(format!(
+ "Unable to read secret or secret version {}/{:?}: {}.",
+ self.options.mount_path, &secret_ref.key.secret_name, err
+ )),
+ format!(
+ "Unable to read secret or secret version {}/{:?}: {}.",
+ self.options.mount_path, &secret_ref.key.secret_name, err
+ ),
+ )));
+ },
+ }
+ }

- Ok(result_map)
- }
+ Ok(result_map)
+ }
}
diff --git a/crates/chronicle-synth/build.rs b/crates/chronicle-synth/build.rs
index 5a1d86bbe..afb2c9546 100644
--- a/crates/chronicle-synth/build.rs
+++ b/crates/chronicle-synth/build.rs
@@ -1,8 +1,8 @@
fn main() {
- //Create a .VERSION file containing 'local' if it does not exist
+ //Create a .VERSION file containing 'local' if it does not exist

- let version_file = std::path::Path::new("../../.VERSION");
- if !version_file.exists() {
- std::fs::write(version_file, "local").expect("Unable to write file");
- }
+ let version_file = std::path::Path::new("../../.VERSION");
+ if !version_file.exists() {
+ std::fs::write(version_file, "local").expect("Unable to write file");
+ }
}
diff --git a/crates/chronicle-synth/src/collection.rs b/crates/chronicle-synth/src/collection.rs
index 3285ce80d..79bf899a7 100644
--- a/crates/chronicle-synth/src/collection.rs
+++ b/crates/chronicle-synth/src/collection.rs
@@ -1,7 +1,7 @@
use std::{
- fmt::Display,
- fs::File,
- path::{Path, PathBuf},
+ fmt::Display,
+ fs::File,
+ path::{Path, PathBuf},
};

use serde_json::Value;
@@ -12,208 +12,208 @@ use crate::error::ChronicleSynthError;
/// operation collection.
#[derive(Debug)]
pub enum Collection {
- Operation(Operation),
- Generator(Generator),
+ Operation(Operation),
+ Generator(Generator),
}

/// `Operation` refers to a Synth schema collection that generates a Chronicle operation.
/// An `Operation` usually has dependencies in the form of component [`Generator`]s.
#[derive(Debug)]
pub enum Operation {
- ActivityExists,
- ActivityUses,
- AgentActsOnBehalfOf,
- AgentExists,
- CreateNamespace,
- EndActivity,
- EntityDerive,
- EntityExists,
- SetActivityAttributes,
- SetAgentAttributes,
- SetEntityAttributes,
- StartActivity,
- WasAssociatedWith,
- WasAssociatedWithHasRole,
- WasAttributedTo,
- WasGeneratedBy,
- WasInformedBy,
- DomainCollection(DomainCollection),
+ ActivityExists,
+ ActivityUses,
+ AgentActsOnBehalfOf,
+ AgentExists,
+ CreateNamespace,
+ EndActivity,
+ EntityDerive,
+ EntityExists,
+ SetActivityAttributes,
+ SetAgentAttributes,
+ SetEntityAttributes,
+ StartActivity,
+ WasAssociatedWith,
+ WasAssociatedWithHasRole,
+ WasAttributedTo,
+ WasGeneratedBy,
+ WasInformedBy,
+ DomainCollection(DomainCollection),
}

/// `Generator` refers to a Synth schema collection that generates a component of a Chronicle
/// operation, as opposed to being an operation itself. A `Generator` should have no dependencies.
#[derive(Debug)]
pub enum Generator {
- ActivityName,
- SecondActivityName,
- AgentName,
- SecondAgentName,
- Attribute,
- Attributes,
- DateTime,
- DomainTypeId,
- EntityName,
- SecondEntityName,
- Namespace,
- NamespaceUuid,
- Role,
- Roles,
- SameNamespaceName,
- SameNamespaceUuid,
- DomainCollection(DomainCollection),
+ ActivityName,
+ SecondActivityName,
+ AgentName,
+ SecondAgentName,
+ Attribute,
+ Attributes,
+ DateTime,
+ DomainTypeId,
+ EntityName,
+ SecondEntityName,
+ Namespace,
+ NamespaceUuid,
+ Role,
+ Roles,
+ SameNamespaceName,
+ SameNamespaceUuid,
+ DomainCollection(DomainCollection),
}
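The split between `Operation` and `Generator` matters because operation schemas refer to generator collections by `@name` references (visible in the `set_attributes` JSON later in this diff). A domain-specific collection is just a name plus a Synth schema value; an illustrative sketch using the `DomainCollection` type defined immediately below, with a made-up domain type:

```rust
use serde_json::json;

// Hypothetical generator collection for a "Certificate" domain type;
// mirrors the shape produced by domain_type_id_for_domain in domain.rs.
fn certificate_domain_type_id() -> DomainCollection {
    DomainCollection::new(
        "certificate_domain_type_id",
        json!({ "type": "string", "constant": "Certificate" }),
    )
}
```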
/// Represents a Synth collection that is generated specifically for a Chronicle domain.
#[derive(Debug)]
pub struct DomainCollection {
- pub name: String,
- pub schema: Value,
+ pub name: String,
+ pub schema: Value,
}

impl DomainCollection {
- pub fn new(name: impl Into<String>, schema: Value) -> Self {
- let name = name.into();
- Self { name, schema }
- }
+ pub fn new(name: impl Into<String>, schema: Value) -> Self {
+ let name = name.into();
+ Self { name, schema }
+ }
}

pub trait CollectionHandling {
- fn name(&self) -> String
- where
- Self: Display,
- {
- self.to_string()
- }
-
- fn path(&self) -> PathBuf
- where
- Self: Display,
- {
- Path::new(&format!("{}.json", self)).to_path_buf()
- }
-
- fn json_schema(&self) -> Result<Value, ChronicleSynthError>
- where
- Self: Display;
+ fn name(&self) -> String
+ where
+ Self: Display,
+ {
+ self.to_string()
+ }
+
+ fn path(&self) -> PathBuf
+ where
+ Self: Display,
+ {
+ Path::new(&format!("{}.json", self)).to_path_buf()
+ }
+
+ fn json_schema(&self) -> Result<Value, ChronicleSynthError>
+ where
+ Self: Display;
}

impl From<Operation> for Collection {
- fn from(operation: Operation) -> Self {
- Self::Operation(operation)
- }
+ fn from(operation: Operation) -> Self {
+ Self::Operation(operation)
+ }
}

impl From<Generator> for Collection {
- fn from(generator: Generator) -> Self {
- Self::Generator(generator)
- }
+ fn from(generator: Generator) -> Self {
+ Self::Generator(generator)
+ }
}

impl Display for Collection {
- fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
- match self {
- Collection::Operation(operation) => write!(f, "{}", operation),
- Collection::Generator(generator) => write!(f, "{}", generator),
- }
- }
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Collection::Operation(operation) => write!(f, "{}", operation),
+ Collection::Generator(generator) => write!(f, "{}", generator),
+ }
+ }
}

impl CollectionHandling for Collection {
- fn json_schema(&self) -> Result<Value, ChronicleSynthError> {
- match self {
- Collection::Operation(operation) => operation.json_schema(),
- Collection::Generator(generator) => generator.json_schema(),
- }
- }
+ fn json_schema(&self) -> Result<Value, ChronicleSynthError> {
+ match self {
+ Collection::Operation(operation) => operation.json_schema(),
+ Collection::Generator(generator) => generator.json_schema(),
+ }
+ }
}

impl CollectionHandling for Operation {
- fn json_schema(&self) -> Result<Value, ChronicleSynthError>
- where
- Self: Display,
- {
- match self {
- Self::DomainCollection(domain_collection) => Ok(domain_collection.schema.to_owned()),
- _ => {
- let path = self.path();
- let reader = File::open(path)?;
- let schema: serde_json::Value = serde_json::from_reader(reader)?;
- Ok(schema)
- }
- }
- }
+ fn json_schema(&self) -> Result<Value, ChronicleSynthError>
+ where
+ Self: Display,
+ {
+ match self {
+ Self::DomainCollection(domain_collection) => Ok(domain_collection.schema.to_owned()),
+ _ => {
+ let path = self.path();
+ let reader = File::open(path)?;
+ let schema: serde_json::Value = serde_json::from_reader(reader)?;
+ Ok(schema)
+ },
+ }
+ }
}

impl Display for Operation {
- fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
- write!(
- f,
- "{}",
- match self {
- Self::ActivityExists => "activity_exists",
- Self::ActivityUses => "activity_uses",
- Self::AgentActsOnBehalfOf => "agent_acts_on_behalf_of",
- Self::AgentExists => "agent_exists",
- Self::CreateNamespace => "create_namespace",
- Self::EndActivity => "end_activity",
- Self::EntityDerive => "entity_derive",
- Self::EntityExists => "entity_exists",
- Self::SetActivityAttributes => "set_activity_attributes",
- Self::SetAgentAttributes => "set_agent_attributes",
- Self::SetEntityAttributes => "set_entity_attributes",
- Self::StartActivity => "start_activity",
- Self::WasAssociatedWith => "was_associated_with",
- Self::WasAssociatedWithHasRole => "was_associated_with_has_role",
- Self::WasAttributedTo => "was_attributed_to",
- Self::WasGeneratedBy => "was_generated_by",
- Self::WasInformedBy => "was_informed_by",
- Self::DomainCollection(domain_collection) => &domain_collection.name,
- }
- )
- }
+ fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ write!(
+ f,
+ "{}",
+ match self {
+ Self::ActivityExists => "activity_exists",
+ Self::ActivityUses => "activity_uses",
+ Self::AgentActsOnBehalfOf => "agent_acts_on_behalf_of",
+ Self::AgentExists => "agent_exists",
+ Self::CreateNamespace => "create_namespace",
+ Self::EndActivity => "end_activity",
+ Self::EntityDerive => "entity_derive",
+ Self::EntityExists => "entity_exists",
+ Self::SetActivityAttributes => "set_activity_attributes",
+ Self::SetAgentAttributes => "set_agent_attributes",
+ Self::SetEntityAttributes => "set_entity_attributes",
+ Self::StartActivity => "start_activity",
+ Self::WasAssociatedWith => "was_associated_with",
+ Self::WasAssociatedWithHasRole => "was_associated_with_has_role",
+ Self::WasAttributedTo => "was_attributed_to",
+ Self::WasGeneratedBy => "was_generated_by",
+ Self::WasInformedBy => "was_informed_by",
+ Self::DomainCollection(domain_collection) => &domain_collection.name,
+ }
+ )
+ }
}

impl CollectionHandling for Generator {
- fn json_schema(&self) -> Result<Value, ChronicleSynthError>
- where
- Self: Display,
- {
- match self {
- Self::DomainCollection(domain_collection) => Ok(domain_collection.schema.to_owned()),
- _ => {
- let path = self.path();
- let reader = File::open(path)?;
- let schema: serde_json::Value = serde_json::from_reader(reader)?;
- Ok(schema)
- }
- }
- }
+ fn json_schema(&self) -> Result<Value, ChronicleSynthError>
+ where
+ Self: Display,
+ {
+ match self {
+ Self::DomainCollection(domain_collection) => Ok(domain_collection.schema.to_owned()),
+ _ => {
+ let path = self.path();
+ let reader = File::open(path)?;
+ let schema: serde_json::Value = serde_json::from_reader(reader)?;
+ Ok(schema)
+ },
+ }
+ }
}

impl Display for Generator {
- fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
- write!(
- f,
- "{}",
- match self {
- Self::ActivityName => "activity_name",
- Self::SecondActivityName => "second_activity_name",
- Self::AgentName => "agent_name",
- Self::SecondAgentName => "second_agent_name",
- Self::Attribute => "attribute",
- Self::Attributes => "attributes",
- Self::DateTime => "date_time",
- Self::DomainTypeId => "domain_type_id",
- Self::EntityName => "entity_name",
- Self::SecondEntityName => "second_entity_name",
- Self::Namespace => "namespace",
- Self::NamespaceUuid => "namespace_uuid",
- Self::Role => "role",
+ Self::Roles => "roles",
+ Self::SameNamespaceName => "same_namespace_name",
+ Self::SameNamespaceUuid => "same_namespace_uuid",
+ Self::DomainCollection(dc) => &dc.name,
+ }
+ )
+ }
}
diff --git a/crates/chronicle-synth/src/domain.rs b/crates/chronicle-synth/src/domain.rs
index a0980d302..beab53d95 100644
--- a/crates/chronicle-synth/src/domain.rs
+++ b/crates/chronicle-synth/src/domain.rs
@@ -6,109 +6,109 @@ use serde_json::json;
use common::domain::PrimitiveType;

use crate::{
- collection::{Collection, DomainCollection, Generator, Operation},
- error::ChronicleSynthError,
+ collection::{Collection, DomainCollection, Generator, Operation},
+ error::ChronicleSynthError,
};

#[derive(Debug)]
pub struct TypesAttributesRoles {
- pub name: String,
- entities: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
- agents: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
- activities: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
- roles: Vec<Role>,
+ pub name: String,
+ entities: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
+ agents: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
+ activities: BTreeMap<ParsedDomainType, BTreeMap<AttributeType, SynthType>>,
+ roles: Vec<Role>,
}
+ /// + /// # Returns + /// + /// A `Result` containing the `TypesAttributesRoles` instance, or an error if the operation + /// fails. + pub fn from_file(path: &Path) -> Result { + #[derive(Debug, Deserialize)] + struct ChronicleDomain { + #[serde(skip)] + _roles_doc: Option, + roles: Vec, + name: String, + attributes: BTreeMap, + entities: BTreeMap, + agents: BTreeMap, + activities: BTreeMap, + } + + impl ChronicleDomain { + fn from_path(path: &Path) -> Result { + let yaml: String = std::fs::read_to_string(path)?; + let domain: ChronicleDomain = serde_yaml::from_str(&yaml)?; + Ok(domain) + } + } + + impl From for TypesAttributesRoles { + fn from(value: ChronicleDomain) -> Self { + let mut attribute_types = BTreeMap::new(); + attribute_types.extend(value.attributes); + + let entities = value + .entities + .into_iter() + .map(|(entity_type, attributes)| { + (entity_type, attributes.link_attribute_types(&attribute_types)) + }) + .collect(); + let agents = value + .agents + .into_iter() + .map(|(agent_type, attributes)| { + (agent_type, attributes.link_attribute_types(&attribute_types)) + }) + .collect(); + let activities = value + .activities + .into_iter() + .map(|(activity_type, attributes)| { + (activity_type, attributes.link_attribute_types(&attribute_types)) + }) + .collect(); + + Self { name: value.name, entities, agents, activities, roles: value.roles } + } + } + + let domain = ChronicleDomain::from_path(path)?; + Ok(domain.into()) + } + + pub fn generate_domain_collections(&self) -> Result, ChronicleSynthError> { + let mut collections = vec![self.generate_roles()?]; + collections.extend(self.generate_activity_schema()?); + collections.extend(self.generate_agent_schema()?); + collections.extend(self.generate_entity_schema()?); + Ok(collections) + } + + fn generate_roles(&self) -> Result { + generate_roles(&self.roles) + } + + fn generate_activity_schema(&self) -> Result, ChronicleSynthError> { + generate_schema(&self.activities) + } + + fn generate_agent_schema(&self) -> Result, ChronicleSynthError> { + generate_schema(&self.agents) + } + + fn generate_entity_schema(&self) -> Result, ChronicleSynthError> { + generate_schema(&self.entities) + } } #[derive(Debug, Deserialize, Eq, PartialEq, Hash, PartialOrd, Ord)] @@ -118,108 +118,108 @@ struct AttributeType(String); struct ParsedDomainType(String); impl ParsedDomainType { - fn as_str(&self) -> &str { - &self.0 - } + fn as_str(&self) -> &str { + &self.0 + } } #[derive(Debug, Deserialize)] struct Role(String); impl Role { - fn as_str(&self) -> &str { - &self.0 - } + fn as_str(&self) -> &str { + &self.0 + } } #[derive(Debug)] enum SynthType { - String, - Object, - Number, - Bool, + String, + Object, + Number, + Bool, } impl From<&ChroniclePrimitive> for SynthType { - fn from(value: &ChroniclePrimitive) -> Self { - match value.r#type { - PrimitiveType::String => SynthType::String, - PrimitiveType::JSON => SynthType::Object, - PrimitiveType::Int => SynthType::Number, - PrimitiveType::Bool => SynthType::Bool, - } - } + fn from(value: &ChroniclePrimitive) -> Self { + match value.r#type { + PrimitiveType::String => SynthType::String, + PrimitiveType::JSON => SynthType::Object, + PrimitiveType::Int => SynthType::Number, + PrimitiveType::Bool => SynthType::Bool, + } + } } #[derive(Debug, Deserialize)] struct ChroniclePrimitive { - #[serde(skip)] - _doc: Option, - #[serde(rename = "type")] - r#type: PrimitiveType, + #[serde(skip)] + _doc: Option, + #[serde(rename = "type")] + r#type: PrimitiveType, } #[derive(Debug, Deserialize)] struct 
Attributes { - #[serde(skip)] - _doc: Option, - attributes: Vec, + #[serde(skip)] + _doc: Option, + attributes: Vec, } impl Attributes { - fn link_attribute_types( - self, - attribute_types: &BTreeMap, - ) -> BTreeMap { - let mut attr = BTreeMap::new(); - for attr_type in self.attributes { - let r#type: SynthType = attribute_types.get(&attr_type).unwrap().into(); - attr.insert(attr_type, r#type); - } - attr - } + fn link_attribute_types( + self, + attribute_types: &BTreeMap, + ) -> BTreeMap { + let mut attr = BTreeMap::new(); + for attr_type in self.attributes { + let r#type: SynthType = attribute_types.get(&attr_type).unwrap().into(); + attr.insert(attr_type, r#type); + } + attr + } } fn generate_roles(roles: &[Role]) -> Result { - let mut variants = vec![json!({ + let mut variants = vec![json!({ "type": "string", "constant": "UNSPECIFIED" })]; - // Uppercase guaranteed by the Linter - for role in roles { - variants.push(json!({ + // Uppercase guaranteed by the Linter + for role in roles { + variants.push(json!({ "type": "string", "constant": role.as_str() })); - } + } - let roles = json!({ + let roles = json!({ "type": "one_of", "variants": variants }); - let domain_collection = DomainCollection::new("roles", roles); + let domain_collection = DomainCollection::new("roles", roles); - Ok(Collection::Generator(Generator::DomainCollection(domain_collection))) + Ok(Collection::Generator(Generator::DomainCollection(domain_collection))) } fn domain_type_id_for_domain(ParsedDomainType(r#type): &ParsedDomainType) -> Collection { - let domain_type_id = json!({ + let domain_type_id = json!({ "type": "string", "constant": r#type }); - let collection_name = format!("{}_domain_type_id", r#type.to_lowercase()); - let domain_collection = DomainCollection::new(collection_name, domain_type_id); + let collection_name = format!("{}_domain_type_id", r#type.to_lowercase()); + let domain_collection = DomainCollection::new(collection_name, domain_type_id); - Collection::Generator(Generator::DomainCollection(domain_collection)) + Collection::Generator(Generator::DomainCollection(domain_collection)) } fn set_attributes(type_name_lower: &str) -> Collection { - let type_collection = format!("@{}_attributes", type_name_lower); - let type_domain_type = format!("@{}_domain_type_id", type_name_lower); - let type_attributes = json!({ + let type_collection = format!("@{}_attributes", type_name_lower); + let type_domain_type = format!("@{}_domain_type_id", type_name_lower); + let type_attributes = json!({ "type": "object", "@id": "_:n1", "@type": { @@ -252,101 +252,101 @@ fn set_attributes(type_name_lower: &str) -> Collection { "http://chronicle.works/chronicleoperations/ns#namespaceUuid": "@same_namespace_uuid" }); - let name = format!("set_{}_attributes", type_name_lower); - let domain_collection = DomainCollection::new(name, type_attributes); - Collection::Operation(Operation::DomainCollection(domain_collection)) + let name = format!("set_{}_attributes", type_name_lower); + let domain_collection = DomainCollection::new(name, type_attributes); + Collection::Operation(Operation::DomainCollection(domain_collection)) } fn type_attribute_variants( - type_name_lower: &str, - attributes: &BTreeMap, + type_name_lower: &str, + attributes: &BTreeMap, ) -> Result { - let mut type_attribute_variants: BTreeMap = maplit::btreemap! { + let mut type_attribute_variants: BTreeMap = maplit::btreemap! 
{ "type".to_string() => json!("object"), }; - for (AttributeType(attribute), r#type) in attributes { - let type_attribute_variant = match r#type { - SynthType::String => { - json!({ + for (AttributeType(attribute), r#type) in attributes { + let type_attribute_variant = match r#type { + SynthType::String => { + json!({ "type": "string", "faker": { "generator": "bs_noun" } }) - } - SynthType::Number => { - json!({ + }, + SynthType::Number => { + json!({ "type": "number", "subtype": "u32" }) - } - SynthType::Bool => { - json!({ + }, + SynthType::Bool => { + json!({ "type": "bool", "frequency": 0.5 }) - } - // Object will be an empty object. - // This is something that could be tweaked on a case by case basis given some domain - // knowledge - SynthType::Object => { - json!({ + }, + // Object will be an empty object. + // This is something that could be tweaked on a case by case basis given some domain + // knowledge + SynthType::Object => { + json!({ "type": "object", }) - } - }; + }, + }; - type_attribute_variants.insert(attribute.clone(), type_attribute_variant); - } + type_attribute_variants.insert(attribute.clone(), type_attribute_variant); + } - let name = format!("{}_attributes", type_name_lower); - let schema = serde_json::to_value(type_attribute_variants)?; - let collection = DomainCollection::new(name, schema); + let name = format!("{}_attributes", type_name_lower); + let schema = serde_json::to_value(type_attribute_variants)?; + let collection = DomainCollection::new(name, schema); - Ok(Collection::Generator(Generator::DomainCollection(collection))) + Ok(Collection::Generator(Generator::DomainCollection(collection))) } fn generate_schema( - types_attributes: &BTreeMap>, + types_attributes: &BTreeMap>, ) -> Result, ChronicleSynthError> { - let mut collections = Vec::new(); + let mut collections = Vec::new(); - for (r#type, attributes) in types_attributes { - let collection1 = domain_type_id_for_domain(r#type); - collections.push(collection1); + for (r#type, attributes) in types_attributes { + let collection1 = domain_type_id_for_domain(r#type); + collections.push(collection1); - let type_name_lower = r#type.as_str().to_lowercase(); + let type_name_lower = r#type.as_str().to_lowercase(); - let collection2 = set_attributes(&type_name_lower); - collections.push(collection2); + let collection2 = set_attributes(&type_name_lower); + collections.push(collection2); - let collection3 = type_attribute_variants(&type_name_lower, attributes)?; - collections.push(collection3); - } - Ok(collections) + let collection3 = type_attribute_variants(&type_name_lower, attributes)?; + collections.push(collection3); + } + Ok(collections) } #[cfg(test)] mod tests { - use maplit::btreemap; + use maplit::btreemap; - use crate::collection::CollectionHandling; + use crate::collection::CollectionHandling; - use super::*; + use super::*; - #[test] - fn test_type_attribute_variants() { - // Create a sample attribute map with two attributes - let attributes = btreemap! { + #[test] + fn test_type_attribute_variants() { + // Create a sample attribute map with two attributes + let attributes = btreemap! 
{ AttributeType("TestAttribute1".to_owned()) => SynthType::String, AttributeType("TestAttribute2".to_owned()) => SynthType::Number, }; - // Call the function being tested - let result = type_attribute_variants("test_type", &attributes).unwrap(); + // Call the function being tested + let result = type_attribute_variants("test_type", &attributes).unwrap(); - // Assert that the function returns a Collection with the expected properties - insta::assert_json_snapshot!(result.json_schema().unwrap().to_string(), @r###""{\"TestAttribute1\":{\"faker\":{\"generator\":\"bs_noun\"},\"type\":\"string\"},\"TestAttribute2\":{\"subtype\":\"u32\",\"type\":\"number\"},\"type\":\"object\"}""###); - } + // Assert that the function returns a Collection with the expected properties + insta::assert_json_snapshot!(result.json_schema().unwrap().to_string(), @r###""{\"TestAttribute1\":{\"faker\":{\"generator\":\"bs_noun\"},\"type\":\"string\"},\"TestAttribute2\":{\"subtype\":\"u32\",\"type\":\"number\"},\"type\":\"object\"}""###); + } } diff --git a/crates/chronicle-synth/src/error.rs b/crates/chronicle-synth/src/error.rs index b22dc7ddc..fba455705 100644 --- a/crates/chronicle-synth/src/error.rs +++ b/crates/chronicle-synth/src/error.rs @@ -2,31 +2,31 @@ use thiserror::Error; #[derive(Error, Debug)] pub enum ChronicleSynthError { - #[error("Chronicle domain parsing error: {0}")] - ModelError( - #[from] - #[source] - common::domain::ModelError, - ), + #[error("Chronicle domain parsing error: {0}")] + ModelError( + #[from] + #[source] + common::domain::ModelError, + ), - #[error("Invalid JSON: {0}")] - JsonError( - #[from] - #[source] - serde_json::Error, - ), + #[error("Invalid JSON: {0}")] + JsonError( + #[from] + #[source] + serde_json::Error, + ), - #[error("I/O error: {0}")] - IO( - #[from] - #[source] - std::io::Error, - ), + #[error("I/O error: {0}")] + IO( + #[from] + #[source] + std::io::Error, + ), - #[error("YAML parsing error: {0}")] - YamlError( - #[from] - #[source] - serde_yaml::Error, - ), + #[error("YAML parsing error: {0}")] + YamlError( + #[from] + #[source] + serde_yaml::Error, + ), } diff --git a/crates/chronicle-synth/src/generate.rs b/crates/chronicle-synth/src/generate.rs index 5897106b8..184c981ac 100644 --- a/crates/chronicle-synth/src/generate.rs +++ b/crates/chronicle-synth/src/generate.rs @@ -1,9 +1,9 @@ //! A command-line interface for generating Chronicle Synth schema for a given domain. 
use std::{ - fs::File, - io::{BufReader, Write}, - path::{Path, PathBuf}, + fs::File, + io::{BufReader, Write}, + path::{Path, PathBuf}, }; use clap::StructOpt; @@ -12,66 +12,66 @@ use serde::{Deserialize, Serialize}; use chronicle::codegen::linter::check_files; use chronicle_synth::{ - collection::{Collection, CollectionHandling}, - domain::TypesAttributesRoles, - error::ChronicleSynthError, + collection::{Collection, CollectionHandling}, + domain::TypesAttributesRoles, + error::ChronicleSynthError, }; #[derive(StructOpt)] #[structopt( - name = "chronicle-domain-synth", - about = "Generate Chronicle Synth schema for your domain", - author = "Blockchain Technology Partners" + name = "chronicle-domain-synth", + about = "Generate Chronicle Synth schema for your domain", + author = "Blockchain Technology Partners" )] struct Cli { - #[structopt( - value_name = "FILE", - help = "Chronicle domain definition file", - parse(from_os_str), - default_value = "crates/chronicle-synth/domain.yaml" - )] - domain_file: PathBuf, + #[structopt( + value_name = "FILE", + help = "Chronicle domain definition file", + parse(from_os_str), + default_value = "crates/chronicle-synth/domain.yaml" + )] + domain_file: PathBuf, } const COLLECT_SCRIPT: &str = "./crates/chronicle-synth/collect"; fn main() -> Result<(), ChronicleSynthError> { - let args = Cli::from_args(); + let args = Cli::from_args(); - let domain_file = args.domain_file.as_path(); + let domain_file = args.domain_file.as_path(); - // Use Chronicle Domain Linter to check the domain definition file - let filenames = vec![domain_file.to_str().ok_or_else(|| { - ChronicleSynthError::IO(std::io::Error::new( - std::io::ErrorKind::InvalidInput, - "Invalid path argument", - )) - })?]; + // Use Chronicle Domain Linter to check the domain definition file + let filenames = vec![domain_file.to_str().ok_or_else(|| { + ChronicleSynthError::IO(std::io::Error::new( + std::io::ErrorKind::InvalidInput, + "Invalid path argument", + )) + })?]; - check_files(filenames); + check_files(filenames); - println!("{}", "No domain definition errors detected.".green()); + println!("{}", "No domain definition errors detected.".green()); - generate_synth_collections(domain_file)?; + generate_synth_collections(domain_file)?; - // Run the `collect` script to collate the complete set of Synth collections for the domain - let output = std::process::Command::new("bash") - .args([COLLECT_SCRIPT]) - .output() - .expect("Failed to execute 'collect' command"); + // Run the `collect` script to collate the complete set of Synth collections for the domain + let output = std::process::Command::new("bash") + .args([COLLECT_SCRIPT]) + .output() + .expect("Failed to execute 'collect' command"); - println!("{}", String::from_utf8_lossy(&output.stdout)); + println!("{}", String::from_utf8_lossy(&output.stdout)); - println!( - "{} contains the additional Synth collections generated for your domain.", - "crates/chronicle-synth/domain-schema/".underline() - ); - println!( - "The complete set of Synth collections for your domain can be found in '{}'.", - "crates/chronicle-synth/collections/".underline() - ); + println!( + "{} contains the additional Synth collections generated for your domain.", + "crates/chronicle-synth/domain-schema/".underline() + ); + println!( + "The complete set of Synth collections for your domain can be found in '{}'.", + "crates/chronicle-synth/collections/".underline() + ); - Ok(()) + Ok(()) } /// Generates Synth collections for the given domain definition file. 
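Note that `main()` above shells out to the `collect` script via `std::process::Command::output()`, which returns `Err` only when the process cannot be spawned; a non-zero exit status still comes back as `Ok`. A hedged sketch of a stricter variant that also surfaces the exit status (the script path mirrors the `COLLECT_SCRIPT` constant; the helper itself is illustrative):

```rust
use std::process::Command;

fn run_collect(script: &str) -> std::io::Result<()> {
    // output() errors only if bash itself cannot be started.
    let output = Command::new("bash").arg(script).output()?;
    if !output.status.success() {
        eprintln!("collect failed: {}", String::from_utf8_lossy(&output.stderr));
    }
    println!("{}", String::from_utf8_lossy(&output.stdout));
    Ok(())
}
```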
@@ -100,24 +100,24 @@ fn main() -> Result<(), ChronicleSynthError> {
 /// }
 /// ```
 fn generate_synth_collections(domain_file: &Path) -> Result<(), ChronicleSynthError> {
-    let generator = TypesAttributesRoles::from_file(domain_file)?;
-    println!("Generating schema for domain: {}.", generator.name.underline());
-
-    let dir_path = PathBuf::from(DOMAIN_SCHEMA_TARGET_DIRECTORY);
-    std::fs::create_dir_all(&dir_path)?;
-
-    let collections = generator.generate_domain_collections()?;
-    for collection in collections {
-        write_collection(&collection, &dir_path)?;
-
-        match collection {
-            Collection::Operation(_) => {}
-            Collection::Generator(collection) => {
-                append_to_exclude_list(EXCLUDE_LIST, &collection.name())?;
-            }
-        }
-    }
-    Ok(())
+    let generator = TypesAttributesRoles::from_file(domain_file)?;
+    println!("Generating schema for domain: {}.", generator.name.underline());
+
+    let dir_path = PathBuf::from(DOMAIN_SCHEMA_TARGET_DIRECTORY);
+    std::fs::create_dir_all(&dir_path)?;
+
+    let collections = generator.generate_domain_collections()?;
+    for collection in collections {
+        write_collection(&collection, &dir_path)?;
+
+        match collection {
+            Collection::Operation(_) => {},
+            Collection::Generator(collection) => {
+                append_to_exclude_list(EXCLUDE_LIST, &collection.name())?;
+            },
+        }
+    }
+    Ok(())
 }
 
 const DOMAIN_SCHEMA_TARGET_DIRECTORY: &str = "./crates/chronicle-synth/domain-schema";
@@ -126,85 +126,86 @@ const EXCLUDE_LIST: &str = "./crates/chronicle-synth/exclude_collections.json";
 
 #[derive(Deserialize, Serialize)]
 struct ExcludeCollections {
-    exclude: Vec<String>,
+    exclude: Vec<String>,
 }
 
 impl ExcludeCollections {
-    fn from_file(filename: impl AsRef<Path>) -> Result<Self, ChronicleSynthError> {
-        let file = File::open(filename)?;
-        let reader = BufReader::new(file);
-        let exclude_collections = serde_json::from_reader(reader)?;
-        Ok(exclude_collections)
-    }
+    fn from_file(filename: impl AsRef<Path>) -> Result<Self, ChronicleSynthError> {
+        let file = File::open(filename)?;
+        let reader = BufReader::new(file);
+        let exclude_collections = serde_json::from_reader(reader)?;
+        Ok(exclude_collections)
+    }
 }
 
 fn write_collection(collection: &Collection, dir_path: &Path) -> Result<(), ChronicleSynthError> {
-    let file_path = dir_path.join(collection.path());
-    let mut file = File::create(file_path)?;
-    let schema = collection.json_schema()?;
-    file.write_all(serde_json::to_string(&schema)?.as_bytes())?;
-    Ok(())
+    let file_path = dir_path.join(collection.path());
+    let mut file = File::create(file_path)?;
+    let schema = collection.json_schema()?;
+    file.write_all(serde_json::to_string(&schema)?.as_bytes())?;
+    Ok(())
 }
 
 /// Appends a collection name to the "exclude list" file, a list of collection files to be ignored
 /// when generating the domain schema. See `generate` script in this repository for more
 /// information.
 fn append_to_exclude_list(
-    path: impl AsRef<Path>,
-    collection: &str,
+    path: impl AsRef<Path>,
+    collection: &str,
 ) -> Result<(), ChronicleSynthError> {
-    let collection = collection.to_string();
-    let mut list = ExcludeCollections::from_file(&path)?;
+    let collection = collection.to_string();
+    let mut list = ExcludeCollections::from_file(&path)?;
 
-    if list.exclude.contains(&collection) {
-        return Ok(());
-    } else {
-        list.exclude.push(collection);
-    }
+    if list.exclude.contains(&collection) {
+        return Ok(());
+    } else {
+        list.exclude.push(collection);
+    }
 
-    let mut file = File::create(&path)?;
-    file.write_all(serde_json::to_string_pretty(&list)?.as_bytes())?;
+    let mut file = File::create(&path)?;
+    file.write_all(serde_json::to_string_pretty(&list)?.as_bytes())?;
 
-    Ok(())
+    Ok(())
 }
 
 #[cfg(test)]
 mod tests {
-    use assert_fs::prelude::*;
+    use assert_fs::prelude::*;
 
-    use super::*;
+    use super::*;
 
-    const PATH: &str = "test_exclude_collections.json";
+    const PATH: &str = "test_exclude_collections.json";
 
-    fn create_test_exclude_collections() -> Result<assert_fs::NamedTempFile, Box<dyn std::error::Error>> {
-        let file = assert_fs::NamedTempFile::new(PATH)?;
+    fn create_test_exclude_collections(
+    ) -> Result<assert_fs::NamedTempFile, Box<dyn std::error::Error>> {
+        let file = assert_fs::NamedTempFile::new(PATH)?;
 
-        file.write_str(
-            r#"
+        file.write_str(
+            r#"
            {
                "exclude": [
                    "already_ignore_this"
                ]
            }
            "#,
-        )?;
+        )?;
 
-        Ok(file)
-    }
+        Ok(file)
+    }
 
-    #[test]
-    fn test_append_to_exclude_list() -> Result<(), ChronicleSynthError> {
-        let file = create_test_exclude_collections().unwrap();
+    #[test]
+    fn test_append_to_exclude_list() -> Result<(), ChronicleSynthError> {
+        let file = create_test_exclude_collections().unwrap();
 
-        // Call the function to append to the exclude list
-        append_to_exclude_list(file.path(), "ignore_this_collection_when_printing")?;
+        // Call the function to append to the exclude list
+        append_to_exclude_list(file.path(), "ignore_this_collection_when_printing")?;
 
-        // Read the contents of the file and check if the collection was added
-        let file = File::open(file.path())?;
-        let reader = BufReader::new(file);
-        let exclude_collections: ExcludeCollections = serde_json::from_reader(reader)?;
+        // Read the contents of the file and check if the collection was added
+        let file = File::open(file.path())?;
+        let reader = BufReader::new(file);
+
let exclude_collections: ExcludeCollections = serde_json::from_reader(reader)?; - insta::assert_json_snapshot!(exclude_collections, @r###" + insta::assert_json_snapshot!(exclude_collections, @r###" { "exclude": [ "already_ignore_this" ] }"###); - Ok(()) - } + Ok(()) + } } diff --git a/crates/chronicle-telemetry/Cargo.toml b/crates/chronicle-telemetry/Cargo.toml index cb4687945..99479b378 100644 --- a/crates/chronicle-telemetry/Cargo.toml +++ b/crates/chronicle-telemetry/Cargo.toml @@ -9,19 +9,21 @@ version = "0.7.5" cfg-if = { workspace = true } console-subscriber = { workspace = true } tracing = { workspace = true } -tracing-flame = { version = "^0.2" } -tracing-log = { workspace = true } -tracing-opentelemetry = "0.24" tracing-subscriber = { version = "^0.3.15", features = [ - "default", + "std", "registry", "env-filter", - "json", + "json" ] } -opentelemetry_sdk = { version = "0.23", features = ["rt-tokio", "metrics", "trace"] } -opentelemetry-otlp = { version = "0.16.0", features = ["trace", "metrics"] } -opentelemetry-semantic-conventions = { version = "0.15.0" } -opentelemetry-proto = { version = "0.6.0"} +log = "^0.4.21" +opentelemetry-appender-log = { git="https://github.com/open-telemetry/opentelemetry-rust",version="0.4", default-features = false} +opentelemetry-appender-tracing = { git="https://github.com/open-telemetry/opentelemetry-rust",version="0.4", default-features = false} +opentelemetry = {git="https://github.com/open-telemetry/opentelemetry-rust", version = "0.23", features = ["metrics", "logs"] } +opentelemetry_sdk = {git="https://github.com/open-telemetry/opentelemetry-rust", version = "0.23", features = ["rt-tokio", "logs"] } +opentelemetry-otlp = {git="https://github.com/open-telemetry/opentelemetry-rust", version = "0.16.0", features = ["tonic", "metrics", "logs","grpc-tonic","tls-roots"] } +opentelemetry-semantic-conventions = {git="https://github.com/open-telemetry/opentelemetry-rust", version = "0.15.0" } +opentelemetry-stdout = {git="https://github.com/open-telemetry/opentelemetry-rust", version = "0.4", features = ["logs"]} + url = { workspace = true, features = ["serde"] } [features] diff --git a/crates/chronicle-telemetry/src/telemetry.rs b/crates/chronicle-telemetry/src/telemetry.rs index c83f0b20f..9ff9ee9a1 100644 --- a/crates/chronicle-telemetry/src/telemetry.rs +++ b/crates/chronicle-telemetry/src/telemetry.rs @@ -1,169 +1,89 @@ -use tracing::subscriber::set_global_default; -use tracing_flame::FlameLayer; -use tracing_log::{log::LevelFilter, LogTracer}; -use tracing_subscriber::{prelude::*, EnvFilter, Registry}; +use opentelemetry::{ + global, + logs::LogError, + metrics::MetricsError, + trace::{TraceContextExt, TraceError, Tracer, TracerProvider}, + Key, KeyValue, +}; +use opentelemetry_appender_log::OpenTelemetryLogBridge; +use opentelemetry_appender_tracing::layer::OpenTelemetryTracingBridge; +use opentelemetry_otlp::{ExportConfig, WithExportConfig}; +use opentelemetry_sdk::{runtime, trace as sdktrace, trace::Config, Resource}; +use tracing::Level; +use tracing_subscriber::{fmt::format::FmtSpan, prelude::*, EnvFilter}; -use opentelemetry_sdk::trace; -use opentelemetry_sdk::Resource; -#[derive(Debug, Clone, Copy)] -pub enum ConsoleLogging { - Off, - Pretty, - Json, +fn init_tracer_provider() -> Result { + opentelemetry_otlp::new_pipeline() + .tracing() + .with_exporter(opentelemetry_otlp::new_exporter().tonic()) + .with_trace_config(Config::default()) + .install_batch(runtime::Tokio) } -#[cfg(feature = "tokio-tracing")] -macro_rules! 
console_layer { - () => { - console_subscriber::ConsoleLayer::builder().with_default_env().spawn() - }; -} +fn init_metrics() -> Result { -macro_rules! stdio_layer { - () => { - tracing_subscriber::fmt::layer() - .with_level(true) - .with_target(true) - .with_thread_ids(true) - }; + opentelemetry_otlp::new_pipeline() + .metrics(runtime::Tokio) + .with_exporter(opentelemetry_otlp::new_exporter().tonic()) + .build() } -macro_rules! oltp_exporter_layer { - () => {{ - let tracer = opentelemetry_otlp::new_pipeline() - .tracing() - .with_exporter(opentelemetry_otlp::new_exporter().tonic()) - .with_trace_config( - trace::config().with_resource(Resource::default()), - ) - .install_batch(opentelemetry_sdk::runtime::Tokio) - .expect("Failed to install OpenTelemetry tracer"); - - tracing_opentelemetry::layer().with_tracer(tracer) - }}; +fn init_logs() -> Result { + opentelemetry_otlp::new_pipeline() + .logging() + .with_exporter(opentelemetry_otlp::new_exporter().tonic()) + .install_batch(runtime::Tokio) } -pub struct OptionalDrop { - inner: Option, +#[derive(Debug, Clone, Copy)] +pub enum ConsoleLogging { + Off, + Pretty, + Json, } -impl OptionalDrop { - pub fn new(inner: T) -> Self { - Self { inner: Some(inner) } - } -} +pub fn telemetry(console_logging: ConsoleLogging) { + let result = init_tracer_provider(); + assert!(result.is_ok(), "Init tracer failed with error: {:?}", result.err()); + let tracer_provider = result.unwrap(); + global::set_tracer_provider(tracer_provider.clone()); -impl Drop for OptionalDrop { - fn drop(&mut self) { - self.inner.take(); - } -} + let result = init_metrics(); + assert!(result.is_ok(), "Init metrics failed with error: {:?}", result.err()); + let meter_provider = result.unwrap(); + global::set_meter_provider(meter_provider.clone()); -pub fn telemetry( - otel_enable: bool, - console_logging: ConsoleLogging, -) -> impl Drop { - full_telemetry(otel_enable, None, console_logging) -} + // Initialize logs and save the logger_provider. + let logger_provider = init_logs().unwrap(); -pub fn full_telemetry( - otel_export: bool, - flame_file: Option<&str>, - console_logging: ConsoleLogging, -) -> impl Drop { - let (flame_layer, guard) = flame_file - .map(|path| { - let (flame_layer, guard) = FlameLayer::with_file(path).unwrap(); - (Some(flame_layer), Some(guard)) - }) - .unwrap_or((None, None)); + // Create a new OpenTelemetryTracingBridge using the above LoggerProvider. 
+ let trace_bridge = OpenTelemetryTracingBridge::new(&logger_provider); - LogTracer::init_with_filter(LevelFilter::Trace).ok(); + let filter = EnvFilter::from_default_env(); - let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("error")); - match (otel_export, flame_layer, console_logging) { - (true, Some(flame_layer), ConsoleLogging::Json) => set_global_default( - Registry::default() - .with(env_filter) - .with(flame_layer) - .with(stdio_layer!().json()) - .with(oltp_exporter_layer!()), - ), + let fmt_layer: Option + Send + Sync>> = match console_logging { + ConsoleLogging::Json => Some(Box::new(tracing_subscriber::fmt::layer() + .with_span_events(FmtSpan::ACTIVE) + .compact() + .json())), + ConsoleLogging::Pretty => Some(Box::new(tracing_subscriber::fmt::layer() + .with_span_events(FmtSpan::ACTIVE) + .compact() + .pretty())), + ConsoleLogging::Off => None, + }; - (true, Some(flame_layer), ConsoleLogging::Pretty) => set_global_default( - Registry::default() - .with(env_filter) - .with(flame_layer) - .with(stdio_layer!().pretty()) - .with(oltp_exporter_layer!()), - ), - (true, Some(flame_layer), ConsoleLogging::Off) => set_global_default( - Registry::default() - .with(env_filter) - .with(flame_layer) - .with(oltp_exporter_layer!()), - ), - (false, Some(flame_layer), ConsoleLogging::Json) => set_global_default( - Registry::default() - .with(env_filter) - .with(flame_layer) - .with(stdio_layer!().json()), - ), - (false, Some(flame_layer), ConsoleLogging::Pretty) => { - cfg_if::cfg_if! { - if #[cfg(feature = "tokio-tracing")] { - set_global_default(Registry::default() - .with(env_filter) - .with(flame_layer) - .with(stdio_layer!().pretty()) - .with(console_layer!())) - } else { - set_global_default(Registry::default() - .with(env_filter) - .with(flame_layer) - .with(stdio_layer!().pretty()) - ) - } - } - } - (true, None, ConsoleLogging::Json) => set_global_default( - Registry::default() - .with(env_filter) - .with(stdio_layer!().json()) - .with(oltp_exporter_layer!()), - ), - (true, None, ConsoleLogging::Pretty) => set_global_default( - Registry::default() - .with(env_filter) - .with(stdio_layer!().pretty()) - .with(oltp_exporter_layer!()), - ), - (true, None, ConsoleLogging::Off) => { - let otel_layer = oltp_exporter_layer!(); - set_global_default(Registry::default().with(env_filter).with(otel_layer)) - } - (false, None, ConsoleLogging::Json) => - set_global_default(Registry::default().with(env_filter).with(stdio_layer!().json())), - (false, None, ConsoleLogging::Pretty) => { - cfg_if::cfg_if! 
{ - if #[cfg(feature = "tokio-tracing")] { - set_global_default(Registry::default() - .with(env_filter) - .with(stdio_layer!().pretty()) - .with(console_layer!())) - } else { - set_global_default(Registry::default() - .with(env_filter) - .with(stdio_layer!().pretty()) - ) - } - } - } - _ => set_global_default(Registry::default().with(env_filter)), - } - .map_err(|e| eprintln!("Failed to set global default subscriber: {:?}", e)) - .ok(); + let registry = tracing_subscriber::registry() + .with(filter) + .with(trace_bridge); - OptionalDrop::new(guard) -} + if let Some(layer) = fmt_layer { + registry.with(layer).init(); + } else { + registry.init(); + } + log::info!("Log bridge to telemetry initialized"); + tracing::info!("Trace bridge to telemetry initialized"); + let _ = tracing::span!(Level::INFO, "Span telemetry initialized").enter(); +} diff --git a/crates/chronicle-test-infrastructure/src/api_test.rs b/crates/chronicle-test-infrastructure/src/api_test.rs index 0dd9cd891..a729b849d 100644 --- a/crates/chronicle-test-infrastructure/src/api_test.rs +++ b/crates/chronicle-test-infrastructure/src/api_test.rs @@ -1,15 +1,15 @@ use api::commands::{ - ActivityCommand, AgentCommand, ApiCommand, EntityCommand, ImportCommand, NamespaceCommand, + ActivityCommand, AgentCommand, ApiCommand, EntityCommand, ImportCommand, NamespaceCommand, }; use chrono::{TimeZone, Utc}; use common::{ - attributes::{Attribute, Attributes}, - identity::AuthId, - prov::{ - json_ld::ToJson, - operations::{ChronicleOperation, DerivationType}, - ActivityId, AgentId, DomaintypeId, EntityId, NamespaceId, - }, + attributes::{Attribute, Attributes}, + identity::AuthId, + prov::{ + json_ld::ToJson, + operations::{ChronicleOperation, DerivationType}, + ActivityId, AgentId, DomaintypeId, EntityId, NamespaceId, + }, }; use uuid::Uuid; @@ -18,10 +18,10 @@ use crate::substitutes::test_api; // Creates a mock file containing JSON-LD of the ChronicleOperations // that would be created by the given command, although not in any particular order. 
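The fixture helper that follows builds its mock JSON-LD import file with `assert_fs`; the pattern in isolation looks like this (fixture body abbreviated to an empty JSON-LD array, helper name hypothetical):

```rust
use assert_fs::prelude::*; // brings write_str (FileWriteStr) into scope

fn fixture() -> assert_fs::NamedTempFile {
    let file = assert_fs::NamedTempFile::new("import.json").unwrap();
    // The real test writes a full array of ChronicleOperation JSON-LD objects.
    file.write_str("[]").unwrap();
    file
}
```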
fn test_create_agent_operations_import() -> assert_fs::NamedTempFile { - let file = assert_fs::NamedTempFile::new("import.json").unwrap(); - assert_fs::prelude::FileWriteStr::write_str( - &file, - r#" + let file = assert_fs::NamedTempFile::new("import.json").unwrap(); + assert_fs::prelude::FileWriteStr::write_str( + &file, + r#" [ { "@id": "_:n1", @@ -94,32 +94,32 @@ fn test_create_agent_operations_import() -> assert_fs::NamedTempFile { } ] "#, - ) - .unwrap(); - file + ) + .unwrap(); + file } #[tokio::test] async fn test_import_operations() { - let mut api = test_api().await; + let mut api = test_api().await; - let file = test_create_agent_operations_import(); + let file = test_create_agent_operations_import(); - let contents = std::fs::read_to_string(file.path()).unwrap(); + let contents = std::fs::read_to_string(file.path()).unwrap(); - let json_array = serde_json::from_str::>(&contents).unwrap(); + let json_array = serde_json::from_str::>(&contents).unwrap(); - let mut operations = Vec::with_capacity(json_array.len()); - for value in json_array.into_iter() { - let op = ChronicleOperation::from_json(&value) - .await - .expect("Failed to parse imported JSON-LD to ChronicleOperation"); - operations.push(op); - } + let mut operations = Vec::with_capacity(json_array.len()); + for value in json_array.into_iter() { + let op = ChronicleOperation::from_json(&value) + .await + .expect("Failed to parse imported JSON-LD to ChronicleOperation"); + operations.push(op); + } - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!(api + insta::assert_json_snapshot!(api .dispatch(ApiCommand::Import(ImportCommand { operations: operations.clone() } ), identity.clone()) .await .unwrap() @@ -148,8 +148,8 @@ async fn test_import_operations() { } "###); - // Check that the operations that do not result in data changes are not submitted - insta::assert_json_snapshot!(api + // Check that the operations that do not result in data changes are not submitted + insta::assert_json_snapshot!(api .dispatch(ApiCommand::Import(ImportCommand { operations } ), identity) .await .unwrap() @@ -175,11 +175,11 @@ async fn test_import_operations() { #[tokio::test] async fn create_namespace() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!(api + insta::assert_json_snapshot!(api .dispatch(ApiCommand::NameSpace(NamespaceCommand::Create { id: "testns".into(), }), identity) @@ -199,11 +199,11 @@ async fn create_namespace() { #[tokio::test] async fn create_agent() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!(api.dispatch(ApiCommand::Agent(AgentCommand::Create { + insta::assert_json_snapshot!(api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), attributes: Attributes::new( @@ -244,11 +244,11 @@ async fn create_agent() { #[tokio::test] async fn create_system_activity() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!(api.dispatch(ApiCommand::Activity(ActivityCommand::Create { + insta::assert_json_snapshot!(api.dispatch(ApiCommand::Activity(ActivityCommand::Create { id: "testactivity".into(), namespace: common::prov::SYSTEM_ID.into(), attributes: 
Attributes::new( @@ -289,11 +289,11 @@ async fn create_system_activity() { #[tokio::test] async fn create_activity() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!(api.dispatch(ApiCommand::Activity(ActivityCommand::Create { + insta::assert_json_snapshot!(api.dispatch(ApiCommand::Activity(ActivityCommand::Create { id: "testactivity".into(), namespace: "testns".into(), attributes: Attributes::new( @@ -334,11 +334,11 @@ async fn create_activity() { #[tokio::test] async fn start_activity() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -377,17 +377,17 @@ async fn start_activity() { } "###); - api.dispatch( - ApiCommand::Agent(AgentCommand::UseInContext { - id: AgentId::from_external_id("testagent"), - namespace: "testns".into(), - }), - identity.clone(), - ) - .await - .unwrap(); - - insta::assert_json_snapshot!( + api.dispatch( + ApiCommand::Agent(AgentCommand::UseInContext { + id: AgentId::from_external_id("testagent"), + namespace: "testns".into(), + }), + identity.clone(), + ) + .await + .unwrap(); + + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Start { id: ActivityId::from_external_id("testactivity"), namespace: "testns".into(), @@ -435,11 +435,11 @@ async fn start_activity() { #[tokio::test] async fn contradict_attributes() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -478,35 +478,35 @@ async fn contradict_attributes() { } "###); - let res = api - .dispatch( - ApiCommand::Agent(AgentCommand::Create { - id: "testagent".into(), - namespace: "testns".into(), - attributes: Attributes::new( - Some(DomaintypeId::from_external_id("test")), - [Attribute { - typ: "test".to_owned(), - value: serde_json::Value::String("test2".to_owned()).into(), - }] - .into_iter() - .collect(), - ), - }), - identity, - ) - .await; - - insta::assert_snapshot!(res.err().unwrap().to_string(), @r###"Contradiction: Contradiction { attribute value change: test Attribute { typ: "test", value: SerdeWrapper(String("test2")) } Attribute { typ: "test", value: SerdeWrapper(String("test")) } }"###); + let res = api + .dispatch( + ApiCommand::Agent(AgentCommand::Create { + id: "testagent".into(), + namespace: "testns".into(), + attributes: Attributes::new( + Some(DomaintypeId::from_external_id("test")), + [Attribute { + typ: "test".to_owned(), + value: serde_json::Value::String("test2".to_owned()).into(), + }] + .into_iter() + .collect(), + ), + }), + identity, + ) + .await; + + insta::assert_snapshot!(res.err().unwrap().to_string(), @r###"Contradiction: Contradiction { attribute value change: test Attribute { typ: "test", value: SerdeWrapper(String("test2")) } Attribute { typ: "test", value: SerdeWrapper(String("test")) } }"###); } #[tokio::test] async fn contradict_start_time() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - 
insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -545,17 +545,17 @@ async fn contradict_start_time() { } "###); - api.dispatch( - ApiCommand::Agent(AgentCommand::UseInContext { - id: AgentId::from_external_id("testagent"), - namespace: "testns".into(), - }), - identity.clone(), - ) - .await - .unwrap(); - - insta::assert_json_snapshot!( + api.dispatch( + ApiCommand::Agent(AgentCommand::UseInContext { + id: AgentId::from_external_id("testagent"), + namespace: "testns".into(), + }), + identity.clone(), + ) + .await + .unwrap(); + + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Start { id: ActivityId::from_external_id("testactivity"), namespace: "testns".into(), @@ -600,29 +600,29 @@ async fn contradict_start_time() { } "###); - // Should contradict - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::Start { - id: ActivityId::from_external_id("testactivity"), - namespace: "testns".into(), - time: Some(Utc.with_ymd_and_hms(2018, 7, 8, 9, 10, 11).unwrap()), - agent: None, - }), - identity, - ) - .await; - - insta::assert_snapshot!(res.err().unwrap().to_string(), @"Contradiction: Contradiction { start date alteration: 2014-07-08T09:10:11+00:00 2018-07-08T09:10:11+00:00 }"); + // Should contradict + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::Start { + id: ActivityId::from_external_id("testactivity"), + namespace: "testns".into(), + time: Some(Utc.with_ymd_and_hms(2018, 7, 8, 9, 10, 11).unwrap()), + agent: None, + }), + identity, + ) + .await; + + insta::assert_snapshot!(res.err().unwrap().to_string(), @"Contradiction: Contradiction { start date alteration: 2014-07-08T09:10:11+00:00 2018-07-08T09:10:11+00:00 }"); } #[tokio::test] async fn contradict_end_time() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -661,17 +661,17 @@ async fn contradict_end_time() { } "###); - api.dispatch( - ApiCommand::Agent(AgentCommand::UseInContext { - id: AgentId::from_external_id("testagent"), - namespace: "testns".into(), - }), - identity.clone(), - ) - .await - .unwrap(); - - insta::assert_json_snapshot!( + api.dispatch( + ApiCommand::Agent(AgentCommand::UseInContext { + id: AgentId::from_external_id("testagent"), + namespace: "testns".into(), + }), + identity.clone(), + ) + .await + .unwrap(); + + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::End { id: ActivityId::from_external_id("testactivity"), namespace: "testns".into(), @@ -716,29 +716,29 @@ async fn contradict_end_time() { } "###); - // Should contradict - let res = api - .dispatch( - ApiCommand::Activity(ActivityCommand::End { - id: ActivityId::from_external_id("testactivity"), - namespace: "testns".into(), - time: Some(Utc.with_ymd_and_hms(2022, 7, 8, 9, 10, 11).unwrap()), - agent: None, - }), - identity, - ) - .await; - - insta::assert_snapshot!(res.err().unwrap().to_string(), @"Contradiction: Contradiction { end date alteration: 2018-07-08T09:10:11+00:00 2022-07-08T09:10:11+00:00 }"); + // Should contradict + let res = api + .dispatch( + ApiCommand::Activity(ActivityCommand::End { + id: ActivityId::from_external_id("testactivity"), + namespace: 
"testns".into(), + time: Some(Utc.with_ymd_and_hms(2022, 7, 8, 9, 10, 11).unwrap()), + agent: None, + }), + identity, + ) + .await; + + insta::assert_snapshot!(res.err().unwrap().to_string(), @"Contradiction: Contradiction { end date alteration: 2018-07-08T09:10:11+00:00 2022-07-08T09:10:11+00:00 }"); } #[tokio::test] async fn end_activity() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -777,17 +777,17 @@ async fn end_activity() { } "###); - api.dispatch( - ApiCommand::Agent(AgentCommand::UseInContext { - id: AgentId::from_external_id("testagent"), - namespace: "testns".into(), - }), - identity.clone(), - ) - .await - .unwrap(); - - insta::assert_json_snapshot!( + api.dispatch( + ApiCommand::Agent(AgentCommand::UseInContext { + id: AgentId::from_external_id("testagent"), + namespace: "testns".into(), + }), + identity.clone(), + ) + .await + .unwrap(); + + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Start { id: ActivityId::from_external_id("testactivity"), namespace: "testns".into(), @@ -832,7 +832,7 @@ async fn end_activity() { } "###); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::End { id: ActivityId::from_external_id("testactivity"), @@ -882,11 +882,11 @@ async fn end_activity() { #[tokio::test] async fn activity_use() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Agent(AgentCommand::Create { id: "testagent".into(), namespace: "testns".into(), @@ -925,17 +925,17 @@ async fn activity_use() { } "###); - api.dispatch( - ApiCommand::Agent(AgentCommand::UseInContext { - id: AgentId::from_external_id("testagent"), - namespace: "testns".into(), - }), - identity.clone(), - ) - .await - .unwrap(); - - insta::assert_json_snapshot!( + api.dispatch( + ApiCommand::Agent(AgentCommand::UseInContext { + id: AgentId::from_external_id("testagent"), + namespace: "testns".into(), + }), + identity.clone(), + ) + .await + .unwrap(); + + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Create { id: "testactivity".into(), namespace: "testns".into(), @@ -974,7 +974,7 @@ async fn activity_use() { } "###); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Use { id: EntityId::from_external_id("testentity"), namespace: "testns".into(), @@ -1017,7 +1017,7 @@ async fn activity_use() { } "###); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::End { id: ActivityId::from_external_id("testactivity"), namespace: "testns".into(), @@ -1073,11 +1073,11 @@ async fn activity_use() { #[tokio::test] async fn activity_generate() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Create { id: "testactivity".into(), namespace: "testns".into(), @@ -1116,7 +1116,7 @@ async fn activity_generate() { } "###); - 
insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Activity(ActivityCommand::Generate { id: EntityId::from_external_id("testentity"), namespace: "testns".into(), @@ -1146,11 +1146,11 @@ async fn activity_generate() { #[tokio::test] async fn derive_entity_abstract() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Entity(EntityCommand::Derive { id: EntityId::from_external_id("testgeneratedentity"), namespace: "testns".into(), @@ -1193,11 +1193,11 @@ async fn derive_entity_abstract() { #[tokio::test] async fn derive_entity_primary_source() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Entity(EntityCommand::Derive { id: EntityId::from_external_id("testgeneratedentity"), namespace: "testns".into(), @@ -1240,11 +1240,11 @@ async fn derive_entity_primary_source() { #[tokio::test] async fn derive_entity_revision() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Entity(EntityCommand::Derive { id: EntityId::from_external_id("testgeneratedentity"), namespace: "testns".into(), @@ -1287,11 +1287,11 @@ async fn derive_entity_revision() { #[tokio::test] async fn derive_entity_quotation() { - let mut api = test_api().await; + let mut api = test_api().await; - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - insta::assert_json_snapshot!( + insta::assert_json_snapshot!( api.dispatch(ApiCommand::Entity(EntityCommand::Derive { id: EntityId::from_external_id("testgeneratedentity"), namespace: "testns".into(), diff --git a/crates/chronicle-test-infrastructure/src/cli_test.rs b/crates/chronicle-test-infrastructure/src/cli_test.rs index c7cd8c0ab..c882cad3d 100644 --- a/crates/chronicle-test-infrastructure/src/cli_test.rs +++ b/crates/chronicle-test-infrastructure/src/cli_test.rs @@ -1,79 +1,79 @@ use api::commands::ApiCommand; use chronicle::{ - bootstrap::{CliModel, SubCommand}, - codegen::ChronicleDomainDef, - PrimitiveType, + bootstrap::{CliModel, SubCommand}, + codegen::ChronicleDomainDef, + PrimitiveType, }; use common::{ - identity::AuthId, - prov::{json_ld::ToJson, ActivityId, AgentId, ChronicleIri, EntityId, ProvModel}, + identity::AuthId, + prov::{json_ld::ToJson, ActivityId, AgentId, ChronicleIri, EntityId, ProvModel}, }; use crate::substitutes::test_api; fn get_api_cmd(command_line: &str) -> ApiCommand { - let cli = test_cli_model(); - let matches = cli.as_cmd().get_matches_from(command_line.split_whitespace()); - cli.matches(&matches).unwrap().unwrap() + let cli = test_cli_model(); + let matches = cli.as_cmd().get_matches_from(command_line.split_whitespace()); + cli.matches(&matches).unwrap().unwrap() } async fn parse_and_execute(command_line: &str, cli: CliModel) -> Box { - let mut api = test_api().await; + let mut api = test_api().await; - let matches = cli.as_cmd().get_matches_from(command_line.split_whitespace()); + let matches = cli.as_cmd().get_matches_from(command_line.split_whitespace()); - let cmd = cli.matches(&matches).unwrap().unwrap(); + let cmd = 
cli.matches(&matches).unwrap().unwrap(); - let identity = AuthId::chronicle(); + let identity = AuthId::chronicle(); - api.dispatch(cmd, identity).await.unwrap().unwrap().0 + api.dispatch(cmd, identity).await.unwrap().unwrap().0 } fn test_cli_model() -> CliModel { - CliModel::from( - ChronicleDomainDef::build("test") - .with_attribute_type("testString", None, PrimitiveType::String) - .unwrap() - .with_attribute_type("testBool", None, PrimitiveType::Bool) - .unwrap() - .with_attribute_type("testInt", None, PrimitiveType::Int) - .unwrap() - .with_attribute_type("testJSON", None, PrimitiveType::JSON) - .unwrap() - .with_activity("testActivity", None, |b| { - b.with_attribute("testString") - .unwrap() - .with_attribute("testBool") - .unwrap() - .with_attribute("testInt") - }) - .unwrap() - .with_agent("testAgent", None, |b| { - b.with_attribute("testString") - .unwrap() - .with_attribute("testBool") - .unwrap() - .with_attribute("testInt") - }) - .unwrap() - .with_entity("testEntity", None, |b| { - b.with_attribute("testString") - .unwrap() - .with_attribute("testBool") - .unwrap() - .with_attribute("testInt") - }) - .unwrap() - .build(), - ) + CliModel::from( + ChronicleDomainDef::build("test") + .with_attribute_type("testString", None, PrimitiveType::String) + .unwrap() + .with_attribute_type("testBool", None, PrimitiveType::Bool) + .unwrap() + .with_attribute_type("testInt", None, PrimitiveType::Int) + .unwrap() + .with_attribute_type("testJSON", None, PrimitiveType::JSON) + .unwrap() + .with_activity("testActivity", None, |b| { + b.with_attribute("testString") + .unwrap() + .with_attribute("testBool") + .unwrap() + .with_attribute("testInt") + }) + .unwrap() + .with_agent("testAgent", None, |b| { + b.with_attribute("testString") + .unwrap() + .with_attribute("testBool") + .unwrap() + .with_attribute("testInt") + }) + .unwrap() + .with_entity("testEntity", None, |b| { + b.with_attribute("testString") + .unwrap() + .with_attribute("testBool") + .unwrap() + .with_attribute("testInt") + }) + .unwrap() + .build(), + ) } #[tokio::test] async fn agent_define() { - let command_line = r#"chronicle test-agent-agent define test_agent --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#; + let command_line = r#"chronicle test-agent-agent define test_agent --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#; - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &parse_and_execute(command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap() ).unwrap() , @r###" @@ -97,12 +97,12 @@ async fn agent_define() { #[tokio::test] async fn agent_define_id() { - let id = ChronicleIri::from(common::prov::AgentId::from_external_id("test_agent")); - let command_line = format!( - r#"chronicle test-agent-agent define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "# - ); + let id = ChronicleIri::from(common::prov::AgentId::from_external_id("test_agent")); + let command_line = format!( + r#"chronicle test-agent-agent define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "# + ); - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &parse_and_execute(&command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap() ).unwrap() , @r###" @@ -126,14 +126,14 @@ async fn agent_define_id() { #[tokio::test] async fn agent_use() { - let mut api = 
test_api().await; + let mut api = test_api().await; - // note, if you don't supply all three types of attribute this won't run - let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 23 "#; + // note, if you don't supply all three types of attribute this won't run + let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 23 "#; - let cmd = get_api_cmd(command_line); + let cmd = get_api_cmd(command_line); - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap() ).unwrap() , @r###" @@ -154,20 +154,20 @@ async fn agent_use() { } "###); - let id = AgentId::from_external_id("testagent"); + let id = AgentId::from_external_id("testagent"); - let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#); - let cmd = get_api_cmd(&command_line); + let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#); + let cmd = get_api_cmd(&command_line); - api.dispatch(cmd, AuthId::chronicle()).await.unwrap(); + api.dispatch(cmd, AuthId::chronicle()).await.unwrap(); - let id = ActivityId::from_external_id("testactivity"); - let command_line = format!( - r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "# - ); - let cmd = get_api_cmd(&command_line); + let id = ActivityId::from_external_id("testactivity"); + let command_line = format!( + r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "# + ); + let cmd = get_api_cmd(&command_line); - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap() ).unwrap() , @r###" @@ -204,10 +204,10 @@ async fn agent_use() { #[tokio::test] async fn entity_define() { - let command_line = r#"chronicle test-entity-entity define test_entity --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#; - let _delta = parse_and_execute(command_line, test_cli_model()); + let command_line = r#"chronicle test-entity-entity define test_entity --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#; + let _delta = parse_and_execute(command_line, test_cli_model()); - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &parse_and_execute(command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap() ).unwrap() , @r###" @@ -231,12 +231,12 @@ async fn entity_define() { #[tokio::test] async fn entity_define_id() { - let id = ChronicleIri::from(common::prov::EntityId::from_external_id("test_entity")); - let command_line = format!( - r#"chronicle test-entity-entity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "# - ); + let id = ChronicleIri::from(common::prov::EntityId::from_external_id("test_entity")); + let command_line = format!( + r#"chronicle test-entity-entity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "# + ); - insta::assert_snapshot!( + insta::assert_snapshot!( serde_json::to_string_pretty( &parse_and_execute(&command_line, 
@@ -231,12 +231,12 @@ async fn entity_define() {
 
 #[tokio::test]
 async fn entity_define_id() {
-    let id = ChronicleIri::from(common::prov::EntityId::from_external_id("test_entity"));
-    let command_line = format!(
-        r#"chronicle test-entity-entity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "#
-    );
+	let id = ChronicleIri::from(common::prov::EntityId::from_external_id("test_entity"));
+	let command_line = format!(
+		r#"chronicle test-entity-entity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "#
+	);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&parse_and_execute(&command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -260,17 +260,17 @@ async fn entity_define_id() {
 
 #[tokio::test]
 async fn entity_derive_abstract() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
-    let used_entity_id = EntityId::from_external_id("testusedentity");
+	let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
+	let used_entity_id = EntityId::from_external_id("testusedentity");
 
-    let command_line = format!(
-        r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let command_line = format!(
+		r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -301,17 +301,17 @@ async fn entity_derive_abstract() {
 
 #[tokio::test]
 async fn entity_derive_primary_source() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
-    let used_entity_id = EntityId::from_external_id("testusedentity");
+	let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
+	let used_entity_id = EntityId::from_external_id("testusedentity");
 
-    let command_line = format!(
-        r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype primary-source "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let command_line = format!(
+		r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype primary-source "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -342,17 +342,17 @@ async fn entity_derive_primary_source() {
 
 #[tokio::test]
 async fn entity_derive_revision() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
-    let used_entity_id = EntityId::from_external_id("testusedentity");
+	let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
+	let used_entity_id = EntityId::from_external_id("testusedentity");
 
-    let command_line = format!(
-        r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype revision "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let command_line = format!(
+		r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype revision "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -383,17 +383,17 @@ async fn entity_derive_revision() {
 
 #[tokio::test]
 async fn entity_derive_quotation() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
-    let used_entity_id = EntityId::from_external_id("testusedentity");
+	let generated_entity_id = EntityId::from_external_id("testgeneratedentity");
+	let used_entity_id = EntityId::from_external_id("testusedentity");
 
-    let command_line = format!(
-        r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype quotation "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let command_line = format!(
+		r#"chronicle test-entity-entity derive {generated_entity_id} {used_entity_id} --namespace testns --subtype quotation "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -424,9 +424,9 @@ async fn entity_derive_quotation() {
 
 #[tokio::test]
 async fn activity_define() {
-    let command_line = r#"chronicle test-activity-activity define test_activity --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#;
+	let command_line = r#"chronicle test-activity-activity define test_activity --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns "#;
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&parse_and_execute(command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -450,12 +450,12 @@ async fn activity_define() {
 
 #[tokio::test]
 async fn activity_define_id() {
-    let id = ChronicleIri::from(common::prov::ActivityId::from_external_id("test_activity"));
-    let command_line = format!(
-        r#"chronicle test-activity-activity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "#
-    );
+	let id = ChronicleIri::from(common::prov::ActivityId::from_external_id("test_activity"));
+	let command_line = format!(
+		r#"chronicle test-activity-activity define --test-bool-attr false --test-string-attr "test" --test-int-attr 23 --namespace testns --id {id} "#
+	);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&parse_and_execute(&command_line, test_cli_model()).await.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -479,25 +479,25 @@ async fn activity_define_id() {
 
 #[tokio::test]
 async fn activity_start() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
-    let cmd = get_api_cmd(command_line);
+	let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
+	let cmd = get_api_cmd(command_line);
 
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
-    let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
-    let cmd = get_api_cmd(&command_line);
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
+	let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
+	let cmd = get_api_cmd(&command_line);
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let id = ChronicleIri::from(ActivityId::from_external_id("testactivity"));
-    let command_line = format!(
-        r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let id = ChronicleIri::from(ActivityId::from_external_id("testactivity"));
+	let command_line = format!(
+		r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -534,33 +534,33 @@ async fn activity_start() {
 
 #[tokio::test]
 async fn activity_end() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
-    let cmd = get_api_cmd(command_line);
+	let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
+	let cmd = get_api_cmd(command_line);
 
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
-    let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
-    let cmd = get_api_cmd(&command_line);
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
+	let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
+	let cmd = get_api_cmd(&command_line);
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let id = ChronicleIri::from(ActivityId::from_external_id("testactivity"));
-    let command_line = format!(
-        r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "#
-    );
-    let cmd = get_api_cmd(&command_line);
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	let id = ChronicleIri::from(ActivityId::from_external_id("testactivity"));
+	let command_line = format!(
+		r#"chronicle test-activity-activity start {id} --namespace testns --time 2014-07-08T09:10:11Z "#
+	);
+	let cmd = get_api_cmd(&command_line);
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    // Should end the last opened activity
-    let id = ActivityId::from_external_id("testactivity");
-    let command_line = format!(
-        r#"chronicle test-activity-activity end --namespace testns --time 2014-08-09T09:10:12Z {id} "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	// Should end the last opened activity
+	let id = ActivityId::from_external_id("testactivity");
+	let command_line = format!(
+		r#"chronicle test-activity-activity end --namespace testns --time 2014-08-09T09:10:12Z {id} "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
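The `--time` values above are RFC 3339 timestamps, and the CLI matcher (later in this diff) converts them with a plain `t.parse()`; the `chrono::ParseError` variant on `CliError` tells us the target is a chrono type. A standalone sketch of that conversion, assuming `chrono = "0.4"`:

```rust
// Sketch of the timestamp handling implied by `--time 2014-07-08T09:10:11Z`.
use chrono::{DateTime, Utc};

fn main() -> Result<(), chrono::ParseError> {
    // FromStr for DateTime<Utc> accepts RFC 3339 input.
    let start: DateTime<Utc> = "2014-07-08T09:10:11Z".parse()?;
    let end: DateTime<Utc> = "2014-08-09T09:10:12Z".parse()?;
    assert!(end > start); // the activity ends after it starts
    Ok(())
}
```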
@@ -598,21 +598,21 @@ async fn activity_end() {
 
 #[tokio::test]
 async fn activity_generate() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let command_line = r#"chronicle test-activity-activity define testactivity --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
-    let cmd = get_api_cmd(command_line);
+	let command_line = r#"chronicle test-activity-activity define testactivity --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
+	let cmd = get_api_cmd(command_line);
 
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let activity_id = ActivityId::from_external_id("testactivity");
-    let entity_id = EntityId::from_external_id("testentity");
-    let command_line = format!(
-        r#"chronicle test-activity-activity generate --namespace testns {entity_id} {activity_id} "#
-    );
-    let cmd = get_api_cmd(&command_line);
+	let activity_id = ActivityId::from_external_id("testactivity");
+	let entity_id = EntityId::from_external_id("testentity");
+	let command_line = format!(
+		r#"chronicle test-activity-activity generate --namespace testns {entity_id} {activity_id} "#
+	);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
@@ -632,31 +632,31 @@ async fn activity_generate() {
 
 #[tokio::test]
 async fn activity_use() {
-    let mut api = test_api().await;
+	let mut api = test_api().await;
 
-    let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
-    let cmd = get_api_cmd(command_line);
+	let command_line = r#"chronicle test-agent-agent define testagent --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
+	let cmd = get_api_cmd(command_line);
 
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
-    let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
-    let cmd = get_api_cmd(&command_line);
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	let id = ChronicleIri::from(AgentId::from_external_id("testagent"));
+	let command_line = format!(r#"chronicle test-agent-agent use --namespace testns {id} "#);
+	let cmd = get_api_cmd(&command_line);
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let command_line = r#"chronicle test-activity-activity define testactivity --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
-    let cmd = get_api_cmd(command_line);
-    api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
+	let command_line = r#"chronicle test-activity-activity define testactivity --namespace testns --test-string-attr "test" --test-bool-attr true --test-int-attr 40 "#;
+	let cmd = get_api_cmd(command_line);
+	api.dispatch(cmd, AuthId::chronicle()).await.unwrap();
 
-    let activity_id = ActivityId::from_external_id("testactivity");
-    let entity_id = EntityId::from_external_id("testentity");
-    let command_line = format!(
-        r#"chronicle test-activity-activity use --namespace testns {entity_id} {activity_id} "#
-    );
+	let activity_id = ActivityId::from_external_id("testactivity");
+	let entity_id = EntityId::from_external_id("testentity");
+	let command_line = format!(
+		r#"chronicle test-activity-activity use --namespace testns {entity_id} {activity_id} "#
+	);
 
-    let cmd = get_api_cmd(&command_line);
+	let cmd = get_api_cmd(&command_line);
 
-    insta::assert_snapshot!(
+	insta::assert_snapshot!(
 	serde_json::to_string_pretty(
 	&api.dispatch(cmd, AuthId::chronicle()).await.unwrap().unwrap().0.to_json().compact_stable_order().await.unwrap()
 	).unwrap()
 	, @r###"
diff --git a/crates/chronicle-test-infrastructure/src/substitutes/mockchain.rs b/crates/chronicle-test-infrastructure/src/substitutes/mockchain.rs
index a32a6b3fd..b02ac9221 100644
--- a/crates/chronicle-test-infrastructure/src/substitutes/mockchain.rs
+++ b/crates/chronicle-test-infrastructure/src/substitutes/mockchain.rs
@@ -1,8 +1,8 @@
 use frame_support::traits::{ConstU16, ConstU64};
 use sp_core::H256;
 use sp_runtime::{
-    traits::{BlakeTwo256, IdentityLookup},
-    BuildStorage,
+	traits::{BlakeTwo256, IdentityLookup},
+	BuildStorage,
 };
 
 type Block = frame_system::mocking::MockBlock<Test>;
@@ -17,44 +17,44 @@ frame_support::construct_runtime!(
 );
 
 impl frame_system::Config for Test {
-    type AccountData = ();
-    type AccountId = u64;
-    type BaseCallFilter = frame_support::traits::Everything;
-    type Block = Block;
-    type BlockHashCount = ConstU64<250>;
-    type BlockLength = ();
-    type BlockWeights = ();
-    type DbWeight = ();
-    type Hash = H256;
-    type Hashing = BlakeTwo256;
-    type Lookup = IdentityLookup<Self::AccountId>;
-    type MaxConsumers = frame_support::traits::ConstU32<16>;
-    type Nonce = u64;
-    type OnKilledAccount = ();
-    type OnNewAccount = ();
-    type OnSetCode = ();
-    type PalletInfo = PalletInfo;
-    type RuntimeCall = RuntimeCall;
-    type RuntimeEvent = RuntimeEvent;
-    type RuntimeOrigin = RuntimeOrigin;
-    type SS58Prefix = ConstU16<42>;
-    type SystemWeightInfo = ();
-    type Version = ();
-    type RuntimeTask = ();
-    type SingleBlockMigrations = ();
-    type MultiBlockMigrator = ();
-    type PreInherents = ();
-    type PostInherents = ();
-    type PostTransactions = ();
+	type AccountData = ();
+	type AccountId = u64;
+	type BaseCallFilter = frame_support::traits::Everything;
+	type Block = Block;
+	type BlockHashCount = ConstU64<250>;
+	type BlockLength = ();
+	type BlockWeights = ();
+	type DbWeight = ();
+	type Hash = H256;
+	type Hashing = BlakeTwo256;
+	type Lookup = IdentityLookup<Self::AccountId>;
+	type MaxConsumers = frame_support::traits::ConstU32<16>;
+	type MultiBlockMigrator = ();
+	type Nonce = u64;
+	type OnKilledAccount = ();
+	type OnNewAccount = ();
+	type OnSetCode = ();
+	type PalletInfo = PalletInfo;
+	type PostInherents = ();
+	type PostTransactions = ();
+	type PreInherents = ();
+	type RuntimeCall = RuntimeCall;
+	type RuntimeEvent = RuntimeEvent;
+	type RuntimeOrigin = RuntimeOrigin;
+	type RuntimeTask = ();
+	type SS58Prefix = ConstU16<42>;
+	type SingleBlockMigrations = ();
+	type SystemWeightInfo = ();
+	type Version = ();
 }
 
 impl pallet_chronicle::Config for Test {
-    type OperationSubmission = protocol_substrate_chronicle::common::ledger::OperationSubmission;
-    type RuntimeEvent = RuntimeEvent;
-    type WeightInfo = ();
+	type OperationSubmission = protocol_substrate_chronicle::common::ledger::OperationSubmission;
+	type RuntimeEvent = RuntimeEvent;
+	type WeightInfo = ();
 }
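The mock runtime above is driven through `sp_io::TestExternalities`, exactly as `Stubstrate::do_submit` does later in this diff. A smoke-test sketch of the pattern (using `new_test_ext`, defined just below):

```rust
// Sketch of exercising the mock runtime; mirrors Stubstrate::do_submit.
#[test]
fn mock_runtime_smoke() {
    new_test_ext().execute_with(|| {
        // Events are only recorded for block numbers > 0.
        System::set_block_number(1);
        // Pallet extrinsics can now be dispatched, e.g.:
        // ChronicleModule::apply(RuntimeOrigin::signed(1), submission).unwrap();
        assert_eq!(System::block_number(), 1);
    });
}
```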
 
 // Build genesis storage according to the mock runtime.
 pub fn new_test_ext() -> sp_io::TestExternalities {
-    frame_system::GenesisConfig::<Test>::default().build_storage().unwrap().into()
+	frame_system::GenesisConfig::<Test>::default().build_storage().unwrap().into()
 }
diff --git a/crates/chronicle-test-infrastructure/src/substitutes/mod.rs b/crates/chronicle-test-infrastructure/src/substitutes/mod.rs
index 2fc550c87..d77442d97 100644
--- a/crates/chronicle-test-infrastructure/src/substitutes/mod.rs
+++ b/crates/chronicle-test-infrastructure/src/substitutes/mod.rs
@@ -3,171 +3,171 @@ mod stubstrate;
 
 use crate::substitutes::stubstrate::Stubstrate;
 use api::{
-    ApiError, commands::{ApiCommand, ApiResponse},
+	commands::{ApiCommand, ApiResponse},
+	ApiError,
 };
 use common::{
-    identity::AuthId,
-    prov::{ChronicleTransactionId, ProvModel},
+	identity::AuthId,
+	prov::{ChronicleTransactionId, ProvModel},
 };
 use pallet_chronicle::NamespaceId;
 use uuid::Uuid;
 
 use chronicle_signing::{
-    BATCHER_NAMESPACE, CHRONICLE_NAMESPACE, chronicle_secret_names, ChronicleSecretsOptions,
-    ChronicleSigning,
+	chronicle_secret_names, ChronicleSecretsOptions, ChronicleSigning, BATCHER_NAMESPACE,
+	CHRONICLE_NAMESPACE,
 };
 use diesel::{
-    Connection,
-    PgConnection, r2d2::{ConnectionManager, Pool},
+	r2d2::{ConnectionManager, Pool},
+	Connection, PgConnection,
 };
-use testcontainers::{Container, images::postgres::Postgres};
+use testcontainers::{images::postgres::Postgres, Container};
 
+use api::{Api, ApiDispatch, UuidGen};
 use lazy_static::lazy_static;
 use testcontainers::clients;
-use api::{Api, UuidGen};
-use api::ApiDispatch;
 
 lazy_static! {
 	static ref CLIENT: clients::Cli = clients::Cli::default();
 }
 
 pub struct TemporaryDatabase<'a> {
-    db_uris: Vec<String>,
-    container: Container<'a, Postgres>,
+	db_uris: Vec<String>,
+	container: Container<'a, Postgres>,
 }
 
 impl<'a> Drop for TemporaryDatabase<'a> {
-    #[tracing::instrument(skip(self))]
-    fn drop(&mut self) {
-        self.container.stop();
-    }
+	#[tracing::instrument(skip(self))]
+	fn drop(&mut self) {
+		self.container.stop();
+	}
}
 
 impl<'a> TemporaryDatabase<'a> {
-    pub fn connection_pool(&self) -> Result<Pool<ConnectionManager<PgConnection>>, r2d2::Error> {
-        let db_uri = self
-            .db_uris
-            .iter()
-            .find(|db_uri| PgConnection::establish(db_uri).is_ok())
-            .expect("cannot establish connection");
-        Pool::builder().build(ConnectionManager::<PgConnection>::new(db_uri))
-    }
+	pub fn connection_pool(&self) -> Result<Pool<ConnectionManager<PgConnection>>, r2d2::Error> {
+		let db_uri = self
+			.db_uris
+			.iter()
+			.find(|db_uri| PgConnection::establish(db_uri).is_ok())
+			.expect("cannot establish connection");
+		Pool::builder().build(ConnectionManager::<PgConnection>::new(db_uri))
+	}
 }
 
 impl<'a> Default for TemporaryDatabase<'a> {
-    fn default() -> Self {
-        let container = CLIENT.run(Postgres::default());
-        const PORT: u16 = 5432;
-        Self {
-            db_uris: vec![
-                format!("postgresql://postgres@127.0.0.1:{}/", container.get_host_port_ipv4(PORT)),
-                format!("postgresql://postgres@{}:{}/", container.get_bridge_ip_address(), PORT),
-            ],
-            container,
-        }
-    }
+	fn default() -> Self {
+		let container = CLIENT.run(Postgres::default());
+		const PORT: u16 = 5432;
+		Self {
+			db_uris: vec![
+				format!("postgresql://postgres@127.0.0.1:{}/", container.get_host_port_ipv4(PORT)),
+				format!("postgresql://postgres@{}:{}/", container.get_bridge_ip_address(), PORT),
+			],
+			container,
+		}
+	}
 }
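`TemporaryDatabase` records two candidate URIs because only one is reachable depending on whether the tests run on the host (host-mapped port) or inside another container (bridge IP); `connection_pool` simply probes them in order. The probe, isolated as a sketch using the same diesel call as the source:

```rust
// Sketch of the URI-probing idea in TemporaryDatabase::connection_pool:
// return the first candidate that accepts a connection.
use diesel::{Connection, PgConnection};

fn first_reachable(db_uris: &[String]) -> Option<&String> {
    db_uris.iter().find(|uri| PgConnection::establish(uri).is_ok())
}
```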
 pub struct TestDispatch<'a> {
-    api: ApiDispatch,
-    db: TemporaryDatabase<'a>,
-    _substrate: Stubstrate,
+	api: ApiDispatch,
+	db: TemporaryDatabase<'a>,
+	_substrate: Stubstrate,
 }
 
 impl<'a> TestDispatch<'a> {
-    /// Returns a reference to the ApiDispatch.
-    pub fn api_dispatch(&self) -> &ApiDispatch {
-        &self.api
-    }
-
-    /// Returns a reference to the TemporaryDatabase.
-    pub fn temporary_database(&self) -> &TemporaryDatabase<'a> {
-        &self.db
-    }
+	/// Returns a reference to the ApiDispatch.
+	pub fn api_dispatch(&self) -> &ApiDispatch {
+		&self.api
+	}
+
+	/// Returns a reference to the TemporaryDatabase.
+	pub fn temporary_database(&self) -> &TemporaryDatabase<'a> {
+		&self.db
+	}
 }
 
 impl<'a> TestDispatch<'a> {
-    pub async fn dispatch(
-        &mut self,
-        command: ApiCommand,
-        identity: AuthId,
-    ) -> Result<Option<(Box<ProvModel>, ChronicleTransactionId)>, ApiError> {
-        // We can sort of get final on chain state here by using a map of subject to model
-        match self.api.dispatch(command, identity).await? {
-            ApiResponse::Submission { .. } | ApiResponse::ImportSubmitted { .. } => {
-                // Recv until we get a commit notification
-                loop {
-                    let commit = self.api.notify_commit.subscribe().recv().await.unwrap();
-                    match commit {
-                        common::ledger::SubmissionStage::Submitted(Ok(_)) => continue,
-                        common::ledger::SubmissionStage::Committed(commit, _id) =>
-                            return Ok(Some((commit.delta, commit.tx_id))),
-                        common::ledger::SubmissionStage::Submitted(Err(e)) => panic!("{e:?}"),
-                        common::ledger::SubmissionStage::NotCommitted((_, tx, _id)) => {
-                            panic!("{tx:?}")
-                        }
-                    }
-                }
-            }
-            ApiResponse::AlreadyRecorded { subject: _, prov } =>
-                Ok(Some((prov, ChronicleTransactionId::default()))),
-            _ => Ok(None),
-        }
-    }
+	pub async fn dispatch(
+		&mut self,
+		command: ApiCommand,
+		identity: AuthId,
+	) -> Result<Option<(Box<ProvModel>, ChronicleTransactionId)>, ApiError> {
+		// We can sort of get final on chain state here by using a map of subject to model
+		match self.api.dispatch(command, identity).await? {
+			ApiResponse::Submission { .. } | ApiResponse::ImportSubmitted { .. } => {
+				// Recv until we get a commit notification
+				loop {
+					let commit = self.api.notify_commit.subscribe().recv().await.unwrap();
+					match commit {
+						common::ledger::SubmissionStage::Submitted(Ok(_)) => continue,
+						common::ledger::SubmissionStage::Committed(commit, _id) =>
+							return Ok(Some((commit.delta, commit.tx_id))),
+						common::ledger::SubmissionStage::Submitted(Err(e)) => panic!("{e:?}"),
+						common::ledger::SubmissionStage::NotCommitted((_, tx, _id)) => {
+							panic!("{tx:?}")
+						},
+					}
+				}
+			},
+			ApiResponse::AlreadyRecorded { subject: _, prov } =>
+				Ok(Some((prov, ChronicleTransactionId::default()))),
+			_ => Ok(None),
+		}
+	}
 }
 
 #[derive(Debug, Clone)]
 struct SameUuid;
 
 impl UuidGen for SameUuid {
-    fn uuid() -> Uuid {
-        Uuid::parse_str("5a0ab5b8-eeb7-4812-9fe3-6dd69bd20cea").unwrap()
-    }
+	fn uuid() -> Uuid {
+		Uuid::parse_str("5a0ab5b8-eeb7-4812-9fe3-6dd69bd20cea").unwrap()
+	}
 }
 
 pub async fn embed_substrate() -> Stubstrate {
-    stubstrate::Stubstrate::new()
+	stubstrate::Stubstrate::new()
 }
 
 pub async fn test_api<'a>() -> TestDispatch<'a> {
-    chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty);
-
-    let secrets = ChronicleSigning::new(
-        chronicle_secret_names(),
-        vec![
-            (CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
-            (BATCHER_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
-        ],
-    )
-    .await
-    .unwrap();
-
-    let embed_substrate = embed_substrate().await;
-    let database = TemporaryDatabase::default();
-    let pool = database.connection_pool().unwrap();
-
-    let liveness_check_interval = None;
-
-    let dispatch = Api::new(
-        pool,
-        embed_substrate.clone(),
-        SameUuid,
-        secrets,
-        vec![NamespaceId::from_external_id(
-            "testns",
-            Uuid::parse_str("11111111-1111-1111-1111-111111111111").unwrap(),
-        )],
-        None,
-        liveness_check_interval,
-    )
-    .await
-    .unwrap();
-
-    TestDispatch {
-        api: dispatch,
-        db: database, // share the lifetime
-        _substrate: embed_substrate,
-    }
+	chronicle_telemetry::telemetry(chronicle_telemetry::ConsoleLogging::Pretty);
+
+	let secrets = ChronicleSigning::new(
+		chronicle_secret_names(),
+		vec![
+			(CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
+			(BATCHER_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
+		],
+	)
+	.await
+	.unwrap();
+
+	let embed_substrate = embed_substrate().await;
+	let database = TemporaryDatabase::default();
+	let pool = database.connection_pool().unwrap();
+
+	let liveness_check_interval = None;
+
+	let dispatch = Api::new(
+		pool,
+		embed_substrate.clone(),
+		SameUuid,
+		secrets,
+		vec![NamespaceId::from_external_id(
+			"testns",
+			Uuid::parse_str("11111111-1111-1111-1111-111111111111").unwrap(),
+		)],
+		None,
+		liveness_check_interval,
+	)
+	.await
+	.unwrap();
+
+	TestDispatch {
+		api: dispatch,
+		db: database, // share the lifetime
+		_substrate: embed_substrate,
+	}
 }
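`TestDispatch::dispatch` makes the asynchronous submission look synchronous: it subscribes to the commit broadcast and loops until a terminal stage arrives, skipping intermediate `Submitted` notifications. A stripped-down sketch of that pattern with tokio's broadcast channel (`Stage` is a stand-in for `common::ledger::SubmissionStage`, not the real type):

```rust
// Minimal subscribe-until-committed loop, assuming tokio = "1".
use tokio::sync::broadcast;

#[derive(Clone, Debug)]
enum Stage {
    Submitted,
    Committed(String),
}

async fn await_commit(tx: &broadcast::Sender<Stage>) -> String {
    let mut rx = tx.subscribe();
    loop {
        match rx.recv().await.unwrap() {
            Stage::Submitted => continue, // not terminal; keep waiting
            Stage::Committed(delta) => return delta,
        }
    }
}
```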
diff --git a/crates/chronicle-test-infrastructure/src/substitutes/stubstrate.rs b/crates/chronicle-test-infrastructure/src/substitutes/stubstrate.rs
index a63d1bbe9..ceb69f236 100644
--- a/crates/chronicle-test-infrastructure/src/substitutes/stubstrate.rs
+++ b/crates/chronicle-test-infrastructure/src/substitutes/stubstrate.rs
@@ -3,151 +3,151 @@ use pallet_chronicle::{chronicle_core::OperationSubmission, ChronicleTransaction
 
 use super::mockchain::{new_test_ext, ChronicleModule, RuntimeEvent, RuntimeOrigin, System, Test};
 use protocol_abstract::{
-    BlockId, FromBlock, LedgerEvent, LedgerEventContext, LedgerReader, LedgerWriter,
-    Position, Span,
+	BlockId, FromBlock, LedgerEvent, LedgerEventContext, LedgerReader, LedgerWriter, Position, Span,
 };
 use protocol_substrate::{PolkadotConfig, SubstrateStateReader, SubxtClientError};
 use protocol_substrate_chronicle::{
-    protocol::WriteConsistency, ChronicleEvent, ChronicleEventCodec, ChronicleTransaction,
+	protocol::WriteConsistency, ChronicleEvent, ChronicleEventCodec, ChronicleTransaction,
 };
 use std::sync::{Arc, Mutex};
 use subxt::metadata::{DecodeWithMetadata, EncodeWithMetadata};
 
 #[derive(Clone)]
 pub struct Stubstrate {
-    rt: Arc<Mutex<sp_io::TestExternalities>>,
-    tx: tokio::sync::broadcast::Sender<ChronicleEvent>,
-    events: Arc<Mutex<Vec<ChronicleEvent>>>,
+	rt: Arc<Mutex<sp_io::TestExternalities>>,
+	tx: tokio::sync::broadcast::Sender<ChronicleEvent>,
+	events: Arc<Mutex<Vec<ChronicleEvent>>>,
 }
 
 impl Default for Stubstrate {
-    fn default() -> Self {
-        Self::new()
-    }
+	fn default() -> Self {
+		Self::new()
+	}
 }
 
 impl Stubstrate {
-    pub fn new() -> Self {
-        let (tx, _rx) = tokio::sync::broadcast::channel(100);
-        Self { rt: Arc::new(Mutex::new(new_test_ext())), tx, events: Arc::new(Mutex::new(vec![])) }
-    }
-
-    #[tracing::instrument(skip(self))]
-    pub fn readable_events(&self) -> Vec<ChronicleEvent> {
-        self.events.lock().unwrap().clone()
-    }
-
-    pub fn stored_prov(&self) -> Vec<ProvModel> {
-        self.rt.lock().unwrap().execute_with(|| {
-            pallet_chronicle::Provenance::<Test>::iter_values()
-                .map(|k| k.try_into().unwrap())
-                .collect()
-        })
-    }
+	pub fn new() -> Self {
+		let (tx, _rx) = tokio::sync::broadcast::channel(100);
+		Self { rt: Arc::new(Mutex::new(new_test_ext())), tx, events: Arc::new(Mutex::new(vec![])) }
+	}
+
+	#[tracing::instrument(skip(self))]
+	pub fn readable_events(&self) -> Vec<ChronicleEvent> {
+		self.events.lock().unwrap().clone()
+	}
+
+	pub fn stored_prov(&self) -> Vec<ProvModel> {
+		self.rt.lock().unwrap().execute_with(|| {
+			pallet_chronicle::Provenance::<Test>::iter_values()
+				.map(|k| k.try_into().unwrap())
+				.collect()
+		})
+	}
 }
 
 #[async_trait::async_trait]
 impl LedgerReader for Stubstrate {
-    type Error = SubxtClientError;
-    type Event = ChronicleEvent;
-    type EventCodec = ChronicleEventCodec;
-
-    async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> {
-        unimplemented!();
-    }
-
-    /// Subscribe to state updates from this ledger, starting at `offset`, and
-    /// ending the stream after `number_of_blocks` blocks have been processed.
-    async fn state_updates(
-        &self,
-        // The block to start from
-        from_block: FromBlock,
-        // The number of blocks to process before ending the stream
-        _number_of_blocks: Option,
-    ) -> Result, Self::Error> {
-        tracing::debug!("Starting state updates stream from block {:?}", from_block);
-        let rx = self.tx.subscribe();
-        let stream = tokio_stream::wrappers::BroadcastStream::new(rx)
-            .map(|event| {
-                let event = event.unwrap();
-                let correlation_id = event.correlation_id().into();
-                (event, correlation_id, BlockId::Unknown, Position::from(0), Span::NotTraced)
-            })
-            .boxed();
-        Ok(stream)
-    }
+	type Error = SubxtClientError;
+	type Event = ChronicleEvent;
+	type EventCodec = ChronicleEventCodec;
+
+	async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> {
+		unimplemented!();
+	}
+
+	/// Subscribe to state updates from this ledger, starting at `offset`, and
+	/// ending the stream after `number_of_blocks` blocks have been processed.
+	async fn state_updates(
+		&self,
+		// The block to start from
+		from_block: FromBlock,
+		// The number of blocks to process before ending the stream
+		_number_of_blocks: Option,
+	) -> Result, Self::Error> {
+		tracing::debug!("Starting state updates stream from block {:?}", from_block);
+		let rx = self.tx.subscribe();
+		let stream = tokio_stream::wrappers::BroadcastStream::new(rx)
+			.map(|event| {
+				let event = event.unwrap();
+				let correlation_id = event.correlation_id().into();
+				(event, correlation_id, BlockId::Unknown, Position::from(0), Span::NotTraced)
+			})
+			.boxed();
+		Ok(stream)
+	}
 }
 
 #[async_trait::async_trait]
 impl LedgerWriter for Stubstrate {
-    type Error = SubxtClientError;
-    type Submittable = OperationSubmission;
-    type Transaction = ChronicleTransaction;
-
-    // Minimally process the transaction offline to get a transaction id and submittable type
-    async fn pre_submit(
-        &self,
-        tx: Self::Transaction,
-    ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> {
-        Ok((
-            OperationSubmission {
-                correlation_id: tx.correlation_id.into_bytes(),
-                identity: tx.identity,
-                items: tx.operations,
-            },
-            tx.correlation_id.into(),
-        ))
-    }
-
-    // Submit is used to submit a transaction to the ledger
-    async fn do_submit(
-        &self,
-        _consistency: WriteConsistency,
-        submittable: Self::Submittable,
-    ) -> Result {
-        let correlation_id = submittable.correlation_id;
-        self.rt.lock().unwrap().execute_with(|| {
-            System::set_block_number(1);
-            ChronicleModule::apply(RuntimeOrigin::signed(1), submittable).unwrap();
-
-            let ev = System::events().last().unwrap().event.clone();
-
-            let opa_event = match ev {
-                RuntimeEvent::ChronicleModule(event) => match event {
-                    Event::<Test>::Applied(diff, identity, correlation_id) =>
-                        Some(ChronicleEvent::Committed { diff, identity, correlation_id }),
-                    Event::<Test>::Contradiction(contradiction, identity, correlation_id) =>
-                        Some(ChronicleEvent::Contradicted {
-                            contradiction,
-                            identity,
-                            correlation_id,
-                        }),
-                    _ => None,
-                },
-                _ => None,
-            };
-
-            if let Some(event) = opa_event {
-                self.events.lock().unwrap().push(event.clone());
-                self.tx.send(event).unwrap();
-            } else {
-                tracing::warn!("Received an event that is not an OpaEvent");
-            }
-        });
-
-        Ok(correlation_id.into())
-    }
+	type Error = SubxtClientError;
+	type Submittable = OperationSubmission;
+	type Transaction = ChronicleTransaction;
+
+	// Minimally process the transaction offline to get a transaction id and submittable type
+	async fn pre_submit(
+		&self,
+		tx: Self::Transaction,
+	) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> {
+		Ok((
+			OperationSubmission {
+				correlation_id: tx.correlation_id.into_bytes(),
+				identity: tx.identity,
+				items: tx.operations,
+			},
+			tx.correlation_id.into(),
+		))
+	}
+
+	// Submit is used to submit a transaction to the ledger
+	async fn do_submit(
+		&self,
+		_consistency: WriteConsistency,
+		submittable: Self::Submittable,
+	) -> Result {
+		let correlation_id = submittable.correlation_id;
+		self.rt.lock().unwrap().execute_with(|| {
+			System::set_block_number(1);
+			ChronicleModule::apply(RuntimeOrigin::signed(1), submittable).unwrap();
+
+			let ev = System::events().last().unwrap().event.clone();
+
+			let opa_event = match ev {
+				RuntimeEvent::ChronicleModule(event) => match event {
+					Event::<Test>::Applied(diff, identity, correlation_id) =>
+						Some(ChronicleEvent::Committed { diff, identity, correlation_id }),
+					Event::<Test>::Contradiction(contradiction, identity, correlation_id) =>
+						Some(ChronicleEvent::Contradicted {
+							contradiction,
+							identity,
+							correlation_id,
+						}),
+					_ => None,
+				},
+				_ => None,
+			};
+
+			if let Some(event) = opa_event {
+				self.events.lock().unwrap().push(event.clone());
+				self.tx.send(event).unwrap();
+			} else {
+				tracing::warn!("Received an event that is not an OpaEvent");
+			}
+		});
+
+		Ok(correlation_id.into())
+	}
 }
 
 #[async_trait::async_trait]
 impl SubstrateStateReader for Stubstrate {
-    type Error = SubxtClientError;
-
-    async fn get_state_entry(
-        &self,
-        _pallet_name: &str,
-        _entry_name: &str,
-        _address: K,
-    ) -> Result, Self::Error> {
-        unimplemented!()
-    }
+	type Error = SubxtClientError;
+
+	async fn get_state_entry(
+		&self,
+		_pallet_name: &str,
+		_entry_name: &str,
+		_address: K,
+	) -> Result, Self::Error> {
+		unimplemented!()
+	}
 }
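`do_submit` reads the last runtime event and narrows the pallet's `Applied`/`Contradiction` variants into `ChronicleEvent`s, logging and dropping anything else. The narrowing match, isolated with stand-in enums (not the real Chronicle types):

```rust
// Stand-in enums sketching the runtime-event -> ledger-event narrowing
// performed inside Stubstrate::do_submit.
enum RuntimeEvt { Chronicle(PalletEvt), Other }
enum PalletEvt { Applied(String), Contradiction(String), Noise }
enum LedgerEvt { Committed(String), Contradicted(String) }

fn narrow(ev: RuntimeEvt) -> Option<LedgerEvt> {
    match ev {
        RuntimeEvt::Chronicle(PalletEvt::Applied(diff)) => Some(LedgerEvt::Committed(diff)),
        RuntimeEvt::Chronicle(PalletEvt::Contradiction(c)) => Some(LedgerEvt::Contradicted(c)),
        _ => None, // anything else is logged and dropped in the real code
    }
}
```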
{0}")] - OpaPolicyLoader( - #[from] - #[source] - PolicyLoaderError, - ), - - #[error("OPA executor error: {0}")] - OpaExecutor( - #[from] - #[source] - OpaExecutorError, - ), - - #[error("Sawtooth communication error: {source}")] - SubstrateError { - #[from] - #[source] - source: SubxtClientError, - }, - - #[error("UTF-8 error: {0}")] - Utf8Error( - #[from] - #[source] - std::str::Utf8Error, - ), - - #[error("Url conversion: {0}")] - FromUrlError( - #[from] - #[source] - FromUrlError, - ), - - #[error("No on chain settings, but they are required by Chronicle")] - NoOnChainSettings, + #[error("Missing argument: {arg}")] + MissingArgument { arg: String }, + + #[error("Invalid argument {arg} expected {expected} got {got}")] + InvalidArgument { arg: String, expected: String, got: String }, + + #[error("Bad argument: {0}")] + ArgumentParsing( + #[from] + #[source] + clap::Error, + ), + + #[error("Invalid IRI: {0}")] + InvalidIri( + #[from] + #[source] + iri_string::validate::Error, + ), + + #[error("Invalid Chronicle IRI: {0}")] + InvalidChronicleIri( + #[from] + #[source] + ParseIriError, + ), + + #[error("Invalid JSON: {0}")] + InvalidJson( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Invalid URI: {0}")] + InvalidUri( + #[from] + #[source] + url::ParseError, + ), + + #[error("Invalid timestamp: {0}")] + InvalidTimestamp( + #[from] + #[source] + chrono::ParseError, + ), + + #[error("Invalid coercion: {arg}")] + InvalidCoercion { arg: String }, + + #[error("API failure: {0}")] + ApiError( + #[from] + #[source] + ApiError, + ), + + #[error("Secrets : {0}")] + Secrets( + #[from] + #[source] + SecretError, + ), + + #[error("IO error: {0}")] + InputOutput( + #[from] + #[source] + std::io::Error, + ), + + #[error("Invalid configuration file: {0}")] + ConfigInvalid( + #[from] + #[source] + toml::de::Error, + ), + + #[error("Invalid path: {path}")] + InvalidPath { path: String }, + + #[error("Invalid JSON-LD: {0}")] + Ld( + #[from] + #[source] + CompactionError, + ), + + #[error("Failure in commit notification stream: {0}")] + CommitNoticiationStream( + #[from] + #[source] + RecvError, + ), + + #[error("Policy loader error: {0}")] + OpaPolicyLoader( + #[from] + #[source] + PolicyLoaderError, + ), + + #[error("OPA executor error: {0}")] + OpaExecutor( + #[from] + #[source] + OpaExecutorError, + ), + + #[error("Sawtooth communication error: {source}")] + SubstrateError { + #[from] + #[source] + source: SubxtClientError, + }, + + #[error("UTF-8 error: {0}")] + Utf8Error( + #[from] + #[source] + std::str::Utf8Error, + ), + + #[error("Url conversion: {0}")] + FromUrlError( + #[from] + #[source] + FromUrlError, + ), + + #[error("No on chain settings, but they are required by Chronicle")] + NoOnChainSettings, } impl CliError { - pub fn missing_argument(arg: impl Into) -> Self { - Self::MissingArgument { arg: arg.into() } - } + pub fn missing_argument(arg: impl Into) -> Self { + Self::MissingArgument { arg: arg.into() } + } } /// Ugly but we need this until ! 
 /// Ugly but we need this until ! is stable, see
 impl From<Infallible> for CliError {
-    fn from(_: Infallible) -> Self {
-        unreachable!()
-    }
+	fn from(_: Infallible) -> Self {
+		unreachable!()
+	}
 }
 
 impl UFE for CliError {}
 
 pub trait SubCommand {
-    fn as_cmd(&self) -> Command;
-    fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError>;
+	fn as_cmd(&self) -> Command;
+	fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError>;
 }
 
 pub struct AttributeCliModel {
-    pub attribute: AttributeDef,
-    pub attribute_name: String,
-    pub attribute_help: String,
+	pub attribute: AttributeDef,
+	pub attribute_name: String,
+	pub attribute_help: String,
 }
 
 impl AttributeCliModel {
-    pub fn new(attribute: AttributeDef) -> Self {
-        Self {
-            attribute_name: format!("{}-attr", attribute.as_cli_name()),
-            attribute_help: format!("The value of the {} attribute", attribute.as_type_name()),
-            attribute,
-        }
-    }
-
-    pub fn as_arg(&self) -> Arg {
-        Arg::new(&*self.attribute_name)
-            .long(&self.attribute_name)
-            .help(&*self.attribute_help)
-            .takes_value(true)
-            .required(true)
-    }
+	pub fn new(attribute: AttributeDef) -> Self {
+		Self {
+			attribute_name: format!("{}-attr", attribute.as_cli_name()),
+			attribute_help: format!("The value of the {} attribute", attribute.as_type_name()),
+			attribute,
+		}
+	}
+
+	pub fn as_arg(&self) -> Arg {
+		Arg::new(&*self.attribute_name)
+			.long(&self.attribute_name)
+			.help(&*self.attribute_help)
+			.takes_value(true)
+			.required(true)
+	}
 }
 
 pub struct AgentCliModel {
-    pub agent: AgentDef,
-    pub attributes: Vec<AttributeCliModel>,
-    pub about: String,
-    pub define_about: String,
-    pub external_id: String,
+	pub agent: AgentDef,
+	pub attributes: Vec<AttributeCliModel>,
+	pub about: String,
+	pub define_about: String,
+	pub external_id: String,
 }
 
 impl AgentCliModel {
-    pub fn new(agent: &AgentDef) -> Self {
-        let attributes = agent
-            .attributes
-            .iter()
-            .map(|attr| AttributeCliModel::new(attr.clone()))
-            .collect();
-        Self {
+	pub fn new(agent: &AgentDef) -> Self {
+		let attributes = agent
+			.attributes
+			.iter()
+			.map(|attr| AttributeCliModel::new(attr.clone()))
+			.collect();
+		Self {
             agent: agent.clone(),
             attributes,
            external_id: agent.as_cli_name(),
            about: format!("Operations on {} agents", agent.as_type_name()),
            define_about: format!("Define an agent of type {} with the given external_id or IRI, redefinition with different attribute values is not allowed", agent.as_type_name()),
        }
-    }
+	}
 }
 
 fn name_from<'a, Id>(
-    args: &'a ArgMatches,
-    name_param: &str,
-    id_param: &str,
+	args: &'a ArgMatches,
+	name_param: &str,
+	id_param: &str,
 ) -> Result<ExternalId, CliError>
-    where
-        Id: 'a + TryFrom<String> + ExternalIdPart,
+where
+	Id: 'a + TryFrom<String> + ExternalIdPart,
 {
-    if let Some(external_id) = args.get_one::<String>(name_param) {
-        Ok(ExternalId::from(external_id))
-    } else if let Some(iri) = args.get_one::<String>(id_param) {
-        let id = Id::try_from(iri.to_string())?;
-        Ok(id.external_id_part().to_owned())
-    } else {
-        Err(CliError::MissingArgument { arg: format!("Missing {name_param} and {id_param}") })
-    }
+	if let Some(external_id) = args.get_one::<String>(name_param) {
+		Ok(ExternalId::from(external_id))
+	} else if let Some(iri) = args.get_one::<String>(id_param) {
+		let id = Id::try_from(iri.to_string())?;
+		Ok(id.external_id_part().to_owned())
+	} else {
+		Err(CliError::MissingArgument { arg: format!("Missing {name_param} and {id_param}") })
+	}
 }
 
 fn id_from<'a, Id>(args: &'a ArgMatches, id_param: &str) -> Result<Id, CliError>
-    where
-        Id: 'a + TryFrom<String> + ExternalIdPart,
+where
+	Id: 'a + TryFrom<String> + ExternalIdPart,
 {
-    if let Some(id) = args.get_one::<String>(id_param) {
-        Ok(Id::try_from(id.to_string())?)
-    } else {
-        Err(CliError::MissingArgument { arg: format!("Missing {id_param} ") })
-    }
+	if let Some(id) = args.get_one::<String>(id_param) {
+		Ok(Id::try_from(id.to_string())?)
+	} else {
+		Err(CliError::MissingArgument { arg: format!("Missing {id_param} ") })
+	}
 }
 
 fn id_from_option<'a, Id>(args: &'a ArgMatches, id_param: &str) -> Result<Option<Id>, CliError>
-    where
-        Id: 'a + TryFrom<String> + ExternalIdPart,
+where
+	Id: 'a + TryFrom<String> + ExternalIdPart,
 {
-    match id_from(args, id_param) {
-        Err(CliError::MissingArgument { .. }) => Ok(None),
-        Err(e) => Err(e),
-        Ok(id) => Ok(Some(id)),
-    }
+	match id_from(args, id_param) {
+		Err(CliError::MissingArgument { .. }) => Ok(None),
+		Err(e) => Err(e),
+		Ok(id) => Ok(Some(id)),
+	}
 }
 
 fn namespace_from(args: &ArgMatches) -> Result<ExternalId, CliError> {
-    if let Some(namespace) = args.get_one::<String>("namespace") {
-        Ok(ExternalId::from(namespace))
-    } else {
-        Err(CliError::MissingArgument { arg: "namespace".to_owned() })
-    }
+	if let Some(namespace) = args.get_one::<String>("namespace") {
+		Ok(ExternalId::from(namespace))
+	} else {
+		Err(CliError::MissingArgument { arg: "namespace".to_owned() })
+	}
 }
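`name_from` accepts either a bare external id or a full IRI, falling back from one flag to the other before erroring. The control flow, isolated as a sketch with plain `Option`s in place of clap's `ArgMatches` (the IRI handling here is an illustrative simplification, not Chronicle's parser):

```rust
// Sketch of the external-id-or-IRI fallback in name_from.
fn name_or_id(external_id: Option<&str>, iri: Option<&str>) -> Result<String, String> {
    if let Some(external_id) = external_id {
        Ok(external_id.to_owned())
    } else if let Some(iri) = iri {
        // Real code: Id::try_from(iri.to_string())?.external_id_part()
        iri.rsplit(':').next().map(str::to_owned).ok_or_else(|| "bad IRI".to_owned())
    } else {
        Err("missing external_id and id".to_owned())
    }
}
```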
 /// Deserialize to a JSON value and ensure that it matches the specified primitive type, we need to
 /// force any bare literal text to be quoted use of coercion afterwards will produce a proper json
 /// value type for non strings
 fn attribute_value_from_param(
-    arg: &str,
-    value: &str,
-    typ: PrimitiveType,
+	arg: &str,
+	value: &str,
+	typ: PrimitiveType,
 ) -> Result<serde_json::Value, CliError> {
-    let value = {
-        if !value.contains('"') {
-            format!(r#""{value}""#)
-        } else {
-            value.to_owned()
-        }
-    };
-
-    let mut value = serde_json::from_str(&value)?;
-    match typ {
-        PrimitiveType::Bool => {
-            if let Some(coerced) = valico::json_dsl::boolean()
-                .coerce(&mut value, ".")
-                .map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
-            {
-                Ok(coerced)
-            } else {
-                Ok(value)
-            }
-        }
-        PrimitiveType::String => {
-            if let Some(coerced) = valico::json_dsl::string()
-                .coerce(&mut value, ".")
-                .map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
-            {
-                Ok(coerced)
-            } else {
-                Ok(value)
-            }
-        }
-        PrimitiveType::Int => {
-            if let Some(coerced) = valico::json_dsl::i64()
-                .coerce(&mut value, ".")
-                .map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
-            {
-                Ok(coerced)
-            } else {
-                Ok(value)
-            }
-        }
-        PrimitiveType::JSON => {
-            if let Some(coerced) = valico::json_dsl::object()
-                .coerce(&mut value, ".")
-                .map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
-            {
-                Ok(coerced)
-            } else {
-                Ok(value)
-            }
-        }
-    }
+	let value = {
+		if !value.contains('"') {
+			format!(r#""{value}""#)
+		} else {
+			value.to_owned()
+		}
+	};
+
+	let mut value = serde_json::from_str(&value)?;
+	match typ {
+		PrimitiveType::Bool => {
+			if let Some(coerced) = valico::json_dsl::boolean()
+				.coerce(&mut value, ".")
+				.map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
+			{
+				Ok(coerced)
+			} else {
+				Ok(value)
+			}
+		},
+		PrimitiveType::String => {
+			if let Some(coerced) = valico::json_dsl::string()
+				.coerce(&mut value, ".")
+				.map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
+			{
+				Ok(coerced)
+			} else {
+				Ok(value)
+			}
+		},
+		PrimitiveType::Int => {
+			if let Some(coerced) = valico::json_dsl::i64()
+				.coerce(&mut value, ".")
+				.map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
+			{
+				Ok(coerced)
+			} else {
+				Ok(value)
+			}
+		},
+		PrimitiveType::JSON => {
+			if let Some(coerced) = valico::json_dsl::object()
+				.coerce(&mut value, ".")
+				.map_err(|_e| CliError::InvalidCoercion { arg: arg.to_owned() })?
+			{
+				Ok(coerced)
+			} else {
+				Ok(value)
+			}
+		},
+	}
 }
 
 fn attributes_from(
-    args: &ArgMatches,
-    typ: impl AsRef<str>,
-    attributes: &[AttributeCliModel],
+	args: &ArgMatches,
+	typ: impl AsRef<str>,
+	attributes: &[AttributeCliModel],
 ) -> Result<Attributes, CliError> {
-    Ok(Attributes::new(
-        Some(DomaintypeId::from_external_id(typ)),
-        attributes
-            .iter()
-            .map(|attr| {
-                let value = attribute_value_from_param(
-                    &attr.attribute_name,
-                    args.get_one::<String>(&attr.attribute_name).unwrap(),
-                    attr.attribute.primitive_type,
-                )?;
-                Ok::<_, CliError>(Attribute {
-                    typ: attr.attribute.as_type_name(),
-                    value: value.into(),
-                })
-            })
-            .collect::<Result<Vec<_>, _>>()?,
-    ))
+	Ok(Attributes::new(
+		Some(DomaintypeId::from_external_id(typ)),
+		attributes
+			.iter()
+			.map(|attr| {
+				let value = attribute_value_from_param(
+					&attr.attribute_name,
+					args.get_one::<String>(&attr.attribute_name).unwrap(),
+					attr.attribute.primitive_type,
+				)?;
+				Ok::<_, CliError>(Attribute {
+					typ: attr.attribute.as_type_name(),
+					value: value.into(),
+				})
+			})
+			.collect::<Result<Vec<_>, _>>()?,
+	))
 }
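The key trick in `attribute_value_from_param` is the quoting rule: a bare CLI literal is wrapped in quotes so serde_json can parse it at all, and type coercion (valico in the source) happens afterwards. That rule in isolation:

```rust
// The bare-literal quoting rule, isolated; assumes serde_json = "1".
fn quote_bare(value: &str) -> String {
    if !value.contains('"') {
        format!(r#""{value}""#) // force the bare text to parse as a JSON string
    } else {
        value.to_owned()
    }
}

fn main() {
    let v: serde_json::Value = serde_json::from_str(&quote_bare("23")).unwrap();
    // Parsed as the string "23" here; the i64 coercion step turns it into 23.
    assert_eq!(v, serde_json::Value::String("23".into()));
}
```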
 impl SubCommand for AgentCliModel {
-    fn as_cmd(&self) -> Command {
-        let cmd = Command::new(&*self.external_id).about(&*self.about);
+	fn as_cmd(&self) -> Command {
+		let cmd = Command::new(&*self.external_id).about(&*self.about);
 
-        let mut define = Command::new("define")
+	let mut define = Command::new("define")
        .about(&*self.define_about)
        .arg(Arg::new("external_id")
        .help("An externally meaningful identifier for the agent, e.g. a URI or relational id")
@@ -393,80 +395,80 @@ impl SubCommand for AgentCliModel {
                .takes_value(true),
        );
 
-        for attr in &self.attributes {
-            define = define.arg(attr.as_arg());
-        }
+		for attr in &self.attributes {
+			define = define.arg(attr.as_arg());
+		}
 
-        cmd.subcommand(define).subcommand(
-            Command::new("use")
-                .about("Make the specified agent the context for activities and entities")
-                .arg(
-                    Arg::new("id")
-                        .help("A valid chronicle agent IRI")
-                        .required(true)
-                        .takes_value(true),
-                )
-                .arg(
-                    Arg::new("namespace")
-                        .short('n')
-                        .long("namespace")
-                        .default_value("default")
-                        .required(false)
-                        .takes_value(true),
-                ),
-        )
-    }
-
-    fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError> {
-        if let Some(matches) = matches.subcommand_matches("define") {
-            return Ok(Some(ApiCommand::Agent(AgentCommand::Create {
-                id: name_from::<AgentId>(matches, "external_id", "id")?,
-                namespace: namespace_from(matches)?,
-                attributes: attributes_from(matches, &self.agent.external_id, &self.attributes)?,
-            })));
-        }
+		cmd.subcommand(define).subcommand(
+			Command::new("use")
+				.about("Make the specified agent the context for activities and entities")
+				.arg(
+					Arg::new("id")
+						.help("A valid chronicle agent IRI")
+						.required(true)
+						.takes_value(true),
+				)
+				.arg(
+					Arg::new("namespace")
+						.short('n')
+						.long("namespace")
+						.default_value("default")
+						.required(false)
+						.takes_value(true),
+				),
+		)
+	}
+
+	fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError> {
+		if let Some(matches) = matches.subcommand_matches("define") {
+			return Ok(Some(ApiCommand::Agent(AgentCommand::Create {
+				id: name_from::<AgentId>(matches, "external_id", "id")?,
+				namespace: namespace_from(matches)?,
+				attributes: attributes_from(matches, &self.agent.external_id, &self.attributes)?,
+			})));
+		}
 
-        if let Some(matches) = matches.subcommand_matches("use") {
-            return Ok(Some(ApiCommand::Agent(AgentCommand::UseInContext {
-                id: id_from(matches, "id")?,
-                namespace: namespace_from(matches)?,
-            })));
-        };
+		if let Some(matches) = matches.subcommand_matches("use") {
+			return Ok(Some(ApiCommand::Agent(AgentCommand::UseInContext {
+				id: id_from(matches, "id")?,
+				namespace: namespace_from(matches)?,
+			})));
+		};
 
-        Ok(None)
-    }
+		Ok(None)
+	}
 }
 
 pub struct ActivityCliModel {
-    pub activity: ActivityDef,
-    pub attributes: Vec<AttributeCliModel>,
-    pub about: String,
-    pub define_about: String,
-    pub external_id: String,
+	pub activity: ActivityDef,
+	pub attributes: Vec<AttributeCliModel>,
+	pub about: String,
+	pub define_about: String,
+	pub external_id: String,
 }
 
 impl ActivityCliModel {
-    fn new(activity: &ActivityDef) -> Self {
-        let attributes = activity
-            .attributes
-            .iter()
-            .map(|attr| AttributeCliModel::new(attr.clone()))
-            .collect();
-        Self {
+	fn new(activity: &ActivityDef) -> Self {
+		let attributes = activity
+			.attributes
+			.iter()
+			.map(|attr| AttributeCliModel::new(attr.clone()))
+			.collect();
+		Self {
            activity: activity.clone(),
            attributes,
            external_id: activity.as_cli_name(),
            about: format!("Operations on {} activities", activity.as_type_name()),
            define_about: format!("Define an activity of type {} with the given external_id or IRI, redefinition with different attribute values is not allowed", activity.as_type_name()),
        }
-    }
+	}
 }
 
 impl SubCommand for ActivityCliModel {
-    fn as_cmd(&self) -> Command {
-        let cmd = Command::new(&*self.external_id).about(&*self.about);
+	fn as_cmd(&self) -> Command {
+		let cmd = Command::new(&*self.external_id).about(&*self.about);
 
-        let mut define =
+	let mut define =
        Command::new("define")
        .about(&*self.define_about)
        .arg(Arg::new("external_id")
@@ -488,11 +490,11 @@ impl SubCommand for ActivityCliModel {
                .takes_value(true),
        );
 
-        for attr in &self.attributes {
-            define = define.arg(attr.as_arg());
-        }
+		for attr in &self.attributes {
+			define = define.arg(attr.as_arg());
+		}
 
-        cmd.subcommand(define)
+	cmd.subcommand(define)
        .subcommand(
            Command::new("start")
            .about("Record this activity as started at the specified time, if no time is specified the current time is used")
@@ -627,94 +629,94 @@ impl SubCommand for ActivityCliModel {
                .takes_value(true),
            )
        )
-    }
-
-    fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError> {
-        if let Some(matches) = matches.subcommand_matches("define") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::Create {
-                id: name_from::<ActivityId>(matches, "external_id", "id")?,
-                namespace: namespace_from(matches)?,
-                attributes: attributes_from(matches, &self.activity.external_id, &self.attributes)?,
-            })));
-        }
+	}
+
+	fn matches(&self, matches: &ArgMatches) -> Result<Option<ApiCommand>, CliError> {
+		if let Some(matches) = matches.subcommand_matches("define") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::Create {
+				id: name_from::<ActivityId>(matches, "external_id", "id")?,
+				namespace: namespace_from(matches)?,
+				attributes: attributes_from(matches, &self.activity.external_id, &self.attributes)?,
+			})));
+		}
 
-        if let Some(matches) = matches.subcommand_matches("start") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::Start {
-                id: id_from(matches, "id")?,
-                namespace: namespace_from(matches)?,
-                time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
-                agent: id_from_option(matches, "agent_id")?,
-            })));
-        };
-
-        if let Some(matches) = matches.subcommand_matches("end") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::End {
-                id: id_from(matches, "id")?,
-                namespace: namespace_from(matches)?,
-                time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
-                agent: id_from_option(matches, "agent_id")?,
-            })));
-        };
-
-        if let Some(matches) = matches.subcommand_matches("instant") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::Instant {
-                id: id_from(matches, "id")?,
-                namespace: namespace_from(matches)?,
-                time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
-                agent: id_from_option(matches, "agent_id")?,
-            })));
-        };
-
-        if let Some(matches) = matches.subcommand_matches("use") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::Use {
-                id: id_from(matches, "entity_id")?,
-                namespace: namespace_from(matches)?,
-                activity: id_from(matches, "activity_id")?,
-            })));
-        };
-
-        if let Some(matches) = matches.subcommand_matches("generate") {
-            return Ok(Some(ApiCommand::Activity(ActivityCommand::Generate {
-                id: id_from(matches, "entity_id")?,
-                namespace: namespace_from(matches)?,
-                activity: id_from(matches, "activity_id")?,
-            })));
-        };
-
-        Ok(None)
-    }
+		if let Some(matches) = matches.subcommand_matches("start") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::Start {
+				id: id_from(matches, "id")?,
+				namespace: namespace_from(matches)?,
+				time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
+				agent: id_from_option(matches, "agent_id")?,
+			})));
+		};
+
+		if let Some(matches) = matches.subcommand_matches("end") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::End {
+				id: id_from(matches, "id")?,
+				namespace: namespace_from(matches)?,
+				time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
+				agent: id_from_option(matches, "agent_id")?,
+			})));
+		};
+
+		if let Some(matches) = matches.subcommand_matches("instant") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::Instant {
+				id: id_from(matches, "id")?,
+				namespace: namespace_from(matches)?,
+				time: matches.get_one::<String>("time").map(|t| t.parse()).transpose()?,
+				agent: id_from_option(matches, "agent_id")?,
+			})));
+		};
+
+		if let Some(matches) = matches.subcommand_matches("use") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::Use {
+				id: id_from(matches, "entity_id")?,
+				namespace: namespace_from(matches)?,
+				activity: id_from(matches, "activity_id")?,
+			})));
+		};
+
+		if let Some(matches) = matches.subcommand_matches("generate") {
+			return Ok(Some(ApiCommand::Activity(ActivityCommand::Generate {
+				id: id_from(matches, "entity_id")?,
+				namespace: namespace_from(matches)?,
+				activity: id_from(matches, "activity_id")?,
+			})));
+		};
+
+		Ok(None)
+	}
 }
 
 pub struct EntityCliModel {
-    pub entity: EntityDef,
-    pub attributes: Vec<AttributeCliModel>,
-    pub about: String,
-    pub define_about: String,
-    pub external_id: String,
+	pub entity: EntityDef,
+	pub attributes: Vec<AttributeCliModel>,
+	pub about: String,
+	pub define_about: String,
+	pub external_id: String,
 }
 
 impl EntityCliModel {
-    pub fn new(entity: &EntityDef) -> Self {
-        let attributes = entity
-            .attributes
-            .iter()
-            .map(|attr| AttributeCliModel::new(attr.clone()))
-            .collect();
-        Self {
+	pub fn new(entity: &EntityDef) -> Self {
+		let attributes = entity
+			.attributes
+			.iter()
+			.map(|attr| AttributeCliModel::new(attr.clone()))
+			.collect();
+		Self {
            entity: entity.clone(),
            attributes,
            external_id: entity.as_cli_name(),
            about: format!("Operations on {} entities", entity.as_type_name()),
            define_about: format!("Define an entity of type {} with the given external_id or IRI, redefinition with different attribute values is not allowed", entity.as_type_name()),
        }
-    }
+	}
 }
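Each `matches` impl above is a chain of `subcommand_matches` probes that returns the first `ApiCommand` it can build. A reduced sketch of that dispatch shape, assuming the clap 3.x API this file uses (`subcommand_matches`, `get_one`); `Cmd` is a stand-in enum:

```rust
use clap::ArgMatches;

enum Cmd {
    Start { id: String },
    End { id: String },
}

// Probe subcommands in order; None means "not one of ours".
fn matches_to_cmd(m: &ArgMatches) -> Option<Cmd> {
    if let Some(m) = m.subcommand_matches("start") {
        return Some(Cmd::Start { id: m.get_one::<String>("id")?.clone() });
    }
    if let Some(m) = m.subcommand_matches("end") {
        return Some(Cmd::End { id: m.get_one::<String>("id")?.clone() });
    }
    None
}
```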
= Command::new(&self.external_id).about(&*self.about); + fn as_cmd(&self) -> Command { + let cmd = Command::new(&self.external_id).about(&*self.about); - let mut define = + let mut define = Command::new("define") .about(&*self.define_about) .arg(Arg::new("external_id") @@ -736,91 +738,91 @@ impl SubCommand for EntityCliModel { .takes_value(true), ); - for attr in &self.attributes { - define = define.arg(attr.as_arg()); - } + for attr in &self.attributes { + define = define.arg(attr.as_arg()); + } - cmd.subcommand(define).subcommand( - Command::new("derive") - .about("Derivation of entities from other entities") - .arg( - Arg::new("subtype") - .help("The derivation subtype") - .long("subtype") - .required(false) - .takes_value(true) - .value_parser(PossibleValuesParser::new([ - "revision", - "quotation", - "primary-source", - ])), - ) - .arg( - Arg::new("generated_entity_id") - .help("A valid chronicle entity IRI for the generated entity") - .takes_value(true) - .required(true), - ) - .arg( - Arg::new("used_entity_id") - .help("A valid chronicle entity IRI for the used entity") - .takes_value(true) - .required(true), - ) - .arg( - Arg::new("activity_id") - .help("The activity IRI that generated the entity") - .long("activity") - .takes_value(true) - .required(false), - ) - .arg( - Arg::new("namespace") - .short('n') - .long("namespace") - .default_value("default") - .required(false) - .takes_value(true), - ), - ) - } - - fn matches(&self, matches: &ArgMatches) -> Result, CliError> { - if let Some(matches) = matches.subcommand_matches("define") { - return Ok(Some(ApiCommand::Entity(EntityCommand::Create { - id: name_from::(matches, "external_id", "id")?, - namespace: namespace_from(matches)?, - attributes: attributes_from(matches, &self.entity.external_id, &self.attributes)?, - }))); - } + cmd.subcommand(define).subcommand( + Command::new("derive") + .about("Derivation of entities from other entities") + .arg( + Arg::new("subtype") + .help("The derivation subtype") + .long("subtype") + .required(false) + .takes_value(true) + .value_parser(PossibleValuesParser::new([ + "revision", + "quotation", + "primary-source", + ])), + ) + .arg( + Arg::new("generated_entity_id") + .help("A valid chronicle entity IRI for the generated entity") + .takes_value(true) + .required(true), + ) + .arg( + Arg::new("used_entity_id") + .help("A valid chronicle entity IRI for the used entity") + .takes_value(true) + .required(true), + ) + .arg( + Arg::new("activity_id") + .help("The activity IRI that generated the entity") + .long("activity") + .takes_value(true) + .required(false), + ) + .arg( + Arg::new("namespace") + .short('n') + .long("namespace") + .default_value("default") + .required(false) + .takes_value(true), + ), + ) + } + + fn matches(&self, matches: &ArgMatches) -> Result, CliError> { + if let Some(matches) = matches.subcommand_matches("define") { + return Ok(Some(ApiCommand::Entity(EntityCommand::Create { + id: name_from::(matches, "external_id", "id")?, + namespace: namespace_from(matches)?, + attributes: attributes_from(matches, &self.entity.external_id, &self.attributes)?, + }))); + } - if let Some(matches) = matches.subcommand_matches("derive") { - return Ok(Some(ApiCommand::Entity(EntityCommand::Derive { - namespace: namespace_from(matches)?, - id: id_from(matches, "generated_entity_id")?, - derivation: matches - .get_one::("subtype") - .map(|v| match v.as_str() { - "revision" => DerivationType::Revision, - "quotation" => DerivationType::Quotation, - "primary-source" => 
DerivationType::PrimarySource, - _ => unreachable!(), // Guaranteed by PossibleValuesParser - }) - .unwrap_or(DerivationType::None), - activity: id_from_option(matches, "activity_id")?, - used_entity: id_from(matches, "used_entity_id")?, - }))); - } + if let Some(matches) = matches.subcommand_matches("derive") { + return Ok(Some(ApiCommand::Entity(EntityCommand::Derive { + namespace: namespace_from(matches)?, + id: id_from(matches, "generated_entity_id")?, + derivation: matches + .get_one::("subtype") + .map(|v| match v.as_str() { + "revision" => DerivationType::Revision, + "quotation" => DerivationType::Quotation, + "primary-source" => DerivationType::PrimarySource, + _ => unreachable!(), // Guaranteed by PossibleValuesParser + }) + .unwrap_or(DerivationType::None), + activity: id_from_option(matches, "activity_id")?, + used_entity: id_from(matches, "used_entity_id")?, + }))); + } - Ok(None) - } + Ok(None) + } } pub struct CliModel { - pub domain: ChronicleDomainDef, - pub agents: Vec, - pub entities: Vec, - pub activities: Vec, + pub domain: ChronicleDomainDef, + pub agents: Vec, + pub entities: Vec, + pub activities: Vec, } pub const LONG_VERSION: &str = const_format::formatcp!( @@ -831,31 +833,23 @@ pub const LONG_VERSION: &str = const_format::formatcp!( ); impl From for CliModel { - fn from(val: ChronicleDomainDef) -> Self { - info!(chronicle_version = LONG_VERSION); - CliModel { - agents: val.agents.iter().map(AgentCliModel::new).collect(), - entities: val.entities.iter().map(EntityCliModel::new).collect(), - activities: val.activities.iter().map(ActivityCliModel::new).collect(), - domain: val, - } - } + fn from(val: ChronicleDomainDef) -> Self { + info!(chronicle_version = LONG_VERSION); + CliModel { + agents: val.agents.iter().map(AgentCliModel::new).collect(), + entities: val.entities.iter().map(EntityCliModel::new).collect(), + activities: val.activities.iter().map(ActivityCliModel::new).collect(), + domain: val, + } + } } impl SubCommand for CliModel { - fn as_cmd(&self) -> Command { - let mut app = Command::new("chronicle") + fn as_cmd(&self) -> Command { + let mut app = Command::new("chronicle") .version(LONG_VERSION) .author("Blockchain Technology Partners") .about("Write and query provenance data to distributed ledgers") - .arg( - Arg::new("enable-otel") - .short('i') - .long("enable-otel") - .value_name("enable-otel") - .takes_value(false) - .help("Instrument using OLTP environment"), - ) .arg(Arg::new("console-logging").long("console-logging") .takes_value(true) .possible_values(["pretty", "json"]) @@ -1053,48 +1047,48 @@ impl SubCommand for CliModel { ) ); - for agent in self.agents.iter() { - app = app.subcommand(agent.as_cmd()); - } - for activity in self.activities.iter() { - app = app.subcommand(activity.as_cmd()); - } - for entity in self.entities.iter() { - app = app.subcommand(entity.as_cmd()); - } - - #[cfg(not(feature = "devmode"))] - { - app = app.arg( - Arg::new("batcher-key-from-path") - .long("batcher-key-from-path") - .takes_value(true) - .value_parser(clap::builder::PathBufValueParser::new()) - .value_hint(ValueHint::DirPath) - .help("Path to a directory containing the key for signing batches") - .conflicts_with("batcher-key-from-vault") - .conflicts_with("batcher-key-generated"), - ); - - app = app.arg( - Arg::new("batcher-key-from-vault") - .long("batcher-key-from-vault") - .takes_value(false) - .help("Use Hashicorp Vault to store the batcher key") - .conflicts_with("batcher-key-from-path") - .conflicts_with("batcher-key-generated"), - ); - - app = 
app.arg( - Arg::new("batcher-key-generated") - .long("batcher-key-generated") - .takes_value(false) - .help("Generate the batcher key in memory") - .conflicts_with("batcher-key-from-path") - .conflicts_with("batcher-key-from-vault"), - ); + for agent in self.agents.iter() { + app = app.subcommand(agent.as_cmd()); + } + for activity in self.activities.iter() { + app = app.subcommand(activity.as_cmd()); + } + for entity in self.entities.iter() { + app = app.subcommand(entity.as_cmd()); + } - app = app.arg( + #[cfg(not(feature = "devmode"))] + { + app = app.arg( + Arg::new("batcher-key-from-path") + .long("batcher-key-from-path") + .takes_value(true) + .value_parser(clap::builder::PathBufValueParser::new()) + .value_hint(ValueHint::DirPath) + .help("Path to a directory containing the key for signing batches") + .conflicts_with("batcher-key-from-vault") + .conflicts_with("batcher-key-generated"), + ); + + app = app.arg( + Arg::new("batcher-key-from-vault") + .long("batcher-key-from-vault") + .takes_value(false) + .help("Use Hashicorp Vault to store the batcher key") + .conflicts_with("batcher-key-from-path") + .conflicts_with("batcher-key-generated"), + ); + + app = app.arg( + Arg::new("batcher-key-generated") + .long("batcher-key-generated") + .takes_value(false) + .help("Generate the batcher key in memory") + .conflicts_with("batcher-key-from-path") + .conflicts_with("batcher-key-from-vault"), + ); + + app = app.arg( Arg::new("chronicle-key-from-path") .long("chronicle-key-from-path") .takes_value(true) @@ -1105,102 +1099,102 @@ impl SubCommand for CliModel { .conflicts_with("chronicle-key-generated"), ); - app = app.arg( - Arg::new("chronicle-key-from-vault") - .long("chronicle-key-from-vault") - .takes_value(false) - .help("Use Hashicorp Vault to store the Chronicle key") - .conflicts_with("chronicle-key-from-path") - .conflicts_with("chronicle-key-generated"), - ); - - app = app.arg( - Arg::new("chronicle-key-generated") - .long("chronicle-key-generated") - .takes_value(false) - .help("Generate the Chronicle key in memory") - .conflicts_with("chronicle-key-from-path") - .conflicts_with("chronicle-key-from-vault"), - ); - - app = app.arg( - Arg::new("vault-address") - .long("vault-address") - .takes_value(true) - .value_hint(ValueHint::Url) - .help("URL for connecting to Hashicorp Vault") - .env("VAULT_ADDRESS"), - ); - - app = app.arg( - Arg::new("vault-token") - .long("vault-token") - .takes_value(true) - .help("Token for connecting to Hashicorp Vault") - .env("VAULT_TOKEN"), - ); - - app = app.arg( - Arg::new("vault-mount-path") - .long("vault-mount-path") - .takes_value(true) - .value_hint(ValueHint::DirPath) - .help("Mount path for vault secrets") - .env("VAULT_MOUNT_PATH"), - ); - - app.arg( - Arg::new("validator") - .long("validator") - .value_name("validator") - .value_hint(ValueHint::Url) - .help("Sets validator address") - .takes_value(true), - ) - .arg( - Arg::new("embedded-opa-policy") - .long("embedded-opa-policy") - .takes_value(false) - .help( - "Operate without an external OPA policy, using an embedded default policy", - ), - ) - } - #[cfg(feature = "devmode")] - { - app - } - } - - /// Iterate our possible subcommands via model and short circuit with the first one that matches - fn matches(&self, matches: &ArgMatches) -> Result, CliError> { - for (agent, matches) in self.agents.iter().filter_map(|agent| { - matches.subcommand_matches(&agent.external_id).map(|matches| (agent, matches)) - }) { - if let Some(cmd) = agent.matches(matches)? 
{ - return Ok(Some(cmd)); - } - } - for (entity, matches) in self.entities.iter().filter_map(|entity| { - matches.subcommand_matches(&entity.external_id).map(|matches| (entity, matches)) - }) { - if let Some(cmd) = entity.matches(matches)? { - return Ok(Some(cmd)); - } - } - for (activity, matches) in self.activities.iter().filter_map(|activity| { - matches - .subcommand_matches(&activity.external_id) - .map(|matches| (activity, matches)) - }) { - if let Some(cmd) = activity.matches(matches)? { - return Ok(Some(cmd)); - } - } - Ok(None) - } + app = app.arg( + Arg::new("chronicle-key-from-vault") + .long("chronicle-key-from-vault") + .takes_value(false) + .help("Use Hashicorp Vault to store the Chronicle key") + .conflicts_with("chronicle-key-from-path") + .conflicts_with("chronicle-key-generated"), + ); + + app = app.arg( + Arg::new("chronicle-key-generated") + .long("chronicle-key-generated") + .takes_value(false) + .help("Generate the Chronicle key in memory") + .conflicts_with("chronicle-key-from-path") + .conflicts_with("chronicle-key-from-vault"), + ); + + app = app.arg( + Arg::new("vault-address") + .long("vault-address") + .takes_value(true) + .value_hint(ValueHint::Url) + .help("URL for connecting to Hashicorp Vault") + .env("VAULT_ADDRESS"), + ); + + app = app.arg( + Arg::new("vault-token") + .long("vault-token") + .takes_value(true) + .help("Token for connecting to Hashicorp Vault") + .env("VAULT_TOKEN"), + ); + + app = app.arg( + Arg::new("vault-mount-path") + .long("vault-mount-path") + .takes_value(true) + .value_hint(ValueHint::DirPath) + .help("Mount path for vault secrets") + .env("VAULT_MOUNT_PATH"), + ); + + app.arg( + Arg::new("validator") + .long("validator") + .value_name("validator") + .value_hint(ValueHint::Url) + .help("Sets validator address") + .takes_value(true), + ) + .arg( + Arg::new("embedded-opa-policy") + .long("embedded-opa-policy") + .takes_value(false) + .help( + "Operate without an external OPA policy, using an embedded default policy", + ), + ) + } + #[cfg(feature = "devmode")] + { + app + } + } + + /// Iterate our possible subcommands via model and short circuit with the first one that matches + fn matches(&self, matches: &ArgMatches) -> Result, CliError> { + for (agent, matches) in self.agents.iter().filter_map(|agent| { + matches.subcommand_matches(&agent.external_id).map(|matches| (agent, matches)) + }) { + if let Some(cmd) = agent.matches(matches)? { + return Ok(Some(cmd)); + } + } + for (entity, matches) in self.entities.iter().filter_map(|entity| { + matches.subcommand_matches(&entity.external_id).map(|matches| (entity, matches)) + }) { + if let Some(cmd) = entity.matches(matches)? { + return Ok(Some(cmd)); + } + } + for (activity, matches) in self.activities.iter().filter_map(|activity| { + matches + .subcommand_matches(&activity.external_id) + .map(|matches| (activity, matches)) + }) { + if let Some(cmd) = activity.matches(matches)? 
{ + return Ok(Some(cmd)); + } + } + Ok(None) + } } pub fn cli(domain: ChronicleDomainDef) -> CliModel { - CliModel::from(domain) + CliModel::from(domain) } diff --git a/crates/chronicle/src/bootstrap/mod.rs b/crates/chronicle/src/bootstrap/mod.rs index 9ce72c26f..61a801c5e 100644 --- a/crates/chronicle/src/bootstrap/mod.rs +++ b/crates/chronicle/src/bootstrap/mod.rs @@ -1,17 +1,16 @@ use std::{ collections::{BTreeSet, HashMap}, - io::{self}, + io::{self, IsTerminal}, net::{SocketAddr, ToSocketAddrs}, str::FromStr, }; -use std::io::IsTerminal; use async_graphql::ObjectType; use clap::{ArgMatches, Command}; use clap_complete::{generate, Generator, Shell}; use diesel::{ - PgConnection, r2d2::{ConnectionManager, Pool}, + PgConnection, }; use futures::{Future, FutureExt, StreamExt}; use tracing::{debug, error, info, instrument, warn}; @@ -22,30 +21,25 @@ use api::{ chronicle_graphql::{ ChronicleApiServer, ChronicleGraphQl, EndpointSecurityConfiguration, JwksUri, SecurityConf, UserInfoUri, - }, commands::ApiResponse, StoreError, + }, + commands::ApiResponse, + Api, ApiDispatch, ApiError, StoreError, UuidGen, }; -use api::{Api, UuidGen}; -use api::ApiDispatch; -use api::ApiError; -use chronicle_persistence::database::{DatabaseConnector, get_connection_with_retry}; +use chronicle_persistence::database::{get_connection_with_retry, DatabaseConnector}; use chronicle_signing::{ - BATCHER_NAMESPACE, CHRONICLE_NAMESPACE, chronicle_secret_names, ChronicleSecretsOptions, - ChronicleSigning, + chronicle_secret_names, ChronicleSecretsOptions, ChronicleSigning, BATCHER_NAMESPACE, + CHRONICLE_NAMESPACE, }; use chronicle_telemetry::{self, ConsoleLogging}; pub use cli::*; -use common::{ - opa::{ - PolicyAddress, - std::{load_bytes_from_stdin, load_bytes_from_url}, - }, - prov::json_ld::ToJson, -}; use common::{ identity::AuthId, ledger::SubmissionStage, - opa::{OpaSettings, std::ExecutorContext}, - prov::{NamespaceId, operations::ChronicleOperation}, + opa::{ + std::{load_bytes_from_stdin, load_bytes_from_url, ExecutorContext}, + OpaSettings, PolicyAddress, + }, + prov::{json_ld::ToJson, operations::ChronicleOperation, NamespaceId}, }; #[cfg(feature = "devmode")] use embedded_substrate::EmbeddedSubstrate; @@ -61,42 +55,39 @@ pub mod opa; #[cfg(not(feature = "devmode"))] fn validator_address(options: &ArgMatches) -> Result, CliError> { - Ok(options - .value_of("validator") - .map(str::to_string) - .ok_or(CliError::MissingArgument { arg: "validator".to_owned() }) - .and_then(|s| Url::parse(&s).map_err(CliError::from)) - .map(|u| u.socket_addrs(|| Some(4004))) - .map_err(CliError::from)??) + Ok(options + .value_of("validator") + .map(str::to_string) + .ok_or(CliError::MissingArgument { arg: "validator".to_owned() }) + .and_then(|s| Url::parse(&s).map_err(CliError::from)) + .map(|u| u.socket_addrs(|| Some(4004))) + .map_err(CliError::from)??) 
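As an aside, every generated model in the hunk above follows the same dispatch shape: try each subcommand in turn and short-circuit on the first match. A minimal stand-alone sketch of that pattern follows; `Cmd`, `Error`, and `SubCommandLike` are illustrative stand-ins for `ApiCommand`, `CliError`, and `SubCommand`, not items from this patch.

    use clap::ArgMatches;

    // Stand-in types, for illustration only.
    pub struct Cmd;
    pub struct Error;

    pub trait SubCommandLike {
        fn matches(&self, matches: &ArgMatches) -> Result<Option<Cmd>, Error>;
    }

    // Try each generated model in turn and stop at the first one that yields a
    // command; `?` propagates the first error instead of continuing.
    pub fn dispatch(
        models: &[Box<dyn SubCommandLike>],
        matches: &ArgMatches,
    ) -> Result<Option<Cmd>, Error> {
        for model in models {
            if let Some(cmd) = model.matches(matches)? {
                return Ok(Some(cmd));
            }
        }
        Ok(None)
    }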
diff --git a/crates/chronicle/src/bootstrap/mod.rs b/crates/chronicle/src/bootstrap/mod.rs
index 9ce72c26f..61a801c5e 100644
--- a/crates/chronicle/src/bootstrap/mod.rs
+++ b/crates/chronicle/src/bootstrap/mod.rs
@@ -1,17 +1,16 @@
 use std::{
 	collections::{BTreeSet, HashMap},
-    io::{self},
+	io::{self, IsTerminal},
 	net::{SocketAddr, ToSocketAddrs},
 	str::FromStr,
 };
-use std::io::IsTerminal;

 use async_graphql::ObjectType;
 use clap::{ArgMatches, Command};
 use clap_complete::{generate, Generator, Shell};
 use diesel::{
-    PgConnection,
 	r2d2::{ConnectionManager, Pool},
+	PgConnection,
 };
 use futures::{Future, FutureExt, StreamExt};
 use tracing::{debug, error, info, instrument, warn};
@@ -22,30 +21,25 @@ use api::{
 	chronicle_graphql::{
 		ChronicleApiServer, ChronicleGraphQl, EndpointSecurityConfiguration, JwksUri,
 		SecurityConf, UserInfoUri,
-    }, commands::ApiResponse, StoreError,
+	},
+	commands::ApiResponse,
+	Api, ApiDispatch, ApiError, StoreError, UuidGen,
 };
-use api::{Api, UuidGen};
-use api::ApiDispatch;
-use api::ApiError;
-use chronicle_persistence::database::{DatabaseConnector, get_connection_with_retry};
+use chronicle_persistence::database::{get_connection_with_retry, DatabaseConnector};
 use chronicle_signing::{
-    BATCHER_NAMESPACE, CHRONICLE_NAMESPACE, chronicle_secret_names, ChronicleSecretsOptions,
-    ChronicleSigning,
+	chronicle_secret_names, ChronicleSecretsOptions, ChronicleSigning, BATCHER_NAMESPACE,
+	CHRONICLE_NAMESPACE,
 };
 use chronicle_telemetry::{self, ConsoleLogging};
 pub use cli::*;
-use common::{
-    opa::{
-        PolicyAddress,
-        std::{load_bytes_from_stdin, load_bytes_from_url},
-    },
-    prov::json_ld::ToJson,
-};
 use common::{
 	identity::AuthId,
 	ledger::SubmissionStage,
-    opa::{OpaSettings, std::ExecutorContext},
-    prov::{NamespaceId, operations::ChronicleOperation},
+	opa::{
+		std::{load_bytes_from_stdin, load_bytes_from_url, ExecutorContext},
+		OpaSettings, PolicyAddress,
+	},
+	prov::{json_ld::ToJson, operations::ChronicleOperation, NamespaceId},
 };
 #[cfg(feature = "devmode")]
 use embedded_substrate::EmbeddedSubstrate;
@@ -61,42 +55,39 @@ pub mod opa;

 #[cfg(not(feature = "devmode"))]
 fn validator_address(options: &ArgMatches) -> Result<Vec<SocketAddr>, CliError> {
-    Ok(options
-        .value_of("validator")
-        .map(str::to_string)
-        .ok_or(CliError::MissingArgument { arg: "validator".to_owned() })
-        .and_then(|s| Url::parse(&s).map_err(CliError::from))
-        .map(|u| u.socket_addrs(|| Some(4004)))
-        .map_err(CliError::from)??)
+	Ok(options
+		.value_of("validator")
+		.map(str::to_string)
+		.ok_or(CliError::MissingArgument { arg: "validator".to_owned() })
+		.and_then(|s| Url::parse(&s).map_err(CliError::from))
+		.map(|u| u.socket_addrs(|| Some(4004)))
+		.map_err(CliError::from)??)
 }

 #[allow(dead_code)]
 #[cfg(not(feature = "devmode"))]
 #[tracing::instrument(level = "debug", skip(options))]
 async fn ledger(
-    options: &ArgMatches,
+	options: &ArgMatches,
 ) -> Result<ChronicleSubstrateClient<PolkadotConfig>, CliError> {
-    let url = options
-        .value_of("validator")
-        .map(str::to_string)
-        .ok_or_else(|| CliError::MissingArgument { arg: "validator".to_owned() })?;
-
-    let client = ChronicleSubstrateClient::<PolkadotConfig>::connect(
-        url
-    )
-    .await?;
+	let url = options
+		.value_of("validator")
+		.map(str::to_string)
+		.ok_or_else(|| CliError::MissingArgument { arg: "validator".to_owned() })?;

-    Ok(client)
+	let client =
+		ChronicleSubstrateClient::<PolkadotConfig>::connect(url).await?;
+
+	Ok(client)
 }

 #[allow(dead_code)]
 #[cfg(feature = "devmode")]
 async fn in_mem_ledger(
-    _options: &ArgMatches,
+	_options: &ArgMatches,
 ) -> Result<Arc<EmbeddedSubstrate>, ApiError> {
-    embedded_substrate::shared_dev_node_rpc_on_arbitrary_port()
-        .await
-        .map_err(|e| ApiError::EmbeddedSubstrate(e.into()))
+	embedded_substrate::shared_dev_node_rpc_on_arbitrary_port()
+		.await
+		.map_err(|e| ApiError::EmbeddedSubstrate(e.into()))
 }

 #[derive(Debug, Clone)]
@@ -107,40 +98,40 @@ impl UuidGen for UniqueUuid {}
 type ConnectionPool = Pool<ConnectionManager<PgConnection>>;

 struct RemoteDatabaseConnector {
-    db_uri: String,
+	db_uri: String,
 }

 #[async_trait::async_trait]
 impl DatabaseConnector<(), StoreError> for RemoteDatabaseConnector {
-    async fn try_connect(&self) -> Result<((), Pool<ConnectionManager<PgConnection>>), StoreError> {
-        use diesel::Connection;
-        PgConnection::establish(&self.db_uri)?;
-        Ok(((), Pool::builder().build(ConnectionManager::<PgConnection>::new(&self.db_uri))?))
-    }
-
-    fn should_retry(&self, error: &StoreError) -> bool {
-        matches!(error, StoreError::DbConnection(diesel::ConnectionError::BadConnection(_)))
-    }
+	async fn try_connect(&self) -> Result<((), Pool<ConnectionManager<PgConnection>>), StoreError> {
+		use diesel::Connection;
+		PgConnection::establish(&self.db_uri)?;
+		Ok(((), Pool::builder().build(ConnectionManager::<PgConnection>::new(&self.db_uri))?))
+	}
+
+	fn should_retry(&self, error: &StoreError) -> bool {
+		matches!(error, StoreError::DbConnection(diesel::ConnectionError::BadConnection(_)))
+	}
 }

 #[instrument(skip(db_uri))] //Do not log db_uri, as it can contain passwords
 async fn pool_remote(db_uri: impl ToString) -> Result<ConnectionPool, StoreError> {
-    let (_, pool) =
-        get_connection_with_retry(RemoteDatabaseConnector { db_uri: db_uri.to_string() }).await?;
-    Ok(pool)
+	let (_, pool) =
+		get_connection_with_retry(RemoteDatabaseConnector { db_uri: db_uri.to_string() }).await?;
+	Ok(pool)
 }

 #[instrument(skip_all)]
 pub async fn arrow_api_server(
-    domain: &ChronicleDomainDef,
-    api: &ApiDispatch,
-    pool: &ConnectionPool,
-    addresses: Option<Vec<SocketAddr>>,
-    security_conf: EndpointSecurityConfiguration,
-    record_batch_size: usize,
-    operation_batch_size: usize,
-) -> Result<Option<impl Future<Output = Result<(), ApiError>> + Send>, ApiError> {
-    tracing::info!(
+	domain: &ChronicleDomainDef,
+	api: &ApiDispatch,
+	pool: &ConnectionPool,
+	addresses: Option<Vec<SocketAddr>>,
+	security_conf: EndpointSecurityConfiguration,
+	record_batch_size: usize,
+	operation_batch_size: usize,
+) -> Result<Option<impl Future<Output = Result<(), ApiError>> + Send>, ApiError> {
+	tracing::info!(
        addresses = ?addresses,
        allow_anonymous = ?security_conf.allow_anonymous,
        jwt_must_claim = ?security_conf.must_claim,
@@ -149,228 +140,228 @@ pub async fn arrow_api_server(
        "Starting arrow flight with the provided configuration"
    );

-    match addresses {
-        Some(addresses) => chronicle_arrow::run_flight_service(
-            domain,
-            pool,
-            api,
-            security_conf,
-            &addresses,
-            record_batch_size,
-        )
-        .await
-        .map_err(|e| ApiError::ArrowService(e.into()))
-        .map(|_| Some(futures::future::ready(Ok(())))),
-        None => Ok(None),
-    }
+	match addresses {
+		Some(addresses) => chronicle_arrow::run_flight_service(
+			domain,
+			pool,
+			api,
+			security_conf,
+			&addresses,
+			record_batch_size,
+		)
+		.await
+		.map_err(|e| ApiError::ArrowService(e.into()))
+		.map(|_| Some(futures::future::ready(Ok(())))),
+		None => Ok(None),
+	}
 }

 pub async fn graphql_api_server<Query, Mutation>(
-    api: &ApiDispatch,
-    pool: &ConnectionPool,
-    gql: ChronicleGraphQl<Query, Mutation>,
-    graphql_interface: Option<Vec<SocketAddr>>,
-    security_conf: &SecurityConf,
-    serve_graphql: bool,
-    serve_data: bool,
-) -> Result<Option<impl Future<Output = Result<(), ApiError>> + Send>, ApiError>
-    where
-        Query: ObjectType + Copy + Send + 'static,
-        Mutation: ObjectType + Copy + Send + 'static,
+	api: &ApiDispatch,
+	pool: &ConnectionPool,
+	gql: ChronicleGraphQl<Query, Mutation>,
+	graphql_interface: Option<Vec<SocketAddr>>,
+	security_conf: &SecurityConf,
+	serve_graphql: bool,
+	serve_data: bool,
+) -> Result<Option<impl Future<Output = Result<(), ApiError>> + Send>, ApiError>
+where
+	Query: ObjectType + Copy + Send + 'static,
+	Mutation: ObjectType + Copy + Send + 'static,
 {
-    if let Some(addresses) = graphql_interface {
-        gql.serve_api(
-            pool.clone(),
-            api.clone(),
-            addresses,
-            security_conf,
-            serve_graphql,
-            serve_data,
-        )
-        .await?;
-        Ok(Some(futures::future::ready(Ok(()))))
-    } else {
-        Ok(None)
-    }
+	if let Some(addresses) = graphql_interface {
+		gql.serve_api(
+			pool.clone(),
+			api.clone(),
+			addresses,
+			security_conf,
+			serve_graphql,
+			serve_data,
+		)
+		.await?;
+		Ok(Some(futures::future::ready(Ok(())))) 
+	} else {
+		Ok(None)
+	}
 }

 #[allow(dead_code)]
 fn namespace_bindings(options: &ArgMatches) -> Vec<NamespaceId> {
-    options
-        .values_of("namespace-binding")
-        .map(|values| {
-            values
-                .map(|value| {
-                    let (id, uuid) = value.split_once(':').unwrap();
-
-                    let uuid = uuid::Uuid::parse_str(uuid).unwrap();
-                    NamespaceId::from_external_id(id, uuid)
-                })
-                .collect()
-        })
-        .unwrap_or_default()
+	options
+		.values_of("namespace-binding")
+		.map(|values| {
+			values
+				.map(|value| {
+					let (id, uuid) = value.split_once(':').unwrap();
+
+					let uuid = uuid::Uuid::parse_str(uuid).unwrap();
+					NamespaceId::from_external_id(id, uuid)
+				})
+				.collect()
+		})
+		.unwrap_or_default()
 }

 fn vault_secrets_options(options: &ArgMatches) -> Result<ChronicleSecretsOptions, CliError> {
-    let vault_url = options
-        .value_of("vault-url")
-        .ok_or_else(|| CliError::missing_argument("vault-url"))?;
-    let token = options
-        .value_of("vault-token")
-        .ok_or_else(|| CliError::missing_argument("vault-token"))?;
-    let mount_path = options
-        .value_of("vault-mount-path")
-        .ok_or_else(|| CliError::missing_argument("vault-mount-path"))?;
-    Ok(ChronicleSecretsOptions::stored_in_vault(&Url::parse(vault_url)?, token, mount_path))
+	let vault_url = options
+		.value_of("vault-url")
+		.ok_or_else(|| CliError::missing_argument("vault-url"))?;
+	let token = options
+		.value_of("vault-token")
+		.ok_or_else(|| CliError::missing_argument("vault-token"))?;
+	let mount_path = options
+		.value_of("vault-mount-path")
+		.ok_or_else(|| CliError::missing_argument("vault-mount-path"))?;
+	Ok(ChronicleSecretsOptions::stored_in_vault(&Url::parse(vault_url)?, token, mount_path))
 }

 #[cfg(not(feature = "devmode"))]
 async fn chronicle_signing(options: &ArgMatches) -> Result<ChronicleSigning, CliError> {
-    // Determine batcher configuration
-
-    use std::path::PathBuf;
-    let batcher_options: ChronicleSecretsOptions = match (
-        options.get_one::<PathBuf>("batcher-key-from-path"),
-        options.contains_id("batcher-key-from-vault"),
-        options.contains_id("batcher-key-generated"),
-    ) {
-        (Some(path), _, _) => ChronicleSecretsOptions::stored_at_path(std::path::Path::new(path)),
-        (_, true, _) => vault_secrets_options(options)?,
-        (_, _, true) => ChronicleSecretsOptions::generate_in_memory(),
-        _ => unreachable!("CLI should always set batcher key"),
-    };
-
-    let chronicle_options = match (
-        options.get_one::<PathBuf>("chronicle-key-from-path"),
-        options.contains_id("chronicle-key-from-vault"),
-        options.contains_id("chronicle-key-generated"),
-    ) {
-        (Some(path), _, _) => ChronicleSecretsOptions::stored_at_path(path),
-        (_, true, _) => vault_secrets_options(options)?,
-        (_, _, true) => ChronicleSecretsOptions::generate_in_memory(),
-        _ => unreachable!("CLI should always set chronicle key"),
-    };
-
-    Ok(ChronicleSigning::new(
-        chronicle_secret_names(),
-        vec![
-            (CHRONICLE_NAMESPACE.to_string(), chronicle_options),
-            (BATCHER_NAMESPACE.to_string(), batcher_options),
-        ],
-    )
-    .await?)
+	// Determine batcher configuration
+
+	use std::path::PathBuf;
+	let batcher_options: ChronicleSecretsOptions = match (
+		options.get_one::<PathBuf>("batcher-key-from-path"),
+		options.contains_id("batcher-key-from-vault"),
+		options.contains_id("batcher-key-generated"),
+	) {
+		(Some(path), _, _) => ChronicleSecretsOptions::stored_at_path(std::path::Path::new(path)),
+		(_, true, _) => vault_secrets_options(options)?,
+		(_, _, true) => ChronicleSecretsOptions::generate_in_memory(),
+		_ => unreachable!("CLI should always set batcher key"),
+	};
+
+	let chronicle_options = match (
+		options.get_one::<PathBuf>("chronicle-key-from-path"),
+		options.contains_id("chronicle-key-from-vault"),
+		options.contains_id("chronicle-key-generated"),
+	) {
+		(Some(path), _, _) => ChronicleSecretsOptions::stored_at_path(path),
+		(_, true, _) => vault_secrets_options(options)?,
+		(_, _, true) => ChronicleSecretsOptions::generate_in_memory(),
+		_ => unreachable!("CLI should always set chronicle key"),
+	};
+
+	Ok(ChronicleSigning::new(
+		chronicle_secret_names(),
+		vec![
+			(CHRONICLE_NAMESPACE.to_string(), chronicle_options),
+			(BATCHER_NAMESPACE.to_string(), batcher_options),
+		],
+	)
+	.await?)
 }

 #[cfg(feature = "devmode")]
 async fn chronicle_signing(_options: &ArgMatches) -> Result<ChronicleSigning, CliError> {
-    Ok(ChronicleSigning::new(
-        chronicle_secret_names(),
-        vec![
-            (CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
-            (BATCHER_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
-        ],
-    )
-    .await?)
+	Ok(ChronicleSigning::new(
+		chronicle_secret_names(),
+		vec![
+			(CHRONICLE_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
+			(BATCHER_NAMESPACE.to_string(), ChronicleSecretsOptions::generate_in_memory()),
+		],
+	)
+	.await?)
 }

 #[cfg(not(feature = "devmode"))]
 pub async fn api(
-    pool: &ConnectionPool,
-    options: &ArgMatches,
-    policy_address: Option<PolicyAddress>,
-    liveness_check_interval: Option<u64>,
+	pool: &ConnectionPool,
+	options: &ArgMatches,
+	policy_address: Option<PolicyAddress>,
+	liveness_check_interval: Option<u64>,
 ) -> Result<ApiDispatch, CliError> {
-    let ledger = ledger(options).await?;
-
-    Ok(Api::new(
-        pool.clone(),
-        ledger,
-        UniqueUuid,
-        chronicle_signing(options).await?,
-        namespace_bindings(options),
-        policy_address,
-        liveness_check_interval,
-    )
-    .await?)
+	let ledger = ledger(options).await?;
+
+	Ok(Api::new(
+		pool.clone(),
+		ledger,
+		UniqueUuid,
+		chronicle_signing(options).await?,
+		namespace_bindings(options),
+		policy_address,
+		liveness_check_interval,
+	)
+	.await?)
 }

 #[cfg(feature = "devmode")]
 pub async fn api(
-    pool: &ConnectionPool,
-    options: &ArgMatches,
-    remote_opa: Option<PolicyAddress>,
-    liveness_check_interval: Option<u64>,
+	pool: &ConnectionPool,
+	options: &ArgMatches,
+	remote_opa: Option<PolicyAddress>,
+	liveness_check_interval: Option<u64>,
 ) -> Result<ApiDispatch, CliError> {
-    use protocol_substrate::PolkadotConfig;
-
-    let embedded_tp = in_mem_ledger(options).await?;
-
-    Ok(Api::new(
-        pool.clone(),
-        embedded_tp.connect_chronicle::<PolkadotConfig>().await?,
-        UniqueUuid,
-        chronicle_signing(options).await?,
-        vec![],
-        remote_opa,
-        liveness_check_interval,
-    )
-    .await?)
+	use protocol_substrate::PolkadotConfig;
+
+	let embedded_tp = in_mem_ledger(options).await?;
+
+	Ok(Api::new(
+		pool.clone(),
+		embedded_tp.connect_chronicle::<PolkadotConfig>().await?,
+		UniqueUuid,
+		chronicle_signing(options).await?,
+		vec![],
+		remote_opa,
+		liveness_check_interval,
+	)
+	.await?)
 }

 fn construct_db_uri(matches: &ArgMatches) -> String {
-    fn encode(string: &str) -> String {
-        use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
-        utf8_percent_encode(string, NON_ALPHANUMERIC).to_string()
-    }
-
-    let password = match std::env::var("PGPASSWORD") {
-        Ok(password) => {
-            debug!("PGPASSWORD is set, using for DB connection");
-            format!(":{}", encode(password.as_str()))
-        }
-        Err(_) => {
-            debug!("PGPASSWORD is not set, omitting for DB connection");
-            String::new()
-        }
-    };
-
-    format!(
-        "postgresql://{}{}@{}:{}/{}",
-        encode(
-            matches
-                .value_of("database-username")
-                .expect("CLI should always set database user")
-        ),
-        password,
-        encode(matches.value_of("database-host").expect("CLI should always set database host")),
-        encode(matches.value_of("database-port").expect("CLI should always set database port")),
-        encode(matches.value_of("database-name").expect("CLI should always set database name"))
-    )
+	fn encode(string: &str) -> String {
+		use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};
+		utf8_percent_encode(string, NON_ALPHANUMERIC).to_string()
+	}
+
+	let password = match std::env::var("PGPASSWORD") {
+		Ok(password) => {
+			debug!("PGPASSWORD is set, using for DB connection");
+			format!(":{}", encode(password.as_str()))
+		},
+		Err(_) => {
+			debug!("PGPASSWORD is not set, omitting for DB connection");
+			String::new()
+		},
+	};
+
+	format!(
+		"postgresql://{}{}@{}:{}/{}",
+		encode(
+			matches
+				.value_of("database-username")
+				.expect("CLI should always set database user")
+		),
+		password,
+		encode(matches.value_of("database-host").expect("CLI should always set database host")),
+		encode(matches.value_of("database-port").expect("CLI should always set database port")),
+		encode(matches.value_of("database-name").expect("CLI should always set database name"))
+	)
 }

 #[derive(Debug, Clone)]
 pub enum ConfiguredOpa {
-    Embedded(ExecutorContext),
-    Remote(ExecutorContext, OpaSettings),
-    Url(ExecutorContext),
+	Embedded(ExecutorContext),
+	Remote(ExecutorContext, OpaSettings),
+	Url(ExecutorContext),
 }

 impl ConfiguredOpa {
-    pub fn context(&self) -> &ExecutorContext {
-        match self {
-            ConfiguredOpa::Embedded(context) => context,
-            ConfiguredOpa::Remote(context, _) => context,
-            ConfiguredOpa::Url(context) => context,
-        }
-    }
-
-    pub fn remote_settings(&self) -> Option<PolicyAddress> {
-        match self {
-            ConfiguredOpa::Embedded(_) => None,
-            ConfiguredOpa::Remote(_, settings) => Some(settings.policy_address),
-            ConfiguredOpa::Url(_) => None,
-        }
-    }
+	pub fn context(&self) -> &ExecutorContext {
+		match self {
+			ConfiguredOpa::Embedded(context) => context,
+			ConfiguredOpa::Remote(context, _) => context,
+			ConfiguredOpa::Url(context) => context,
+		}
+	}
+
+	pub fn remote_settings(&self) -> Option<PolicyAddress> {
+		match self {
+			ConfiguredOpa::Embedded(_) => None,
+			ConfiguredOpa::Remote(_, settings) => Some(settings.policy_address),
+			ConfiguredOpa::Url(_) => None,
+		}
+	}
 }

 /// If embedded-opa-policy is set, we will use the embedded policy, otherwise we
@@ -379,10 +370,10 @@ impl ConfiguredOpa {
 #[cfg(feature = "devmode")]
 #[allow(unused_variables)]
 async fn configure_opa(options: &ArgMatches) -> Result<ConfiguredOpa, CliError> {
-    let (default_policy_name, entrypoint) =
-        ("allow_transactions", "allow_transactions.allowed_users");
-    let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
-    Ok(ConfiguredOpa::Embedded(opa))
+	let (default_policy_name, entrypoint) =
+		("allow_transactions", "allow_transactions.allowed_users");
+	let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
+	Ok(ConfiguredOpa::Embedded(opa))
 }

 // Check if the `embedded-opa-policy` flag is present in the CLI options.
@@ -406,276 +397,276 @@ async fn configure_opa(options: &ArgMatches) -> Result<ConfiguredOpa, CliError>
 #[cfg(not(feature = "devmode"))]
 #[instrument(skip(options))]
 async fn configure_opa(options: &ArgMatches) -> Result<ConfiguredOpa, CliError> {
-    if options.is_present("embedded-opa-policy") {
-        let (default_policy_name, entrypoint) =
-            ("allow_transactions", "allow_transactions.allowed_users");
-        let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
-        tracing::warn!(
            "Chronicle operating in an insecure mode with an embedded default OPA policy"
        );
-        Ok(ConfiguredOpa::Embedded(opa))
-    } else if let Some(url) = options.value_of("opa-bundle-address") {
-        let (policy_name, entrypoint) = (
-            options.value_of("opa-policy-name").unwrap(),
-            options.value_of("opa-policy-entrypoint").unwrap(),
-        );
-        let opa = self::opa::opa_executor_from_url(url, policy_name, entrypoint).await?;
-        tracing::info!("Chronicle operating with OPA policy from URL");
-
-        Ok(ConfiguredOpa::Url(opa))
-    } else {
-        let (opa, settings) = self::opa::opa_executor_from_substrate_state(
-            &ChronicleSubstrateClient::connect_socket_addr(validator_address(options)?[0]).await?,
-            &protocol_substrate_opa::OpaSubstrateClient::connect_socket_addr(
-                validator_address(options)?[0],
-            )
-            .await?,
-        )
-        .await?;
-
-        if let Some(settings) = settings {
-            tracing::info!(use_on_chain_opa= ?settings, "Chronicle operating in secure mode with on chain OPA policy");
-            Ok(ConfiguredOpa::Remote(opa, settings))
-        } else {
-            tracing::warn!(
                "Chronicle operating in an insecure mode with an embedded default OPA policy"
            );
-            tracing::warn!(use_on_chain_opa= ?settings, "Chronicle operating in secure mode with on chain OPA policy");
-            let (default_policy_name, entrypoint) =
-                ("allow_transactions", "allow_transactions.allowed_users");
-            let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
-
-            Ok(ConfiguredOpa::Embedded(opa))
-        }
-    }
+	if options.is_present("embedded-opa-policy") {
+		let (default_policy_name, entrypoint) =
+			("allow_transactions", "allow_transactions.allowed_users");
+		let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
+		tracing::warn!(
			"Chronicle operating in an insecure mode with an embedded default OPA policy"
		);
+		Ok(ConfiguredOpa::Embedded(opa))
+	} else if let Some(url) = options.value_of("opa-bundle-address") {
+		let (policy_name, entrypoint) = (
+			options.value_of("opa-policy-name").unwrap(),
+			options.value_of("opa-policy-entrypoint").unwrap(),
+		);
+		let opa = self::opa::opa_executor_from_url(url, policy_name, entrypoint).await?;
+		tracing::info!("Chronicle operating with OPA policy from URL");
+
+		Ok(ConfiguredOpa::Url(opa))
+	} else {
+		let (opa, settings) = self::opa::opa_executor_from_substrate_state(
+			&ChronicleSubstrateClient::connect_socket_addr(validator_address(options)?[0]).await?,
+			&protocol_substrate_opa::OpaSubstrateClient::connect_socket_addr(
+				validator_address(options)?[0],
+			)
+			.await?,
+		)
+		.await?;
+
+		if let Some(settings) = settings {
+			tracing::info!(use_on_chain_opa= ?settings, "Chronicle operating in secure mode with on chain OPA policy");
+			Ok(ConfiguredOpa::Remote(opa, settings))
+		} else {
+			tracing::warn!(
				"Chronicle operating in an insecure mode with an embedded default OPA policy"
			);
+			tracing::warn!(use_on_chain_opa= ?settings, "Chronicle operating in secure mode with on chain OPA policy");
+			let (default_policy_name, entrypoint) =
+				("allow_transactions", "allow_transactions.allowed_users");
+			let opa = opa_executor_from_embedded_policy(default_policy_name, entrypoint).await?;
+
+			Ok(ConfiguredOpa::Embedded(opa))
+		}
+	}
 }

 /// If `--liveness-check` is set, we use either the interval in seconds provided or the default of
 /// 1800. Otherwise, we use `None` to disable the depth charge.
 fn configure_depth_charge(matches: &ArgMatches) -> Option<u64> {
-    if let Some(serve_api_matches) = matches.subcommand_matches("serve-api") {
-        if let Some(interval) = serve_api_matches.value_of("liveness-check") {
-            let parsed_interval = interval.parse::<u64>().unwrap_or_else(|e| {
-                warn!("Failed to parse '--liveness-check' value: {e}");
-                1800
-            });
-
-            if parsed_interval == 1800 {
-                debug!("Using default liveness health check interval value: 1800");
-            } else {
-                debug!("Using custom liveness health check interval value: {parsed_interval}");
-            }
-            return Some(parsed_interval);
-        }
-    }
-    debug!("Liveness health check disabled");
-    None
+	if let Some(serve_api_matches) = matches.subcommand_matches("serve-api") {
+		if let Some(interval) = serve_api_matches.value_of("liveness-check") {
+			let parsed_interval = interval.parse::<u64>().unwrap_or_else(|e| {
+				warn!("Failed to parse '--liveness-check' value: {e}");
+				1800
+			});
+
+			if parsed_interval == 1800 {
+				debug!("Using default liveness health check interval value: 1800");
+			} else {
+				debug!("Using custom liveness health check interval value: {parsed_interval}");
+			}
+			return Some(parsed_interval);
+		}
+	}
+	debug!("Liveness health check disabled");
+	None
 }

 #[instrument(skip(gql, cli))]
 async fn execute_subcommand<Query, Mutation>(
-    gql: ChronicleGraphQl<Query, Mutation>,
-    domain: &ChronicleDomainDef,
-    cli: CliModel,
+	gql: ChronicleGraphQl<Query, Mutation>,
+	domain: &ChronicleDomainDef,
+	cli: CliModel,
 ) -> Result<(ApiResponse, ApiDispatch), CliError>
-    where
-        Query: ObjectType + Copy,
-        Mutation: ObjectType + Copy,
+where
+	Query: ObjectType + Copy,
+	Mutation: ObjectType + Copy,
 {
-    dotenvy::dotenv().ok();
-
-    let matches = cli.as_cmd().get_matches();
-
-    let pool = pool_remote(&construct_db_uri(&matches)).await?;
-
-    let opa = configure_opa(&matches).await?;
-
-    let liveness_check_interval = configure_depth_charge(&matches);
-
-    let api = api(&pool, &matches, opa.remote_settings(), liveness_check_interval).await?;
-    let ret_api = api.clone();
-
-    if let Some(matches) = matches.subcommand_matches("serve-api") {
-        let interface = match matches.get_many::<String>("interface") {
-            Some(interface_args) => {
-                let mut addrs = Vec::new();
-                for interface_arg in interface_args {
-                    addrs.extend(interface_arg.to_socket_addrs()?);
-                }
-                Some(addrs)
-            }
-            None => None,
-        };
-
-        let arrow_interface = match matches.get_many::<String>("arrow-interface") {
-            Some(interface_args) => {
-                let mut addrs = Vec::new();
-                for interface_arg in interface_args {
-                    addrs.extend(interface_arg.to_socket_addrs()?);
-                }
-                Some(addrs)
-            }
-            None => None,
-        };
-
-        let jwks_uri = if let Some(uri) = matches.value_of("jwks-address") {
-            Some(JwksUri::new(Url::from_str(uri)?))
-        } else {
-            None
-        };
-
-        let userinfo_uri = if let Some(uri) = matches.value_of("userinfo-address") {
-            Some(UserInfoUri::new(Url::from_str(uri)?))
-        } else {
-            None
-        };
-
-        let allow_anonymous = !matches.is_present("require-auth");
-
-        let id_claims = matches.get_many::<String>("id-claims").map(|id_claims| {
-            let mut id_keys = BTreeSet::new();
-            for id_claim in id_claims {
-                id_keys.extend(id_claim.split_whitespace().map(|s| s.to_string()));
-            }
-            id_keys
-        });
-
-        let mut jwt_must_claim: HashMap<String, String> = HashMap::new();
-        for (name, value) in std::env::vars() {
-            if let Some(name) = name.strip_prefix("JWT_MUST_CLAIM_") {
-                jwt_must_claim.insert(name.to_lowercase(), value);
-            }
-        }
-        if let Some(mut claims) = matches.get_many::<String>("jwt-must-claim") {
-            while let (Some(name), Some(value)) = (claims.next(), claims.next()) {
-                jwt_must_claim.insert(name.clone(), value.clone());
-            }
-        }
-
-        let endpoints: Vec<String> =
-            matches.get_many("offer-endpoints").unwrap().map(String::clone).collect();
-
-        let security_conf = SecurityConf::new(
-            jwks_uri,
-            userinfo_uri,
-            id_claims,
-            jwt_must_claim.clone(),
-            allow_anonymous,
-            opa.context().clone(),
-        );
-
-        let arrow = arrow_api_server(
-            domain,
-            &api,
-            &pool,
-            arrow_interface,
-            security_conf.as_endpoint_conf(30),
-            1000,
-            100,
-        );
-
-        let serve_graphql = endpoints.contains(&"graphql".to_string());
-        let serve_data = endpoints.contains(&"data".to_string());
-
-        let gql = graphql_api_server(
-            &api,
-            &pool,
-            gql,
-            interface,
-            &security_conf,
-            serve_graphql,
-            serve_data,
-        );
-
-        tokio::task::spawn(async move {
-            use async_signals::Signals;
-
-            let mut signals = Signals::new(vec![libc::SIGHUP, libc::SIGINT]).unwrap();
-
-            signals.next().await;
-            chronicle_arrow::trigger_shutdown();
-            api::chronicle_graphql::trigger_shutdown();
-        });
-
-        let (gql_result, arrow_result) = tokio::join!(gql, arrow);
-
-        if let Err(e) = gql_result {
-            return Err(e.into());
-        }
-        if let Err(e) = arrow_result {
-            return Err(e.into());
-        }
-
-        Ok((ApiResponse::Unit, ret_api))
-    } else if let Some(matches) = matches.subcommand_matches("import") {
-        let data = if let Some(url) = matches.value_of("url") {
-            let data = load_bytes_from_url(url).await?;
-            info!("Loaded import data from {:?}", url);
-            data
-        } else {
-            if std::io::stdin().is_terminal() {
-                eprintln!("Attempting to import data from standard input, press Ctrl-D to finish.");
-            }
-            info!("Attempting to read import data from stdin...");
-            let data = load_bytes_from_stdin()?;
-            info!("Loaded {} bytes of import data from stdin", data.len());
-            data
-        };
-
-        let data = std::str::from_utf8(&data)?;
-
-        if data.trim().is_empty() {
-            eprintln!("Import data is empty, nothing to import");
-            return Ok((ApiResponse::Unit, ret_api));
-        }
-
-        let json_array = serde_json::from_str::<Vec<serde_json::Value>>(data)?;
-
-        let mut operations = Vec::new();
-        for value in json_array.into_iter() {
-            let op = ChronicleOperation::from_json(&value)
-                .await
-                .expect("Failed to parse imported JSON-LD to ChronicleOperation");
-            operations.push(op);
-        }
-
-        info!("Loading import data complete");
-
-        let identity = AuthId::chronicle();
-
-        let response = api.handle_import_command(identity, operations).await?;
-
-        Ok((response, ret_api))
-    } else if let Some(cmd) = cli.matches(&matches)? {
-        let identity = AuthId::chronicle();
-        Ok((api.dispatch(cmd, identity).await?, ret_api))
-    } else {
-        Ok((ApiResponse::Unit, ret_api))
-    }
+	dotenvy::dotenv().ok();
+
+	let matches = cli.as_cmd().get_matches();
+
+	let pool = pool_remote(&construct_db_uri(&matches)).await?;
+
+	let opa = configure_opa(&matches).await?;
+
+	let liveness_check_interval = configure_depth_charge(&matches);
+
+	let api = api(&pool, &matches, opa.remote_settings(), liveness_check_interval).await?;
+	let ret_api = api.clone();
+
+	if let Some(matches) = matches.subcommand_matches("serve-api") {
+		let interface = match matches.get_many::<String>("interface") {
+			Some(interface_args) => {
+				let mut addrs = Vec::new();
+				for interface_arg in interface_args {
+					addrs.extend(interface_arg.to_socket_addrs()?);
+				}
+				Some(addrs)
+			},
+			None => None,
+		};
+
+		let arrow_interface = match matches.get_many::<String>("arrow-interface") {
+			Some(interface_args) => {
+				let mut addrs = Vec::new();
+				for interface_arg in interface_args {
+					addrs.extend(interface_arg.to_socket_addrs()?);
+				}
+				Some(addrs)
+			},
+			None => None,
+		};
+
+		let jwks_uri = if let Some(uri) = matches.value_of("jwks-address") {
+			Some(JwksUri::new(Url::from_str(uri)?))
+		} else {
+			None
+		};
+
+		let userinfo_uri = if let Some(uri) = matches.value_of("userinfo-address") {
+			Some(UserInfoUri::new(Url::from_str(uri)?))
+		} else {
+			None
+		};
+
+		let allow_anonymous = !matches.is_present("require-auth");
+
+		let id_claims = matches.get_many::<String>("id-claims").map(|id_claims| {
+			let mut id_keys = BTreeSet::new();
+			for id_claim in id_claims {
+				id_keys.extend(id_claim.split_whitespace().map(|s| s.to_string()));
+			}
+			id_keys
+		});
+
+		let mut jwt_must_claim: HashMap<String, String> = HashMap::new();
+		for (name, value) in std::env::vars() {
+			if let Some(name) = name.strip_prefix("JWT_MUST_CLAIM_") {
+				jwt_must_claim.insert(name.to_lowercase(), value);
+			}
+		}
+		if let Some(mut claims) = matches.get_many::<String>("jwt-must-claim") {
+			while let (Some(name), Some(value)) = (claims.next(), claims.next()) {
+				jwt_must_claim.insert(name.clone(), value.clone());
+			}
+		}
+
+		let endpoints: Vec<String> =
+			matches.get_many("offer-endpoints").unwrap().map(String::clone).collect();
+
+		let security_conf = SecurityConf::new(
+			jwks_uri,
+			userinfo_uri,
+			id_claims,
+			jwt_must_claim.clone(),
+			allow_anonymous,
+			opa.context().clone(),
+		);

+		let arrow = arrow_api_server(
+			domain,
+			&api,
+			&pool,
+			arrow_interface,
+			security_conf.as_endpoint_conf(30),
+			1000,
+			100,
+		);

+		let serve_graphql = endpoints.contains(&"graphql".to_string());
+		let serve_data = endpoints.contains(&"data".to_string());
+
+		let gql = graphql_api_server(
+			&api,
+			&pool,
+			gql,
+			interface,
+			&security_conf,
+			serve_graphql,
+			serve_data,
+		);

+		tokio::task::spawn(async move {
+			use async_signals::Signals;
+
+			let mut signals = Signals::new(vec![libc::SIGHUP, libc::SIGINT]).unwrap();
+
+			signals.next().await;
+			chronicle_arrow::trigger_shutdown();
+			api::chronicle_graphql::trigger_shutdown();
+		});
+
+		let (gql_result, arrow_result) = tokio::join!(gql, arrow);
+
+		if let Err(e) = gql_result {
+			return Err(e.into());
+		}
+		if let Err(e) = arrow_result {
+			return Err(e.into());
+		}
+
+		Ok((ApiResponse::Unit, ret_api))
+	} else if let Some(matches) = matches.subcommand_matches("import") {
+		let data = if let Some(url) = matches.value_of("url") {
+			let data = load_bytes_from_url(url).await?;
+			info!("Loaded import data from {:?}", url);
+			data
+		} else {
+			if std::io::stdin().is_terminal() {
+				eprintln!("Attempting to import data from standard input, press Ctrl-D to finish.");
+			}
+			info!("Attempting to read import data from stdin...");
+			let data = load_bytes_from_stdin()?;
+			info!("Loaded {} bytes of import data from stdin", data.len());
+			data
+		};
+
+		let data = std::str::from_utf8(&data)?;
+
+		if data.trim().is_empty() {
+			eprintln!("Import data is empty, nothing to import");
+			return Ok((ApiResponse::Unit, ret_api));
+		}
+
+		let json_array = serde_json::from_str::<Vec<serde_json::Value>>(data)?;
+
+		let mut operations = Vec::new();
+		for value in json_array.into_iter() {
+			let op = ChronicleOperation::from_json(&value)
+				.await
+				.expect("Failed to parse imported JSON-LD to ChronicleOperation");
+			operations.push(op);
+		}
+
+		info!("Loading import data complete");
+
+		let identity = AuthId::chronicle();
+
+		let response = api.handle_import_command(identity, operations).await?;
+
+		Ok((response, ret_api))
+	} else if let Some(cmd) = cli.matches(&matches)? {
+		let identity = AuthId::chronicle();
+		Ok((api.dispatch(cmd, identity).await?, ret_api))
+	} else {
+		Ok((ApiResponse::Unit, ret_api))
+	}
 }

 fn get_namespace(matches: &ArgMatches) -> NamespaceId {
-    let namespace_id = matches.value_of("namespace-id").unwrap();
-    let namespace_uuid = matches.value_of("namespace-uuid").unwrap();
-    let uuid = uuid::Uuid::try_parse(namespace_uuid)
-        .unwrap_or_else(|_| panic!("cannot parse namespace UUID: {}", namespace_uuid));
-    NamespaceId::from_external_id(namespace_id, uuid)
+	let namespace_id = matches.value_of("namespace-id").unwrap();
+	let namespace_uuid = matches.value_of("namespace-uuid").unwrap();
+	let uuid = uuid::Uuid::try_parse(namespace_uuid)
+		.unwrap_or_else(|_| panic!("cannot parse namespace UUID: {}", namespace_uuid));
+	NamespaceId::from_external_id(namespace_id, uuid)
 }

 async fn config_and_exec<Query, Mutation>(
-    gql: ChronicleGraphQl<Query, Mutation>,
-    domain: &ChronicleDomainDef,
-    model: CliModel,
+	gql: ChronicleGraphQl<Query, Mutation>,
+	domain: &ChronicleDomainDef,
+	model: CliModel,
 ) -> Result<(), CliError>
-    where
-        Query: ObjectType + Copy,
-        Mutation: ObjectType + Copy,
+where
+	Query: ObjectType + Copy,
+	Mutation: ObjectType + Copy,
 {
-    use colored_json::prelude::*;
+	use colored_json::prelude::*;

-    let response = execute_subcommand(gql, domain, model).await?;
+	let response = execute_subcommand(gql, domain, model).await?;

-    match response {
+	match response {
        (
            ApiResponse::Submission {
                subject,
@@ -796,59 +787,57 @@ async fn config_and_exec(
                "DepthChargeSubmitted is an unexpected API response for transaction: {tx_id}. Depth charge not implemented."
            ),
        };
-    Ok(())
+	Ok(())
 }

 fn print_completions<G: Generator>(gen: G, app: &mut Command) {
-    generate(gen, app, app.get_name().to_string(), &mut io::stdout());
+	generate(gen, app, app.get_name().to_string(), &mut io::stdout());
 }

 pub async fn bootstrap<Query, Mutation>(
-    domain: ChronicleDomainDef,
-    gql: ChronicleGraphQl<Query, Mutation>,
+	domain: ChronicleDomainDef,
+	gql: ChronicleGraphQl<Query, Mutation>,
 ) where
-    Query: ObjectType + 'static + Copy,
-    Mutation: ObjectType + 'static + Copy,
+	Query: ObjectType + 'static + Copy,
+	Mutation: ObjectType + 'static + Copy,
 {
-    let matches = cli(domain.clone()).as_cmd().get_matches();
-
-    if let Some(generator) = matches.subcommand_matches("completions") {
-        let shell = generator.get_one::<String>("shell").unwrap().parse::<Shell>().unwrap();
-        print_completions(shell.to_owned(), &mut cli(domain.clone()).as_cmd());
-        std::process::exit(0);
-    }
-
-    if matches.subcommand_matches("export-schema").is_some() {
-        print!("{}", gql.exportable_schema());
-        std::process::exit(0);
-    }
-    chronicle_telemetry::telemetry(
-        matches
-            .get_one::<bool>("enable-otel").is_some(),
-        if matches.contains_id("console-logging") {
-            match matches.get_one::<String>("console-logging") {
-                Some(level) => match level.as_str() {
-                    "pretty" => ConsoleLogging::Pretty,
-                    "json" => ConsoleLogging::Json,
-                    _ => ConsoleLogging::Off,
-                },
-                _ => ConsoleLogging::Off,
-            }
-        } else if matches.subcommand_name() == Some("serve-api") {
-            ConsoleLogging::Pretty
-        } else {
-            ConsoleLogging::Off
-        },
-    );
-
-    config_and_exec(gql, &domain, domain.clone().into())
-        .await
-        .map_err(|e| {
-            error!(?e, "Api error");
-            e.into_ufe().print();
-            std::process::exit(1);
-        })
-        .ok();
-
-    std::process::exit(0);
+	let matches = cli(domain.clone()).as_cmd().get_matches();
+
+	if let Some(generator) = matches.subcommand_matches("completions") {
+		let shell = generator.get_one::<String>("shell").unwrap().parse::<Shell>().unwrap();
+		print_completions(shell.to_owned(), &mut cli(domain.clone()).as_cmd());
+		std::process::exit(0);
+	}
+
+	if matches.subcommand_matches("export-schema").is_some() {
+		print!("{}", gql.exportable_schema());
+		std::process::exit(0);
+	}
+	chronicle_telemetry::telemetry(
+		if matches.contains_id("console-logging") {
+			match matches.get_one::<String>("console-logging") {
+				Some(level) => match level.as_str() {
+					"pretty" => ConsoleLogging::Pretty,
+					"json" => ConsoleLogging::Json,
+					_ => ConsoleLogging::Off,
+				},
+				_ => ConsoleLogging::Off,
+			}
+		} else if matches.subcommand_name() == Some("serve-api") {
+			ConsoleLogging::Pretty
+		} else {
+			ConsoleLogging::Off
+		},
+	);
+
+	config_and_exec(gql, &domain, domain.clone().into())
+		.await
+		.map_err(|e| {
+			error!(?e, "Api error");
+			e.into_ufe().print();
+			std::process::exit(1);
+		})
+		.ok();
+
+	std::process::exit(0);
 }
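A note on `construct_db_uri` above: every URI component is percent-encoded, so a password taken from `PGPASSWORD` cannot corrupt the connection string. A rough stand-alone sketch of the same idea, assuming the `percent-encoding` crate; the helper name and example values below are illustrative, not from this patch:

    use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC};

    fn pg_uri(user: &str, password: Option<&str>, host: &str, port: &str, db: &str) -> String {
        let enc = |s: &str| utf8_percent_encode(s, NON_ALPHANUMERIC).to_string();
        // As in the patch, an unset password omits the ":password" segment entirely.
        let password = password.map(|p| format!(":{}", enc(p))).unwrap_or_default();
        format!("postgresql://{}{}@{}:{}/{}", enc(user), password, enc(host), enc(port), enc(db))
    }

    // pg_uri("chronicle", Some("p@ss:word"), "localhost", "5432", "chronicle")
    //   == "postgresql://chronicle:p%40ss%3Aword@localhost:5432/chronicle"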
super::CliError; /// OPA policy loader for policies passed via CLI or embedded in Chronicle #[derive(Clone, Default)] pub struct CliPolicyLoader { - address: String, - rule_name: String, - entrypoint: String, - policy: Vec, + address: String, + rule_name: String, + entrypoint: String, + policy: Vec, } impl CliPolicyLoader { - pub fn new() -> Self { - Self { ..Default::default() } - } - - #[instrument(level = "trace", skip(self), ret)] - async fn get_policy_from_file(&mut self) -> Result, PolicyLoaderError> { - let bundle = Bundle::from_file(self.get_address())?; - - self.load_policy_from_bundle(&bundle)?; - - Ok(self.get_policy().to_vec()) - } - - /// Create a loaded [`CliPolicyLoader`] from name of an embedded dev policy and entrypoint - pub fn from_embedded_policy(policy: &str, entrypoint: &str) -> Result { - if let Some(file) = common::opa::std::EmbeddedOpaPolicies::get("bundle.tar.gz") { - let bytes = file.data.as_ref(); - let bundle = Bundle::from_bytes(bytes)?; - let mut loader = CliPolicyLoader::new(); - loader.set_rule_name(policy); - loader.set_entrypoint(entrypoint); - loader.load_policy_from_bundle(&bundle)?; - Ok(loader) - } else { - Err(PolicyLoaderError::EmbeddedOpaPolicies) - } - } - - /// Create a loaded [`CliPolicyLoader`] from an OPA policy's bytes and entrypoint - pub fn from_policy_bytes( - policy: &str, - entrypoint: &str, - bytes: &[u8], - ) -> Result { - let mut loader = CliPolicyLoader::new(); - loader.set_rule_name(policy); - loader.set_entrypoint(entrypoint); - let bundle = Bundle::from_bytes(bytes)?; - loader.load_policy_from_bundle(&bundle)?; - Ok(loader) - } + pub fn new() -> Self { + Self { ..Default::default() } + } + + #[instrument(level = "trace", skip(self), ret)] + async fn get_policy_from_file(&mut self) -> Result, PolicyLoaderError> { + let bundle = Bundle::from_file(self.get_address())?; + + self.load_policy_from_bundle(&bundle)?; + + Ok(self.get_policy().to_vec()) + } + + /// Create a loaded [`CliPolicyLoader`] from name of an embedded dev policy and entrypoint + pub fn from_embedded_policy(policy: &str, entrypoint: &str) -> Result { + if let Some(file) = common::opa::std::EmbeddedOpaPolicies::get("bundle.tar.gz") { + let bytes = file.data.as_ref(); + let bundle = Bundle::from_bytes(bytes)?; + let mut loader = CliPolicyLoader::new(); + loader.set_rule_name(policy); + loader.set_entrypoint(entrypoint); + loader.load_policy_from_bundle(&bundle)?; + Ok(loader) + } else { + Err(PolicyLoaderError::EmbeddedOpaPolicies) + } + } + + /// Create a loaded [`CliPolicyLoader`] from an OPA policy's bytes and entrypoint + pub fn from_policy_bytes( + policy: &str, + entrypoint: &str, + bytes: &[u8], + ) -> Result { + let mut loader = CliPolicyLoader::new(); + loader.set_rule_name(policy); + loader.set_entrypoint(entrypoint); + let bundle = Bundle::from_bytes(bytes)?; + loader.load_policy_from_bundle(&bundle)?; + Ok(loader) + } } #[async_trait::async_trait] impl PolicyLoader for CliPolicyLoader { - fn set_address(&mut self, address: &str) { - self.address = address.to_owned() - } - - fn set_rule_name(&mut self, name: &str) { - self.rule_name = name.to_owned() - } - - fn set_entrypoint(&mut self, entrypoint: &str) { - self.entrypoint = entrypoint.to_owned() - } - - fn get_address(&self) -> &str { - &self.address - } - - fn get_rule_name(&self) -> &str { - &self.rule_name - } - - fn get_entrypoint(&self) -> &str { - &self.entrypoint - } - - fn get_policy(&self) -> &[u8] { - &self.policy - } - - fn load_policy_from_bytes(&mut self, policy: &[u8]) { - self.policy = 
policy.to_vec() - } - - async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { - self.policy = self.get_policy_from_file().await?; - Ok(()) - } - - fn hash(&self) -> String { - hex::encode(policy_hash(&self.policy)) - } + fn set_address(&mut self, address: &str) { + self.address = address.to_owned() + } + + fn set_rule_name(&mut self, name: &str) { + self.rule_name = name.to_owned() + } + + fn set_entrypoint(&mut self, entrypoint: &str) { + self.entrypoint = entrypoint.to_owned() + } + + fn get_address(&self) -> &str { + &self.address + } + + fn get_rule_name(&self) -> &str { + &self.rule_name + } + + fn get_entrypoint(&self) -> &str { + &self.entrypoint + } + + fn get_policy(&self) -> &[u8] { + &self.policy + } + + fn load_policy_from_bytes(&mut self, policy: &[u8]) { + self.policy = policy.to_vec() + } + + async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { + self.policy = self.get_policy_from_file().await?; + Ok(()) + } + + fn hash(&self) -> String { + hex::encode(policy_hash(&self.policy)) + } } #[derive(Clone, Default)] pub struct UrlPolicyLoader { - policy_id: String, - address: String, - policy: Vec, - entrypoint: String, + policy_id: String, + address: String, + policy: Vec, + entrypoint: String, } impl UrlPolicyLoader { - pub fn new(url: &str, policy_id: &str, entrypoint: &str) -> Self { - Self { - address: url.into(), - policy_id: policy_id.to_owned(), - entrypoint: entrypoint.to_owned(), - ..Default::default() - } - } + pub fn new(url: &str, policy_id: &str, entrypoint: &str) -> Self { + Self { + address: url.into(), + policy_id: policy_id.to_owned(), + entrypoint: entrypoint.to_owned(), + ..Default::default() + } + } } #[async_trait::async_trait] impl PolicyLoader for UrlPolicyLoader { - fn set_address(&mut self, address: &str) { - self.address = address.to_owned(); - } + fn set_address(&mut self, address: &str) { + self.address = address.to_owned(); + } - fn set_rule_name(&mut self, name: &str) { - self.policy_id = name.to_owned(); - } + fn set_rule_name(&mut self, name: &str) { + self.policy_id = name.to_owned(); + } - fn set_entrypoint(&mut self, entrypoint: &str) { - self.entrypoint = entrypoint.to_owned(); - } + fn set_entrypoint(&mut self, entrypoint: &str) { + self.entrypoint = entrypoint.to_owned(); + } - fn get_address(&self) -> &str { - &self.address - } + fn get_address(&self) -> &str { + &self.address + } - fn get_rule_name(&self) -> &str { - &self.policy_id - } + fn get_rule_name(&self) -> &str { + &self.policy_id + } - fn get_entrypoint(&self) -> &str { - &self.entrypoint - } + fn get_entrypoint(&self) -> &str { + &self.entrypoint + } - fn get_policy(&self) -> &[u8] { - &self.policy - } + fn get_policy(&self) -> &[u8] { + &self.policy + } - fn load_policy_from_bytes(&mut self, policy: &[u8]) { - self.policy = policy.to_vec(); - } + fn load_policy_from_bytes(&mut self, policy: &[u8]) { + self.policy = policy.to_vec(); + } - async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { - let address = &self.address; - let bundle = load_bytes_from_url(address).await?; + async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { + let address = &self.address; + let bundle = load_bytes_from_url(address).await?; - info!(loaded_policy_bytes=?bundle.len(), "Loaded policy bundle"); + info!(loaded_policy_bytes=?bundle.len(), "Loaded policy bundle"); - if bundle.is_empty() { - error!("Policy not found: {}", self.get_rule_name()); - return Err(PolicyLoaderError::MissingPolicy(self.get_rule_name().to_string())); - } + if 
bundle.is_empty() { + error!("Policy not found: {}", self.get_rule_name()); + return Err(PolicyLoaderError::MissingPolicy(self.get_rule_name().to_string())); + } - self.load_policy_from_bundle(&Bundle::from_bytes(&*bundle)?) - } + self.load_policy_from_bundle(&Bundle::from_bytes(&*bundle)?) + } - fn hash(&self) -> String { - hex::encode(policy_hash(&self.policy)) - } + fn hash(&self) -> String { + hex::encode(policy_hash(&self.policy)) + } } trait SetRuleOptions { - fn rule_addr(&mut self, options: &ArgMatches) -> Result<(), CliError>; - fn rule_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError>; - fn set_addr_and_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError> { - self.rule_addr(options)?; - self.rule_entrypoint(options)?; - Ok(()) - } + fn rule_addr(&mut self, options: &ArgMatches) -> Result<(), CliError>; + fn rule_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError>; + fn set_addr_and_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError> { + self.rule_addr(options)?; + self.rule_entrypoint(options)?; + Ok(()) + } } impl SetRuleOptions for CliPolicyLoader { - fn rule_addr(&mut self, options: &ArgMatches) -> Result<(), CliError> { - if let Some(val) = options.get_one::<String>("opa-rule") { - self.set_address(val); - Ok(()) - } else { - Err(CliError::MissingArgument { arg: "opa-rule".to_string() }) - } - } - - fn rule_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError> { - if let Some(val) = options.get_one::<String>("opa-entrypoint") { - self.set_entrypoint(val); - Ok(()) - } else { - Err(CliError::MissingArgument { arg: "opa-entrypoint".to_string() }) - } - } + fn rule_addr(&mut self, options: &ArgMatches) -> Result<(), CliError> { + if let Some(val) = options.get_one::<String>("opa-rule") { + self.set_address(val); + Ok(()) + } else { + Err(CliError::MissingArgument { arg: "opa-rule".to_string() }) + } + } + + fn rule_entrypoint(&mut self, options: &ArgMatches) -> Result<(), CliError> { + if let Some(val) = options.get_one::<String>("opa-entrypoint") { + self.set_entrypoint(val); + Ok(()) + } else { + Err(CliError::MissingArgument { arg: "opa-entrypoint".to_string() }) + } + } } #[instrument()] pub async fn opa_executor_from_embedded_policy( - policy_name: &str, - entrypoint: &str, + policy_name: &str, + entrypoint: &str, ) -> Result<ExecutorContext, CliError> { - let loader = CliPolicyLoader::from_embedded_policy(policy_name, entrypoint)?; - Ok(ExecutorContext::from_loader(&loader)?) + let loader = CliPolicyLoader::from_embedded_policy(policy_name, entrypoint)?; + Ok(ExecutorContext::from_loader(&loader)?)
} pub async fn read_opa_settings( - client: &ChronicleSubstrateClient, + client: &ChronicleSubstrateClient, ) -> Result<Option<OpaSettings>, SubxtClientError> { - client.load_settings_from_storage().await + client.load_settings_from_storage().await } #[instrument(skip(chronicle_client, opa_client))] pub async fn opa_executor_from_substrate_state( - chronicle_client: &ChronicleSubstrateClient, - opa_client: &OpaSubstrateClient, + chronicle_client: &ChronicleSubstrateClient, + opa_client: &OpaSubstrateClient, ) -> Result<(ExecutorContext, Option<OpaSettings>), CliError> { - let opa_settings = read_opa_settings(chronicle_client).await?; - debug!(on_chain_opa_policy = ?opa_settings); - if let Some(opa_settings) = opa_settings { - let mut loader = SubstratePolicyLoader::new(opa_settings.clone(), opa_client); - loader.load_policy().await?; - - Ok((ExecutorContext::from_loader(&loader)?, Some(opa_settings))) - } else { - Err(CliError::NoOnChainSettings) - } + let opa_settings = read_opa_settings(chronicle_client).await?; + debug!(on_chain_opa_policy = ?opa_settings); + if let Some(opa_settings) = opa_settings { + let mut loader = SubstratePolicyLoader::new(opa_settings.clone(), opa_client); + loader.load_policy().await?; + + Ok((ExecutorContext::from_loader(&loader)?, Some(opa_settings))) + } else { + Err(CliError::NoOnChainSettings) + } }
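For reviewers tracing the loader plumbing, a minimal sketch of how these constructors compose from a caller's perspective; the rule and entrypoint names are placeholders, and the conversion from PolicyLoaderError into CliError is assumed to exist, as the surrounding functions already rely on it via the `?` operator:

// Hedged sketch, not part of this diff: building an executor context directly
// from raw bundle bytes, mirroring opa_executor_from_embedded_policy above.
fn executor_from_raw_bundle(bundle_bytes: &[u8]) -> Result<ExecutorContext, CliError> {
    // "allow_transactions" and its entrypoint are illustrative names only.
    let loader = CliPolicyLoader::from_policy_bytes(
        "allow_transactions",
        "allow_transactions/allowed_users",
        bundle_bytes,
    )?;
    Ok(ExecutorContext::from_loader(&loader)?)
}

The URL-backed variant below follows the same load-then-wrap pattern, differing only in where the bundle bytes come from.

#[instrument()] pub async fn opa_executor_from_url( - url: &str, - policy_name: &str, - entrypoint: &str, + url: &str, + policy_name: &str, + entrypoint: &str, ) -> Result<ExecutorContext, CliError> { - let mut loader = UrlPolicyLoader::new(url, policy_name, entrypoint); - loader.load_policy().await?; - Ok(ExecutorContext::from_loader(&loader)?) + let mut loader = UrlPolicyLoader::new(url, policy_name, entrypoint); + loader.load_policy().await?; + Ok(ExecutorContext::from_loader(&loader)?)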
} #[cfg(test)] mod tests { - use std::{collections::BTreeSet, io::Write}; - - use serde_json::Value; - - use common::{ - identity::{AuthId, IdentityContext, JwtClaims, OpaData}, - opa::std::{EmbeddedOpaPolicies, OpaExecutor, OpaExecutorError, WasmtimeOpaExecutor}, - }; - - use super::*; - - fn chronicle_id() -> AuthId { - AuthId::chronicle() - } - - fn chronicle_user_opa_data() -> OpaData { - OpaData::Operation(IdentityContext::new( - AuthId::chronicle(), - Value::default(), - Value::default(), - )) - } - - fn allow_all_users() -> (String, String) { - let policy_name = "allow_transactions".to_string(); - let entrypoint = "allow_transactions/allowed_users".to_string(); - (policy_name, entrypoint) - } - - fn anonymous_user() -> AuthId { - AuthId::anonymous() - } - - fn anonymous_user_opa_data() -> OpaData { - OpaData::Operation(IdentityContext::new( - AuthId::anonymous(), - Value::default(), - Value::default(), - )) - } - - fn jwt_user() -> AuthId { - let claims = JwtClaims( - serde_json::json!({ + use std::{collections::BTreeSet, io::Write}; + + use serde_json::Value; + + use common::{ + identity::{AuthId, IdentityContext, JwtClaims, OpaData}, + opa::std::{EmbeddedOpaPolicies, OpaExecutor, OpaExecutorError, WasmtimeOpaExecutor}, + }; + + use super::*; + + fn chronicle_id() -> AuthId { + AuthId::chronicle() + } + + fn chronicle_user_opa_data() -> OpaData { + OpaData::Operation(IdentityContext::new( + AuthId::chronicle(), + Value::default(), + Value::default(), + )) + } + + fn allow_all_users() -> (String, String) { + let policy_name = "allow_transactions".to_string(); + let entrypoint = "allow_transactions/allowed_users".to_string(); + (policy_name, entrypoint) + } + + fn anonymous_user() -> AuthId { + AuthId::anonymous() + } + + fn anonymous_user_opa_data() -> OpaData { + OpaData::Operation(IdentityContext::new( + AuthId::anonymous(), + Value::default(), + Value::default(), + )) + } + + fn jwt_user() -> AuthId { + let claims = JwtClaims( + serde_json::json!({ "sub": "abcdef", }) - .as_object() - .unwrap() - .to_owned(), - ); - AuthId::from_jwt_claims(&claims, &BTreeSet::from(["sub".to_string()])).unwrap() - } - - fn jwt_user_opa_data() -> OpaData { - OpaData::Operation(IdentityContext::new(jwt_user(), Value::default(), Value::default())) - } - - #[test] - fn policy_loader_invalid_rule() { - let (_policy, entrypoint) = allow_all_users(); - let invalid_rule = "a_rule_that_does_not_exist"; - match CliPolicyLoader::from_embedded_policy(invalid_rule, &entrypoint) { - Err(e) => { - insta::assert_snapshot!(e.to_string(), @"Policy not found: a_rule_that_does_not_exist") - } - _ => panic!("expected error"), - } - } - - #[tokio::test] - async fn opa_executor_allow_chronicle_users() -> Result<(), OpaExecutorError> { - let (policy, entrypoint) = allow_all_users(); - let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; - let mut executor = WasmtimeOpaExecutor::from_loader(&loader).unwrap(); - assert!(executor.evaluate(&chronicle_id(), &chronicle_user_opa_data()).await.is_ok()); - Ok(()) - } - - #[tokio::test] - async fn opa_executor_allow_anonymous_users() -> Result<(), OpaExecutorError> { - let (policy, entrypoint) = allow_all_users(); - let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; - let mut executor = WasmtimeOpaExecutor::from_loader(&loader).unwrap(); - executor.evaluate(&anonymous_user(), &anonymous_user_opa_data()).await.unwrap(); - Ok(()) - } - - #[tokio::test] - async fn opa_executor_allow_jwt_users() -> Result<(), OpaExecutorError> { - let 
(policy, entrypoint) = allow_all_users(); - let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; - let mut executor = WasmtimeOpaExecutor::from_loader(&loader)?; - assert!(executor.evaluate(&jwt_user(), &jwt_user_opa_data()).await.is_ok()); - Ok(()) - } - - const BUNDLE_FILE: &str = "bundle.tar.gz"; - - fn embedded_policy_bundle() -> Result<Vec<u8>, PolicyLoaderError> { - EmbeddedOpaPolicies::get(BUNDLE_FILE) - .map(|file| file.data.to_vec()) - .ok_or(PolicyLoaderError::EmbeddedOpaPolicies) - } - - #[tokio::test] - async fn test_load_policy_from_http_url() { - let embedded_bundle = embedded_policy_bundle().unwrap(); - let (rule, entrypoint) = allow_all_users(); - - let mut server = mockito::Server::new_async().await; - // Start the mock server and define the response - let _m = server.mock("GET", "/bundle.tar.gz").with_body(&embedded_bundle).create(); - - // Create the URL policy loader - let mut loader = - UrlPolicyLoader::new(&format!("{}/bundle.tar.gz", server.url()), &rule, &entrypoint); - - // Load the policy - let result = loader.load_policy().await; - assert!(result.is_ok()); - - let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); - - // Extract the policy from the bundle we embedded in the binary - let policy_from_embedded_bundle = bundle - .wasm_policies - .iter() - .find(|p| p.entrypoint == rule) - .map(|p| p.bytes.as_ref()) - .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) - .unwrap(); - - // Get the loaded policy from the url - let policy_from_url = loader.get_policy(); - - assert_eq!(&policy_from_url, &policy_from_embedded_bundle); - } - - #[tokio::test] - async fn test_load_policy_from_file_url() { - let embedded_bundle = embedded_policy_bundle().unwrap(); - let (rule, entrypoint) = allow_all_users(); - - let temp_dir = tempfile::tempdir().unwrap(); - let policy_path = temp_dir.path().join("bundle.tar.gz"); - let mut file = std::fs::File::create(&policy_path).unwrap(); - file.write_all(&embedded_bundle).unwrap(); - - // Create the file URL policy loader - let file_url = format!("file://{}", policy_path.to_string_lossy()); - let mut loader = UrlPolicyLoader::new(&file_url, &rule, &entrypoint); - - // Load the policy - let result = loader.load_policy().await; - assert!(result.is_ok()); - - let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); - - // Extract the policy from the bundle we embedded in the binary - let policy_from_embedded_bundle = bundle - .wasm_policies - .iter() - .find(|p| p.entrypoint == rule) - .map(|p| p.bytes.as_ref()) - .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) - .unwrap(); - - // Get the loaded policy from the file URL - let policy_from_file_url = loader.get_policy(); - - assert_eq!(policy_from_embedded_bundle, policy_from_file_url); - } - - #[tokio::test] - async fn test_load_policy_from_bare_path() { - let embedded_bundle = embedded_policy_bundle().unwrap(); - let (rule, entrypoint) = allow_all_users(); - - let temp_dir = tempfile::tempdir().unwrap(); - let policy_path = temp_dir.path().join("bundle.tar.gz"); - let mut file = std::fs::File::create(&policy_path).unwrap(); - file.write_all(&embedded_bundle).unwrap(); - - // Create the bare path policy loader - let mut loader = UrlPolicyLoader::new(&policy_path.to_string_lossy(), &rule, &entrypoint); - - // Load the policy - let result = loader.load_policy().await; - assert!(result.is_ok()); - - let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); - - // Extract the policy from the bundle we embedded in the binary - let
policy_from_embedded_bundle = bundle - .wasm_policies - .iter() - .find(|p| p.entrypoint == rule) - .map(|p| p.bytes.as_ref()) - .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) - .unwrap(); - - // Get the loaded policy from the url - let policy_from_bare_path_url = loader.get_policy(); - - assert_eq!(policy_from_embedded_bundle, policy_from_bare_path_url); - } + .as_object() + .unwrap() + .to_owned(), + ); + AuthId::from_jwt_claims(&claims, &BTreeSet::from(["sub".to_string()])).unwrap() + } + + fn jwt_user_opa_data() -> OpaData { + OpaData::Operation(IdentityContext::new(jwt_user(), Value::default(), Value::default())) + } + + #[test] + fn policy_loader_invalid_rule() { + let (_policy, entrypoint) = allow_all_users(); + let invalid_rule = "a_rule_that_does_not_exist"; + match CliPolicyLoader::from_embedded_policy(invalid_rule, &entrypoint) { + Err(e) => { + insta::assert_snapshot!(e.to_string(), @"Policy not found: a_rule_that_does_not_exist") + }, + _ => panic!("expected error"), + } + } + + #[tokio::test] + async fn opa_executor_allow_chronicle_users() -> Result<(), OpaExecutorError> { + let (policy, entrypoint) = allow_all_users(); + let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; + let mut executor = WasmtimeOpaExecutor::from_loader(&loader).unwrap(); + assert!(executor.evaluate(&chronicle_id(), &chronicle_user_opa_data()).await.is_ok()); + Ok(()) + } + + #[tokio::test] + async fn opa_executor_allow_anonymous_users() -> Result<(), OpaExecutorError> { + let (policy, entrypoint) = allow_all_users(); + let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; + let mut executor = WasmtimeOpaExecutor::from_loader(&loader).unwrap(); + executor.evaluate(&anonymous_user(), &anonymous_user_opa_data()).await.unwrap(); + Ok(()) + } + + #[tokio::test] + async fn opa_executor_allow_jwt_users() -> Result<(), OpaExecutorError> { + let (policy, entrypoint) = allow_all_users(); + let loader = CliPolicyLoader::from_embedded_policy(&policy, &entrypoint)?; + let mut executor = WasmtimeOpaExecutor::from_loader(&loader)?; + assert!(executor.evaluate(&jwt_user(), &jwt_user_opa_data()).await.is_ok()); + Ok(()) + } + + const BUNDLE_FILE: &str = "bundle.tar.gz"; + + fn embedded_policy_bundle() -> Result<Vec<u8>, PolicyLoaderError> { + EmbeddedOpaPolicies::get(BUNDLE_FILE) + .map(|file| file.data.to_vec()) + .ok_or(PolicyLoaderError::EmbeddedOpaPolicies) + } + + #[tokio::test] + async fn test_load_policy_from_http_url() { + let embedded_bundle = embedded_policy_bundle().unwrap(); + let (rule, entrypoint) = allow_all_users(); + + let mut server = mockito::Server::new_async().await; + // Start the mock server and define the response + let _m = server.mock("GET", "/bundle.tar.gz").with_body(&embedded_bundle).create(); + + // Create the URL policy loader + let mut loader = + UrlPolicyLoader::new(&format!("{}/bundle.tar.gz", server.url()), &rule, &entrypoint); + + // Load the policy + let result = loader.load_policy().await; + assert!(result.is_ok()); + + let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); + + // Extract the policy from the bundle we embedded in the binary + let policy_from_embedded_bundle = bundle + .wasm_policies + .iter() + .find(|p| p.entrypoint == rule) + .map(|p| p.bytes.as_ref()) + .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) + .unwrap(); + + // Get the loaded policy from the url + let policy_from_url = loader.get_policy(); + + assert_eq!(&policy_from_url, &policy_from_embedded_bundle); + } + + #[tokio::test] + async fn
test_load_policy_from_file_url() { + let embedded_bundle = embedded_policy_bundle().unwrap(); + let (rule, entrypoint) = allow_all_users(); + + let temp_dir = tempfile::tempdir().unwrap(); + let policy_path = temp_dir.path().join("bundle.tar.gz"); + let mut file = std::fs::File::create(&policy_path).unwrap(); + file.write_all(&embedded_bundle).unwrap(); + + // Create the file URL policy loader + let file_url = format!("file://{}", policy_path.to_string_lossy()); + let mut loader = UrlPolicyLoader::new(&file_url, &rule, &entrypoint); + + // Load the policy + let result = loader.load_policy().await; + assert!(result.is_ok()); + + let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); + + // Extract the policy from the bundle we embedded in the binary + let policy_from_embedded_bundle = bundle + .wasm_policies + .iter() + .find(|p| p.entrypoint == rule) + .map(|p| p.bytes.as_ref()) + .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) + .unwrap(); + + // Get the loaded policy from the file URL + let policy_from_file_url = loader.get_policy(); + + assert_eq!(policy_from_embedded_bundle, policy_from_file_url); + } + + #[tokio::test] + async fn test_load_policy_from_bare_path() { + let embedded_bundle = embedded_policy_bundle().unwrap(); + let (rule, entrypoint) = allow_all_users(); + + let temp_dir = tempfile::tempdir().unwrap(); + let policy_path = temp_dir.path().join("bundle.tar.gz"); + let mut file = std::fs::File::create(&policy_path).unwrap(); + file.write_all(&embedded_bundle).unwrap(); + + // Create the bare path policy loader + let mut loader = UrlPolicyLoader::new(&policy_path.to_string_lossy(), &rule, &entrypoint); + + // Load the policy + let result = loader.load_policy().await; + assert!(result.is_ok()); + + let bundle = Bundle::from_bytes(&embedded_bundle).unwrap(); + + // Extract the policy from the bundle we embedded in the binary + let policy_from_embedded_bundle = bundle + .wasm_policies + .iter() + .find(|p| p.entrypoint == rule) + .map(|p| p.bytes.as_ref()) + .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string())) + .unwrap(); + + // Get the loaded policy from the url + let policy_from_bare_path_url = loader.get_policy(); + + assert_eq!(policy_from_embedded_bundle, policy_from_bare_path_url); + } } diff --git a/crates/chronicle/src/codegen/linter.rs b/crates/chronicle/src/codegen/linter.rs index 2b509f81e..c968dff74 100644 --- a/crates/chronicle/src/codegen/linter.rs +++ b/crates/chronicle/src/codegen/linter.rs @@ -5,149 +5,149 @@ use jsonschema::{error::ValidationErrorKind, JSONSchema}; use common::domain::{DomainFileInput, ResourceDef}; fn bad_filename(filename: &str) { - println!("JSON or YAML filename extension required for {filename}"); - exit(2); + println!("JSON or YAML filename extension required for {filename}"); + exit(2); } fn build_json_validator(domain: &str) -> JSONSchema { - let json = match serde_json::from_str(domain) { - Ok(json) => json, - Err(error) => { - println!("failed to parse valid JSON from domain schema: {error}"); - exit(2); - } - }; - match JSONSchema::options().with_draft(jsonschema::Draft::Draft7).compile(&json) { - Ok(json_schema) => json_schema, - Err(error) => { - println!("failed to interpret JSON as a domain schema: {error}"); - exit(2); - } - } + let json = match serde_json::from_str(domain) { + Ok(json) => json, + Err(error) => { + println!("failed to parse valid JSON from domain schema: {error}"); + exit(2); + }, + }; + match JSONSchema::options().with_draft(jsonschema::Draft::Draft7).compile(&json) { + Ok(json_schema) 
=> json_schema, + Err(error) => { + println!("failed to interpret JSON as a domain schema: {error}"); + exit(2); + }, + } } fn check_json_valid(json_validator: &JSONSchema, json_data: &str) { - let json = match serde_json::from_str(json_data) { - Ok(json) => json, - Err(error) => { - println!("failed to parse valid JSON: {error}"); - exit(2); - } - }; - let validation = json_validator.validate(&json); - if let Err(errors) = validation { - for error in errors { - println!("path {} contains invalid data: {}", error.instance_path, error); - if let ValidationErrorKind::Pattern { pattern } = error.kind { - match pattern.as_str() { - "^[A-Z][A-Z0-9_]*$" => { - println!("hint: start with capital letter, use only CAPITALS_UNDERSCORES_NUM8ER5"); - } - "[A-Z][A-Za-z0-9]*$" => { - println!("hint: start with capital letter, use only LettersAndNum8er5"); - } - _ => {} - } - } - } - exit(2); - } + let json = match serde_json::from_str(json_data) { + Ok(json) => json, + Err(error) => { + println!("failed to parse valid JSON: {error}"); + exit(2); + }, + }; + let validation = json_validator.validate(&json); + if let Err(errors) = validation { + for error in errors { + println!("path {} contains invalid data: {}", error.instance_path, error); + if let ValidationErrorKind::Pattern { pattern } = error.kind { + match pattern.as_str() { + "^[A-Z][A-Z0-9_]*$" => { + println!("hint: start with capital letter, use only CAPITALS_UNDERSCORES_NUM8ER5"); + }, + "[A-Z][A-Za-z0-9]*$" => { + println!("hint: start with capital letter, use only LettersAndNum8er5"); + }, + _ => {}, + } + } + } + exit(2); + } } fn check_yaml_valid(json_validator: &JSONSchema, yaml_data: &str) { - let json = match serde_yaml::from_str::<serde_json::Value>(yaml_data) { - Ok(json) => json, - Err(error) => { - println!("failed to parse valid YAML: {error}"); - exit(2); - } - }; - let json_data = match serde_json::to_string(&json) { - Ok(json_data) => json_data, - Err(error) => { - println!("failed to write valid JSON from YAML: {error}"); - exit(2); - } - }; - check_json_valid(json_validator, &json_data); + let json = match serde_yaml::from_str::<serde_json::Value>(yaml_data) { + Ok(json) => json, + Err(error) => { + println!("failed to parse valid YAML: {error}"); + exit(2); + }, + }; + let json_data = match serde_json::to_string(&json) { + Ok(json_data) => json_data, + Err(error) => { + println!("failed to write valid JSON from YAML: {error}"); + exit(2); + }, + }; + check_json_valid(json_validator, &json_data); } fn read_json_domain(data: &str) -> DomainFileInput { - match serde_json::from_str(data) { - Ok(domain) => domain, - Err(error) => { - println!("failed to interpret JSON as a domain: {error}"); - exit(2); - } - } + match serde_json::from_str(data) { + Ok(domain) => domain, + Err(error) => { + println!("failed to interpret JSON as a domain: {error}"); + exit(2); + }, + } } fn read_yaml_domain(data: &str) -> DomainFileInput { - match serde_yaml::from_str(data) { - Ok(domain) => domain, - Err(error) => { - println!("failed to interpret YAML as a domain: {error}"); - exit(2); - } - } + match serde_yaml::from_str(data) { + Ok(domain) => domain, + Err(error) => { + println!("failed to interpret YAML as a domain: {error}"); + exit(2); + }, + } }
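For review context, a minimal sketch of driving this linter end to end, assuming the module stays reachable as chronicle::codegen::linter; as check_files (defined at the end of this file) shows, the linter prints a diagnostic and exits with status 2 on the first invalid file rather than returning an error:

// Hedged sketch, not part of this diff: lint every domain file named on the
// command line; reaching the println! means every file passed.
fn main() {
    let args: Vec<String> = std::env::args().skip(1).collect();
    let filenames: Vec<&str> = args.iter().map(String::as_str).collect();
    chronicle::codegen::linter::check_files(filenames);
    println!("all domain files passed the linter");
}

The attribute cross-check that backs this continues below.

fn check_domain_attributes( - element: &str, - attributes: &HashSet<String>, - named_resources: Vec<(&String, &ResourceDef)>, + element: &str, + attributes: &HashSet<String>, + named_resources: Vec<(&String, &ResourceDef)>, ) { - let mut is_error = false; - for (name, resource) in named_resources { - for attribute in resource.attributes.iter() { - if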
!(attributes.contains(&attribute.0)) { - println!("{} named {} has unknown attribute {}", element, name, attribute.0); - is_error = true; - } - } - } - if is_error { - exit(2); - } + let mut is_error = false; + for (name, resource) in named_resources { + for attribute in resource.attributes.iter() { + if !(attributes.contains(&attribute.0)) { + println!("{} named {} has unknown attribute {}", element, name, attribute.0); + is_error = true; + } + } + } + if is_error { + exit(2); + } } fn check_domain(domain: DomainFileInput) { - let attributes = domain.attributes.keys().map(std::clone::Clone::clone).collect(); - check_domain_attributes("agent", &attributes, domain.agents.iter().collect()); - check_domain_attributes("entity", &attributes, domain.entities.iter().collect()); - check_domain_attributes("activity", &attributes, domain.activities.iter().collect()); + let attributes = domain.attributes.keys().map(std::clone::Clone::clone).collect(); + check_domain_attributes("agent", &attributes, domain.agents.iter().collect()); + check_domain_attributes("entity", &attributes, domain.entities.iter().collect()); + check_domain_attributes("activity", &attributes, domain.activities.iter().collect()); } pub fn check_files(filenames: Vec<&str>) { - let json_validator = build_json_validator(include_str!("../../schema/domain.json")); - for filename in filenames { - let filepath = Path::new(filename); - let data = match std::fs::read_to_string(filepath) { - Ok(data) => data, - Err(error) => { - println!("failed to read {filename}: {error}"); - exit(2); - } - }; - match filepath.extension() { - Some(extension) => { - match extension.to_ascii_lowercase().to_str() { - Some("json") | Some("jsn") => { - check_json_valid(&json_validator, data.as_str()); - check_domain(read_json_domain(&data)); - } - Some("yaml") | Some("yml") => { - check_yaml_valid(&json_validator, data.as_str()); - check_domain(read_yaml_domain(&data)); - } - _ => { - bad_filename(filename); - } - }; - } - None => { - bad_filename(filename); - } - }; - } + let json_validator = build_json_validator(include_str!("../../schema/domain.json")); + for filename in filenames { + let filepath = Path::new(filename); + let data = match std::fs::read_to_string(filepath) { + Ok(data) => data, + Err(error) => { + println!("failed to read {filename}: {error}"); + exit(2); + }, + }; + match filepath.extension() { + Some(extension) => { + match extension.to_ascii_lowercase().to_str() { + Some("json") | Some("jsn") => { + check_json_valid(&json_validator, data.as_str()); + check_domain(read_json_domain(&data)); + }, + Some("yaml") | Some("yml") => { + check_yaml_valid(&json_validator, data.as_str()); + check_domain(read_yaml_domain(&data)); + }, + _ => { + bad_filename(filename); + }, + }; + }, + None => { + bad_filename(filename); + }, + }; + } } diff --git a/crates/chronicle/src/codegen/mod.rs b/crates/chronicle/src/codegen/mod.rs index ec58ace81..648a34480 100644 --- a/crates/chronicle/src/codegen/mod.rs +++ b/crates/chronicle/src/codegen/mod.rs @@ -4,27 +4,29 @@ use std::{io::Write, path::Path}; use genco::prelude::*; -pub use common::domain::{AttributesTypeName, Builder, CliName, PrimitiveType, Property, TypeName}; -pub use common::domain::{ActivityDef, AgentDef, AttributeDef, ChronicleDomainDef, EntityDef}; +pub use common::domain::{ + ActivityDef, AgentDef, AttributeDef, AttributesTypeName, Builder, ChronicleDomainDef, CliName, + EntityDef, PrimitiveType, Property, TypeName, +}; pub mod linter; fn agent_union_type_name() -> String { - "Agent".to_owned() 
+ "Agent".to_owned() } fn entity_union_type_name() -> String { - "Entity".to_owned() + "Entity".to_owned() } fn activity_union_type_name() -> String { - "Activity".to_owned() + "Activity".to_owned() } fn gen_attribute_scalars(attributes: &[AttributeDef]) -> rust::Tokens { - let graphql_new_type = &rust::import("chronicle::async_graphql", "NewType"); - let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); - quote! { + let graphql_new_type = &rust::import("chronicle::async_graphql", "NewType"); + let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); + quote! { #(for attribute in attributes.iter() => #[derive(Clone, #graphql_new_type)] #[graphql(name = #_(#(attribute.as_scalar_type())), visible=true)] @@ -43,14 +45,14 @@ fn gen_attribute_scalars(attributes: &[AttributeDef]) -> rust::Tokens { } fn gen_association_and_attribution_unions() -> rust::Tokens { - let simple_object = &rust::import("chronicle::async_graphql", "SimpleObject").qualified(); + let simple_object = &rust::import("chronicle::async_graphql", "SimpleObject").qualified(); - let agent_ref_doc = include_str!("../../../../domain_docs/agent_ref.md"); - let association_doc = include_str!("../../../../domain_docs/association.md"); - let attribution_doc = include_str!("../../../../domain_docs/attribution.md"); - let entity_ref_doc = include_str!("../../../../domain_docs/entity_ref.md"); + let agent_ref_doc = include_str!("../../../../domain_docs/agent_ref.md"); + let association_doc = include_str!("../../../../domain_docs/association.md"); + let attribution_doc = include_str!("../../../../domain_docs/attribution.md"); + let entity_ref_doc = include_str!("../../../../domain_docs/entity_ref.md"); - quote! { + quote! { #[doc = #_(#agent_ref_doc)] #[derive(#simple_object)] @@ -88,20 +90,20 @@ fn gen_association_and_attribution_unions() -> rust::Tokens { } fn gen_type_enums(domain: &ChronicleDomainDef) -> rust::Tokens { - let graphql_enum = &rust::import("chronicle::async_graphql", "Enum"); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - let prov_role = &rust::import("chronicle::common::prov", "Role").qualified(); - - let activity_type_doc = include_str!("../../../../domain_docs/activity_type.md"); - let agent_type_doc = include_str!("../../../../domain_docs/agent_type.md"); - let entity_type_doc = include_str!("../../../../domain_docs/entity_type.md"); - let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); - let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); - let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); - let role_doc = include_str!("../../../../domain_docs/role.md"); - let unspecified_doc = include_str!("../../../../domain_docs/unspecified.md"); - - quote! 
{ + let graphql_enum = &rust::import("chronicle::async_graphql", "Enum"); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + let prov_role = &rust::import("chronicle::common::prov", "Role").qualified(); + + let activity_type_doc = include_str!("../../../../domain_docs/activity_type.md"); + let agent_type_doc = include_str!("../../../../domain_docs/agent_type.md"); + let entity_type_doc = include_str!("../../../../domain_docs/entity_type.md"); + let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); + let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); + let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); + let role_doc = include_str!("../../../../domain_docs/role.md"); + let unspecified_doc = include_str!("../../../../domain_docs/unspecified.md"); + + quote! { #[derive(#graphql_enum, Copy, Clone, Eq, PartialEq)] #[allow(clippy::upper_case_acronyms)] #[doc = #_(#role_doc)] @@ -235,12 +237,12 @@ fn gen_type_enums(domain: &ChronicleDomainDef) -> rust::Tokens { } fn gen_agent_union(agents: &[AgentDef]) -> rust::Tokens { - let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); + let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); - let agent_doc = include_str!("../../../../domain_docs/agent.md"); - let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); + let agent_doc = include_str!("../../../../domain_docs/agent.md"); + let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); - quote! { + quote! { #[doc = #_(#agent_doc)] #[allow(clippy::enum_variant_names)] #[allow(clippy::upper_case_acronyms)] @@ -259,12 +261,12 @@ fn gen_agent_union(agents: &[AgentDef]) -> rust::Tokens { } fn gen_entity_union(entities: &[EntityDef]) -> rust::Tokens { - let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); + let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); - let entity_doc = include_str!("../../../../domain_docs/entity.md"); - let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); + let entity_doc = include_str!("../../../../domain_docs/entity.md"); + let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); - quote! { + quote! { #[doc = #_(#entity_doc)] #[allow(clippy::enum_variant_names)] #[allow(clippy::upper_case_acronyms)] @@ -283,12 +285,12 @@ fn gen_entity_union(entities: &[EntityDef]) -> rust::Tokens { } fn gen_activity_union(activities: &[ActivityDef]) -> rust::Tokens { - let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); + let union_macro = rust::import("chronicle::async_graphql", "Union").qualified(); - let activity_doc = include_str!("../../../../domain_docs/activity.md"); - let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); + let activity_doc = include_str!("../../../../domain_docs/activity.md"); + let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); - quote! { + quote! 
{ #[doc = #_(#activity_doc)] #[allow(clippy::enum_variant_names)] #[allow(clippy::upper_case_acronyms)] @@ -307,35 +309,35 @@ fn gen_activity_union(activities: &[ActivityDef]) -> rust::Tokens { } fn gen_activity_definition(activity: &ActivityDef) -> rust::Tokens { - let abstract_activity = - &rust::import("chronicle::persistence::queryable", "Activity").qualified(); - let activity_impl = &rust::import("chronicle::api::chronicle_graphql", "activity").qualified(); - let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); - let activity_id = &rust::import("chronicle::common::prov", "ActivityId").qualified(); - let async_graphql_error_extensions = - &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); - - let timezone = &rust::import("chronicle::chrono", "TimeZone").direct(); - let object = rust::import("chronicle::async_graphql", "Object").qualified(); - let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); - let context = &rust::import("chronicle::async_graphql", "Context").qualified(); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - let date_time = &rust::import("chronicle::chrono", "DateTime"); - let utc = &rust::import("chronicle::chrono", "Utc"); - let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); - - let end_doc = include_str!("../../../../domain_docs/end.md"); - let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); - let generated_doc = include_str!("../../../../domain_docs/generated.md"); - let id_doc = include_str!("../../../../domain_docs/id.md"); - let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); - let start_doc = include_str!("../../../../domain_docs/start.md"); - let type_doc = include_str!("../../../../domain_docs/type.md"); - let used_doc = include_str!("../../../../domain_docs/used.md"); - let was_associated_with_doc = include_str!("../../../../domain_docs/was_associated_with.md"); - let was_informed_by_doc = include_str!("../../../../domain_docs/was_informed_by.md"); - - quote! 
{ + let abstract_activity = + &rust::import("chronicle::persistence::queryable", "Activity").qualified(); + let activity_impl = &rust::import("chronicle::api::chronicle_graphql", "activity").qualified(); + let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); + let activity_id = &rust::import("chronicle::common::prov", "ActivityId").qualified(); + let async_graphql_error_extensions = + &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); + + let timezone = &rust::import("chronicle::chrono", "TimeZone").direct(); + let object = rust::import("chronicle::async_graphql", "Object").qualified(); + let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); + let context = &rust::import("chronicle::async_graphql", "Context").qualified(); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + let date_time = &rust::import("chronicle::chrono", "DateTime"); + let utc = &rust::import("chronicle::chrono", "Utc"); + let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); + + let end_doc = include_str!("../../../../domain_docs/end.md"); + let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); + let generated_doc = include_str!("../../../../domain_docs/generated.md"); + let id_doc = include_str!("../../../../domain_docs/id.md"); + let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); + let start_doc = include_str!("../../../../domain_docs/start.md"); + let type_doc = include_str!("../../../../domain_docs/type.md"); + let used_doc = include_str!("../../../../domain_docs/used.md"); + let was_associated_with_doc = include_str!("../../../../domain_docs/was_associated_with.md"); + let was_informed_by_doc = include_str!("../../../../domain_docs/was_informed_by.md"); + + quote! 
{ #(register(activity_impl)) #[allow(clippy::upper_case_acronyms)] @@ -463,31 +465,31 @@ fn gen_activity_definition(activity: &ActivityDef) -> rust::Tokens { } fn gen_entity_definition(entity: &EntityDef) -> rust::Tokens { - let abstract_entity = &rust::import("chronicle::persistence::queryable", "Entity").qualified(); - let entity_impl = &rust::import("chronicle::api::chronicle_graphql", "entity").qualified(); - let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); - let entity_id = &rust::import("chronicle::common::prov", "EntityId").qualified(); - - let object = rust::import("chronicle::async_graphql", "Object").qualified(); - let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); - let context = &rust::import("chronicle::async_graphql", "Context").qualified(); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); - let async_graphql_error_extensions = - &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); - - let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); - let had_primary_source_doc = include_str!("../../../../domain_docs/had_primary_source.md"); - let id_doc = include_str!("../../../../domain_docs/id.md"); - let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); - let type_doc = include_str!("../../../../domain_docs/type.md"); - let was_attributed_to_doc = include_str!("../../../../domain_docs/was_attributed_to.md"); - let was_derived_from_doc = include_str!("../../../../domain_docs/was_derived_from.md"); - let was_generated_by_doc = include_str!("../../../../domain_docs/was_generated_by.md"); - let was_quoted_from_doc = include_str!("../../../../domain_docs/was_quoted_from.md"); - let was_revision_of_doc = include_str!("../../../../domain_docs/was_revision_of.md"); - - quote! 
{ + let abstract_entity = &rust::import("chronicle::persistence::queryable", "Entity").qualified(); + let entity_impl = &rust::import("chronicle::api::chronicle_graphql", "entity").qualified(); + let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); + let entity_id = &rust::import("chronicle::common::prov", "EntityId").qualified(); + + let object = rust::import("chronicle::async_graphql", "Object").qualified(); + let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); + let context = &rust::import("chronicle::async_graphql", "Context").qualified(); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); + let async_graphql_error_extensions = + &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); + + let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); + let had_primary_source_doc = include_str!("../../../../domain_docs/had_primary_source.md"); + let id_doc = include_str!("../../../../domain_docs/id.md"); + let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); + let type_doc = include_str!("../../../../domain_docs/type.md"); + let was_attributed_to_doc = include_str!("../../../../domain_docs/was_attributed_to.md"); + let was_derived_from_doc = include_str!("../../../../domain_docs/was_derived_from.md"); + let was_generated_by_doc = include_str!("../../../../domain_docs/was_generated_by.md"); + let was_quoted_from_doc = include_str!("../../../../domain_docs/was_quoted_from.md"); + let was_revision_of_doc = include_str!("../../../../domain_docs/was_revision_of.md"); + + quote! { #(register(entity_impl)) #[allow(clippy::upper_case_acronyms)] @@ -630,27 +632,27 @@ fn gen_entity_definition(entity: &EntityDef) -> rust::Tokens { } fn gen_agent_definition(agent: &AgentDef) -> rust::Tokens { - let abstract_agent = &rust::import("chronicle::persistence::queryable", "Agent").qualified(); - let agent_impl = &rust::import("chronicle::api::chronicle_graphql", "agent").qualified(); - let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); - let agent_union_type = &agent_union_type_name(); - let object = rust::import("chronicle::async_graphql", "Object").qualified(); - let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); - let context = &rust::import("chronicle::async_graphql", "Context").qualified(); - let agent_id = &rust::import("chronicle::common::prov", "AgentId"); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); - let async_graphql_error_extensions = - &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); - - let acted_on_behalf_of_doc = include_str!("../../../../domain_docs/acted_on_behalf_of.md"); - let attribution_doc = include_str!("../../../../domain_docs/attribution.md"); - let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); - let id_doc = include_str!("../../../../domain_docs/id.md"); - let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); - let type_doc = include_str!("../../../../domain_docs/type.md"); - - quote! 
{ + let abstract_agent = &rust::import("chronicle::persistence::queryable", "Agent").qualified(); + let agent_impl = &rust::import("chronicle::api::chronicle_graphql", "agent").qualified(); + let namespace = &rust::import("chronicle::persistence::queryable", "Namespace").qualified(); + let agent_union_type = &agent_union_type_name(); + let object = rust::import("chronicle::async_graphql", "Object").qualified(); + let async_result = &rust::import("chronicle::async_graphql", "Result").qualified(); + let context = &rust::import("chronicle::async_graphql", "Context").qualified(); + let agent_id = &rust::import("chronicle::common::prov", "AgentId"); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + let chronicle_json = &rust::import("chronicle::common::prov", "ChronicleJSON"); + let async_graphql_error_extensions = + &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); + + let acted_on_behalf_of_doc = include_str!("../../../../domain_docs/acted_on_behalf_of.md"); + let attribution_doc = include_str!("../../../../domain_docs/attribution.md"); + let external_id_doc = include_str!("../../../../domain_docs/external_id.md"); + let id_doc = include_str!("../../../../domain_docs/id.md"); + let namespace_doc = include_str!("../../../../domain_docs/namespace.md"); + let type_doc = include_str!("../../../../domain_docs/type.md"); + + quote! { #(register(agent_impl)) @@ -744,12 +746,12 @@ fn gen_agent_definition(agent: &AgentDef) -> rust::Tokens { } fn gen_abstract_prov_attributes() -> rust::Tokens { - let input_object = &rust::import("chronicle::async_graphql", "InputObject").qualified(); - let abstract_attributes = - &rust::import("chronicle::common::attributes", "Attributes").qualified(); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + let input_object = &rust::import("chronicle::async_graphql", "InputObject").qualified(); + let abstract_attributes = + &rust::import("chronicle::common::attributes", "Attributes").qualified(); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - quote! { + quote! { #[derive(#input_object, Clone)] pub struct ProvAgentAttributes { #[graphql(name = "type")] @@ -800,19 +802,19 @@ fn gen_abstract_prov_attributes() -> rust::Tokens { } fn gen_attribute_definition(typ: impl TypeName, attributes: &[AttributeDef]) -> rust::Tokens { - let abstract_attribute = - &rust::import("chronicle::common::attributes", "Attribute").qualified(); - let abstract_attributes = - &rust::import("chronicle::common::attributes", "Attributes").qualified(); - let input_object = rust::import("chronicle::async_graphql", "InputObject").qualified(); - let domain_type_id = rust::import("chronicle::common::prov", "DomaintypeId"); - let serde_value = &rust::import("chronicle::serde_json", "Value"); - - if attributes.is_empty() { - return quote! {}; - } - - quote! { + let abstract_attribute = + &rust::import("chronicle::common::attributes", "Attribute").qualified(); + let abstract_attributes = + &rust::import("chronicle::common::attributes", "Attributes").qualified(); + let input_object = rust::import("chronicle::async_graphql", "InputObject").qualified(); + let domain_type_id = rust::import("chronicle::common::prov", "DomaintypeId"); + let serde_value = &rust::import("chronicle::serde_json", "Value"); + + if attributes.is_empty() { + return quote! {}; + } + + quote! 
{ #[derive(#input_object)] #[graphql(name = #_(#(typ.attributes_type_name_preserve_inflection())))] pub struct #(typ.attributes_type_name_preserve_inflection()) { @@ -849,12 +851,12 @@ fn gen_attribute_definition(typ: impl TypeName, attributes: &[AttributeDef]) -> } fn gen_mappers(domain: &ChronicleDomainDef) -> rust::Tokens { - let agent_impl = &rust::import("chronicle::persistence::queryable", "Agent").qualified(); - let role = &rust::import("chronicle::common::prov", "Role").qualified(); - let entity_impl = &rust::import("chronicle::persistence::queryable", "Entity").qualified(); - let activity_impl = &rust::import("chronicle::persistence::queryable", "Activity").qualified(); + let agent_impl = &rust::import("chronicle::persistence::queryable", "Agent").qualified(); + let role = &rust::import("chronicle::common::prov", "Role").qualified(); + let entity_impl = &rust::import("chronicle::persistence::queryable", "Entity").qualified(); + let activity_impl = &rust::import("chronicle::persistence::queryable", "Activity").qualified(); - quote! { + quote! { #[allow(clippy::match_single_binding)] fn map_agent_to_domain_type(agent: #agent_impl) -> #(agent_union_type_name()) { match agent.domaintype.as_deref() { @@ -960,34 +962,34 @@ fn gen_mappers(domain: &ChronicleDomainDef) -> rust::Tokens { } fn gen_query() -> rust::Tokens { - let query_impl = &rust::import("chronicle::api::chronicle_graphql", "query").qualified(); - - let graphql_object = &rust::import("chronicle::async_graphql", "Object"); - let graphql_result = &rust::import("chronicle::async_graphql", "Result"); - let graphql_id = &rust::import("chronicle::async_graphql", "ID"); - let graphql_context = &rust::import("chronicle::async_graphql", "Context"); - let graphql_connection = &rust::import("chronicle::async_graphql::connection", "Connection"); - let async_graphql_error_extensions = - &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); - - let agent_id = &rust::import("chronicle::common::prov", "AgentIdOrExternal"); - let entity_id = &rust::import("chronicle::common::prov", "EntityIdOrExternal"); - let activity_id = &rust::import("chronicle::common::prov", "ActivityIdOrExternal"); - let empty_fields = - &rust::import("chronicle::async_graphql::connection", "EmptyFields").qualified(); - - let timeline_order = - &rust::import("chronicle::api::chronicle_graphql", "TimelineOrder").qualified(); - - let activities_by_type_doc = include_str!("../../../../domain_docs/activities_by_type.md"); - let activity_by_id_doc = include_str!("../../../../domain_docs/activity_by_id.md"); - let activity_timeline_doc = include_str!("../../../../domain_docs/activity_timeline.md"); - let agent_by_id_doc = include_str!("../../../../domain_docs/agent_by_id.md"); - let agents_by_type_doc = include_str!("../../../../domain_docs/agents_by_type.md"); - let entities_by_type_doc = include_str!("../../../../domain_docs/entities_by_type.md"); - let entity_by_id_doc = include_str!("../../../../domain_docs/entity_by_id.md"); - - quote! 
{ + let query_impl = &rust::import("chronicle::api::chronicle_graphql", "query").qualified(); + + let graphql_object = &rust::import("chronicle::async_graphql", "Object"); + let graphql_result = &rust::import("chronicle::async_graphql", "Result"); + let graphql_id = &rust::import("chronicle::async_graphql", "ID"); + let graphql_context = &rust::import("chronicle::async_graphql", "Context"); + let graphql_connection = &rust::import("chronicle::async_graphql::connection", "Connection"); + let async_graphql_error_extensions = + &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); + + let agent_id = &rust::import("chronicle::common::prov", "AgentIdOrExternal"); + let entity_id = &rust::import("chronicle::common::prov", "EntityIdOrExternal"); + let activity_id = &rust::import("chronicle::common::prov", "ActivityIdOrExternal"); + let empty_fields = + &rust::import("chronicle::async_graphql::connection", "EmptyFields").qualified(); + + let timeline_order = + &rust::import("chronicle::api::chronicle_graphql", "TimelineOrder").qualified(); + + let activities_by_type_doc = include_str!("../../../../domain_docs/activities_by_type.md"); + let activity_by_id_doc = include_str!("../../../../domain_docs/activity_by_id.md"); + let activity_timeline_doc = include_str!("../../../../domain_docs/activity_timeline.md"); + let agent_by_id_doc = include_str!("../../../../domain_docs/agent_by_id.md"); + let agents_by_type_doc = include_str!("../../../../domain_docs/agents_by_type.md"); + let entities_by_type_doc = include_str!("../../../../domain_docs/entities_by_type.md"); + let entity_by_id_doc = include_str!("../../../../domain_docs/entity_by_id.md"); + + quote! { #[derive(Copy, Clone)] pub struct Query; @@ -1208,43 +1210,43 @@ fn gen_query() -> rust::Tokens { } fn gen_mutation(domain: &ChronicleDomainDef) -> rust::Tokens { - let graphql_object = &rust::import("chronicle::async_graphql", "Object"); - - let graphql_result = &rust::import("chronicle::async_graphql", "Result"); - let graphql_context = &rust::import("chronicle::async_graphql", "Context"); - let async_graphql_error_extensions = - &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); - - let submission = &rust::import("chronicle::api::chronicle_graphql", "Submission"); - let impls = &rust::import("chronicle::api::chronicle_graphql", "mutation"); - - let entity_id = &rust::import("chronicle::common::prov", "EntityIdOrExternal"); - let agent_id = &rust::import("chronicle::common::prov", "AgentIdOrExternal"); - let activity_id = &rust::import("chronicle::common::prov", "ActivityIdOrExternal"); - let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); - - let abstract_attributes = - &rust::import("chronicle::common::attributes", "Attributes").qualified(); - - let acted_on_behalf_of_doc = include_str!("../../../../domain_docs/acted_on_behalf_of.md"); - let define_doc = include_str!("../../../../domain_docs/define.md"); - let end_doc = include_str!("../../../../domain_docs/end_activity.md"); - let had_primary_source_doc = include_str!("../../../../domain_docs/had_primary_source.md"); - let instant_activity_doc = include_str!("../../../../domain_docs/instant_activity.md"); - let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); - let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); - let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); - let start_doc = include_str!("../../../../domain_docs/start_activity.md"); - let 
used_doc = include_str!("../../../../domain_docs/used.md"); - let was_associated_with_doc = include_str!("../../../../domain_docs/was_associated_with.md"); - let was_attributed_to_doc = include_str!("../../../../domain_docs/was_attributed_to.md"); - let was_derived_from_doc = include_str!("../../../../domain_docs/was_derived_from.md"); - let was_generated_by_doc = include_str!("../../../../domain_docs/was_generated_by.md"); - let was_informed_by_doc = include_str!("../../../../domain_docs/was_informed_by.md"); - let was_quoted_from_doc = include_str!("../../../../domain_docs/was_quoted_from.md"); - let was_revision_of_doc = include_str!("../../../../domain_docs/was_revision_of.md"); - - quote! { + let graphql_object = &rust::import("chronicle::async_graphql", "Object"); + + let graphql_result = &rust::import("chronicle::async_graphql", "Result"); + let graphql_context = &rust::import("chronicle::async_graphql", "Context"); + let async_graphql_error_extensions = + &rust::import("chronicle::async_graphql", "ErrorExtensions").qualified(); + + let submission = &rust::import("chronicle::api::chronicle_graphql", "Submission"); + let impls = &rust::import("chronicle::api::chronicle_graphql", "mutation"); + + let entity_id = &rust::import("chronicle::common::prov", "EntityIdOrExternal"); + let agent_id = &rust::import("chronicle::common::prov", "AgentIdOrExternal"); + let activity_id = &rust::import("chronicle::common::prov", "ActivityIdOrExternal"); + let domain_type_id = &rust::import("chronicle::common::prov", "DomaintypeId"); + + let abstract_attributes = + &rust::import("chronicle::common::attributes", "Attributes").qualified(); + + let acted_on_behalf_of_doc = include_str!("../../../../domain_docs/acted_on_behalf_of.md"); + let define_doc = include_str!("../../../../domain_docs/define.md"); + let end_doc = include_str!("../../../../domain_docs/end_activity.md"); + let had_primary_source_doc = include_str!("../../../../domain_docs/had_primary_source.md"); + let instant_activity_doc = include_str!("../../../../domain_docs/instant_activity.md"); + let prov_activity_doc = include_str!("../../../../domain_docs/prov_activity.md"); + let prov_agent_doc = include_str!("../../../../domain_docs/prov_agent.md"); + let prov_entity_doc = include_str!("../../../../domain_docs/prov_entity.md"); + let start_doc = include_str!("../../../../domain_docs/start_activity.md"); + let used_doc = include_str!("../../../../domain_docs/used.md"); + let was_associated_with_doc = include_str!("../../../../domain_docs/was_associated_with.md"); + let was_attributed_to_doc = include_str!("../../../../domain_docs/was_attributed_to.md"); + let was_derived_from_doc = include_str!("../../../../domain_docs/was_derived_from.md"); + let was_generated_by_doc = include_str!("../../../../domain_docs/was_generated_by.md"); + let was_informed_by_doc = include_str!("../../../../domain_docs/was_informed_by.md"); + let was_quoted_from_doc = include_str!("../../../../domain_docs/was_quoted_from.md"); + let was_revision_of_doc = include_str!("../../../../domain_docs/was_revision_of.md"); + + quote! 
{ #[derive(Copy, Clone)] pub struct Mutation; @@ -1561,29 +1563,29 @@ fn gen_mutation(domain: &ChronicleDomainDef) -> rust::Tokens { } fn gen_graphql_type(domain: &ChronicleDomainDef) -> rust::Tokens { - let prov_agent = AgentDef { - external_id: "ProvAgent".to_owned(), - doc: Some(include_str!("../../../../domain_docs/prov_agent.md").to_string()), - attributes: vec![], - }; - let prov_activity = ActivityDef { - external_id: "ProvActivity".to_owned(), - doc: Some(include_str!("../../../../domain_docs/prov_activity.md").to_string()), - attributes: vec![], - }; - let prov_entity = EntityDef { - external_id: "ProvEntity".to_owned(), - doc: Some(include_str!("../../../../domain_docs/prov_entity.md").to_string()), - attributes: vec![], - }; - - let chronicledomaindef = &rust::import("chronicle::codegen", "ChronicleDomainDef"); - let tokio = &rust::import("chronicle", "tokio"); - - let bootstrap = rust::import("chronicle::bootstrap", "bootstrap"); - let chronicle_graphql = rust::import("chronicle::api::chronicle_graphql", "ChronicleGraphQl"); - - quote! { + let prov_agent = AgentDef { + external_id: "ProvAgent".to_owned(), + doc: Some(include_str!("../../../../domain_docs/prov_agent.md").to_string()), + attributes: vec![], + }; + let prov_activity = ActivityDef { + external_id: "ProvActivity".to_owned(), + doc: Some(include_str!("../../../../domain_docs/prov_activity.md").to_string()), + attributes: vec![], + }; + let prov_entity = EntityDef { + external_id: "ProvEntity".to_owned(), + doc: Some(include_str!("../../../../domain_docs/prov_entity.md").to_string()), + attributes: vec![], + }; + + let chronicledomaindef = &rust::import("chronicle::codegen", "ChronicleDomainDef"); + let tokio = &rust::import("chronicle", "tokio"); + + let bootstrap = rust::import("chronicle::bootstrap", "bootstrap"); + let chronicle_graphql = rust::import("chronicle::api::chronicle_graphql", "ChronicleGraphQl"); + + quote! 
{ #(gen_attribute_scalars(&domain.attributes)) #(gen_type_enums(domain)) #(gen_association_and_attribution_unions()) @@ -1617,11 +1619,11 @@ fn gen_graphql_type(domain: &ChronicleDomainDef) -> rust::Tokens { } pub fn generate_chronicle_domain_schema(domain: ChronicleDomainDef, path: impl AsRef<Path>) { - let tokens = gen_graphql_type(&domain); + let tokens = gen_graphql_type(&domain); - path.as_ref().parent().map(std::fs::create_dir_all); - let mut f = std::fs::File::create(path).unwrap(); - f.write_all(tokens.to_file_string().unwrap().as_bytes()).unwrap(); + path.as_ref().parent().map(std::fs::create_dir_all); + let mut f = std::fs::File::create(path).unwrap(); + f.write_all(tokens.to_file_string().unwrap().as_bytes()).unwrap(); - f.flush().unwrap(); + f.flush().unwrap(); } diff --git a/crates/chronicle/src/lib.rs b/crates/chronicle/src/lib.rs index c8910253a..d6e3e2d05 100644 --- a/crates/chronicle/src/lib.rs +++ b/crates/chronicle/src/lib.rs @@ -9,11 +9,10 @@ pub use uuid; /// Re-export dependencies for generated code pub use api; pub use chronicle_persistence as persistence; -pub use codegen::{Builder, generate_chronicle_domain_schema, PrimitiveType}; +pub use codegen::{generate_chronicle_domain_schema, Builder, PrimitiveType}; pub use common; pub use crate::bootstrap::bootstrap; pub mod bootstrap; pub mod codegen; - diff --git a/crates/common/build.rs b/crates/common/build.rs index 441139bb0..2c02a6c5c 100644 --- a/crates/common/build.rs +++ b/crates/common/build.rs @@ -3,21 +3,21 @@ use std::{env, fs, io::Result, path::PathBuf}; include!("./src/context.rs"); fn main() -> Result<()> { - let out_str = env::var("OUT_DIR").unwrap(); - let out_path = PathBuf::from(&out_str); - let mut out_path = out_path.ancestors().nth(3).unwrap().to_owned(); - out_path.push("assets"); + let out_str = env::var("OUT_DIR").unwrap(); + let out_path = PathBuf::from(&out_str); + let mut out_path = out_path.ancestors().nth(3).unwrap().to_owned(); + out_path.push("assets"); - if !out_path.exists() { - fs::create_dir(&out_path).expect("Could not create assets dir"); - } + if !out_path.exists() { + fs::create_dir(&out_path).expect("Could not create assets dir"); + } - let context = &*PROV; + let context = &*PROV; - std::fs::write( - std::path::Path::new(&format!("{}/context.json", out_path.as_os_str().to_string_lossy(), )), - serde_json::to_string_pretty(context)?, - )?; + std::fs::write( + std::path::Path::new(&format!("{}/context.json", out_path.as_os_str().to_string_lossy(),)), + serde_json::to_string_pretty(context)?, + )?; - Ok(()) + Ok(()) }
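Before the next file, a short sketch of how a downstream crate's build.rs would typically drive the generator above; ChronicleDomainDef::from_input_string is an assumed constructor name on our part, while generate_chronicle_domain_schema and its re-export from the chronicle crate root are both shown in this diff:

// Hedged sketch, not part of this diff: a downstream build script invoking
// the generator; the domain-loading constructor is an assumption, the call
// to generate_chronicle_domain_schema follows the signature above.
fn main() {
    let domain = std::fs::read_to_string("chronicle_domain.yaml").expect("readable domain file");
    let model = chronicle::codegen::ChronicleDomainDef::from_input_string(&domain)
        .expect("well-formed domain definition"); // constructor name assumed
    chronicle::generate_chronicle_domain_schema(model, "src/generated.rs");
}

diff --git a/crates/common/src/attributes.rs b/crates/common/src/attributes.rs index 811aede2a..029a98f59 100644 --- a/crates/common/src/attributes.rs +++ b/crates/common/src/attributes.rs @@ -1,14 +1,14 @@ #[cfg(feature = "std")] use std::collections::BTreeSet; -#[cfg(not(feature = "std"))] -use parity_scale_codec::{alloc::collections::BTreeSet, alloc::string::String, alloc::vec::Vec}; #[cfg(feature = "parity-encoding")] use parity_scale_codec::Encode; +#[cfg(not(feature = "std"))] +use parity_scale_codec::{alloc::collections::BTreeSet, alloc::string::String, alloc::vec::Vec}; #[cfg(feature = "parity-encoding")] use scale_encode::error::Kind; #[cfg(not(feature = "std"))] -use scale_info::{prelude::borrow::ToOwned}; +use scale_info::prelude::borrow::ToOwned; use serde_json::Value; use crate::prov::DomaintypeId; @@ -17,180 +17,180 @@ use crate::prov::DomaintypeId; pub struct SerdeWrapper(pub Value); impl core::fmt::Display for SerdeWrapper { - fn fmt(&self, f: &mut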
core::fmt::Formatter<'_>) -> core::fmt::Result { - match serde_json::to_string(&self.0) { - Ok(json_string) => write!(f, "{}", json_string), - Err(e) => { - tracing::error!("Failed to serialize Value to JSON string: {}", e); - Err(core::fmt::Error) - } - } - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match serde_json::to_string(&self.0) { + Ok(json_string) => write!(f, "{}", json_string), + Err(e) => { + tracing::error!("Failed to serialize Value to JSON string: {}", e); + Err(core::fmt::Error) + }, + } + } } impl From<Value> for SerdeWrapper { - fn from(value: Value) -> Self { - SerdeWrapper(value) - } + fn from(value: Value) -> Self { + SerdeWrapper(value) + } } #[cfg(feature = "parity-encoding")] impl scale_encode::EncodeAsType for SerdeWrapper { - fn encode_as_type_to( - &self, - type_id: u32, - _types: &scale_info::PortableRegistry, - out: &mut scale_encode::Vec<u8>, - ) -> Result<(), scale_encode::Error> { - let json_string = match serde_json::to_string(&self.0) { - Ok(json_string) => json_string, - Err(e) => { - tracing::error!("Failed to serialize Value to JSON string: {}", e); - return Err(scale_encode::Error::new(scale_encode::error::ErrorKind::WrongShape { - actual: Kind::Str, - expected: type_id, - })); - } - }; - json_string.encode_to(out); - Ok(()) - } + fn encode_as_type_to( + &self, + type_id: u32, + _types: &scale_info::PortableRegistry, + out: &mut scale_encode::Vec<u8>, + ) -> Result<(), scale_encode::Error> { + let json_string = match serde_json::to_string(&self.0) { + Ok(json_string) => json_string, + Err(e) => { + tracing::error!("Failed to serialize Value to JSON string: {}", e); + return Err(scale_encode::Error::new(scale_encode::error::ErrorKind::WrongShape { + actual: Kind::Str, + expected: type_id, + })); + }, + }; + json_string.encode_to(out); + Ok(()) + } } #[cfg(feature = "parity-encoding")] impl parity_scale_codec::Encode for SerdeWrapper { - fn encode_to<T: parity_scale_codec::Output + ?Sized>(&self, dest: &mut T) { - let json_string = - serde_json::to_string(&self.0).expect("Failed to serialize Value to JSON string"); - json_string.encode_to(dest); - } + fn encode_to<T: parity_scale_codec::Output + ?Sized>(&self, dest: &mut T) { + let json_string = + serde_json::to_string(&self.0).expect("Failed to serialize Value to JSON string"); + json_string.encode_to(dest); + } } #[cfg(feature = "parity-encoding")] impl parity_scale_codec::Decode for SerdeWrapper { - fn decode<I: parity_scale_codec::Input>( - input: &mut I, - ) -> Result<Self, parity_scale_codec::Error> { - let json_string = String::decode(input)?; - let value = serde_json::from_str(&json_string).map_err(|_| { - parity_scale_codec::Error::from("Failed to deserialize JSON string to Value") - })?; - Ok(SerdeWrapper(value)) - } + fn decode<I: parity_scale_codec::Input>( + input: &mut I, + ) -> Result<Self, parity_scale_codec::Error> { + let json_string = String::decode(input)?; + let value = serde_json::from_str(&json_string).map_err(|_| { + parity_scale_codec::Error::from("Failed to deserialize JSON string to Value") + })?; + Ok(SerdeWrapper(value)) + } } #[cfg(feature = "parity-encoding")] impl scale_info::TypeInfo for SerdeWrapper { - type Identity = Self; + type Identity = Self; - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("SerdeWrapper", module_path!())) - .composite(scale_info::build::Fields::unnamed().field(|f| f.ty::<String>())) - } + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("SerdeWrapper", module_path!())) + .composite(scale_info::build::Fields::unnamed().field(|f| f.ty::<String>())) + } } impl From<SerdeWrapper> for Value { - fn from(wrapper: SerdeWrapper) -> Self { - wrapper.0 - } + fn from(wrapper:
SerdeWrapper) -> Self { + wrapper.0 + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] pub struct Attribute { - pub typ: String, - pub value: SerdeWrapper, + pub typ: String, + pub value: SerdeWrapper, } impl core::fmt::Display for Attribute { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!( - f, - "Type: {}, Value: {}", - self.typ, - serde_json::to_string(&self.value.0).unwrap_or_else(|_| String::from("Invalid Value")) - ) - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "Type: {}, Value: {}", + self.typ, + serde_json::to_string(&self.value.0).unwrap_or_else(|_| String::from("Invalid Value")) + ) + } } impl Attribute { - pub fn get_type(&self) -> &String { - &self.typ - } + pub fn get_type(&self) -> &String { + &self.typ + } - pub fn get_value(&self) -> &Value { - &self.value.0 - } + pub fn get_value(&self) -> &Value { + &self.value.0 + } - pub fn new(typ: impl AsRef<str>, value: Value) -> Self { - Self { typ: typ.as_ref().to_owned(), value: value.into() } - } + pub fn new(typ: impl AsRef<str>, value: Value) -> Self { + Self { typ: typ.as_ref().to_owned(), value: value.into() } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_encode::EncodeAsType, - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - ) + feature = "parity-encoding", + derive( + scale_encode::EncodeAsType, + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + ) )] #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)] pub struct Attributes { - typ: Option<DomaintypeId>, - items: Vec<Attribute>, + typ: Option<DomaintypeId>, + items: Vec<Attribute>, } impl Attributes { - pub fn new(typ: Option<DomaintypeId>, mut items: Vec<Attribute>) -> Self { - let mut seen_types = BTreeSet::new(); - items.retain(|attr| seen_types.insert(attr.typ.clone())); - items.sort_by(|a, b| a.typ.cmp(&b.typ)); - Self { typ, items } - } - - pub fn get_attribute(&self, key: &str) -> Option<&Attribute> { - self.items.iter().find(|&attribute| attribute.typ == key) - } - - #[tracing::instrument(skip(self))] - pub fn get_values(&self) -> Vec<&Attribute> { - self.items.iter().collect() - } - - pub fn type_only(typ: Option<DomaintypeId>) -> Self { - Self { typ, items: Vec::new() } - } - - pub fn get_typ(&self) -> &Option<DomaintypeId> { - &self.typ - } - - pub fn get_items(&self) -> &[Attribute] { - &self.items - } - - pub fn into_items(self) -> Vec<Attribute> { - self.items - } - - pub fn add_item(&mut self, value: Attribute) { - if !self.items.iter().any(|item| item.typ == value.typ) { - if let Some(pos) = self.items.iter().position(|item| item.typ > value.typ) { - self.items.insert(pos, value); - } else { - self.items.push(value); - } - } - } + pub fn new(typ: Option<DomaintypeId>, mut items: Vec<Attribute>) -> Self { + let mut seen_types = BTreeSet::new(); + items.retain(|attr| seen_types.insert(attr.typ.clone())); + items.sort_by(|a, b| a.typ.cmp(&b.typ)); + Self { typ, items } + } + + pub fn get_attribute(&self, key: &str) -> Option<&Attribute> { + self.items.iter().find(|&attribute| attribute.typ == key) + } + + #[tracing::instrument(skip(self))] + pub fn get_values(&self) -> Vec<&Attribute> { + self.items.iter().collect() + } + + pub fn type_only(typ: Option<DomaintypeId>) -> Self { + Self {
typ, items: Vec::new() } + } + + pub fn get_typ(&self) -> &Option<DomaintypeId> { + &self.typ + } + + pub fn get_items(&self) -> &[Attribute] { + &self.items + } + + pub fn into_items(self) -> Vec<Attribute> { + self.items + } + + pub fn add_item(&mut self, value: Attribute) { + if !self.items.iter().any(|item| item.typ == value.typ) { + if let Some(pos) = self.items.iter().position(|item| item.typ > value.typ) { + self.items.insert(pos, value); + } else { + self.items.push(value); + } + } + } } diff --git a/crates/common/src/domain.rs b/crates/common/src/domain.rs index c262d5463..4105771e2 100644 --- a/crates/common/src/domain.rs +++ b/crates/common/src/domain.rs @@ -1,8 +1,8 @@ use std::{collections::BTreeMap, path::Path, str::FromStr}; use inflector::cases::{ - camelcase::to_camel_case, kebabcase::to_kebab_case, pascalcase::to_pascal_case, - snakecase::to_snake_case, + camelcase::to_camel_case, kebabcase::to_kebab_case, pascalcase::to_pascal_case, + snakecase::to_snake_case, }; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -11,540 +11,540 @@ use crate::prov::DomaintypeId; #[derive(Debug, Error)] pub enum ModelError { - #[error("Attribute not defined argument: {attr}")] - AttributeNotDefined { attr: String }, - - #[error("Model file not readable: {0}")] - ModelFileNotReadable( - #[from] - #[source] - std::io::Error, - ), - - #[error("Model file invalid JSON: {0}")] - ModelFileInvalidJson( - #[from] - #[source] - serde_json::Error, - ), - - #[error("Model file invalid YAML: {0}")] - ModelFileInvalidYaml( - #[from] - #[source] - serde_yaml::Error, - ), + #[error("Attribute not defined argument: {attr}")] + AttributeNotDefined { attr: String }, + + #[error("Model file not readable: {0}")] + ModelFileNotReadable( + #[from] + #[source] + std::io::Error, + ), + + #[error("Model file invalid JSON: {0}")] + ModelFileInvalidJson( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Model file invalid YAML: {0}")] + ModelFileInvalidYaml( + #[from] + #[source] + serde_yaml::Error, + ), } #[derive(Deserialize, Serialize, Debug, Copy, Clone, PartialEq, Eq)] pub enum PrimitiveType { - String, - Bool, - Int, - JSON, + String, + Bool, + Int, + JSON, } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AttributeDef { - pub typ: String, - pub doc: Option<String>, - pub primitive_type: PrimitiveType, + pub typ: String, + pub doc: Option<String>, + pub primitive_type: PrimitiveType, } impl TypeName for AttributeDef { - fn as_type_name(&self) -> String { - to_pascal_case(&self.typ) - } - - fn preserve_inflection(&self) -> String { - match (self.typ.chars().next(), self.typ.chars().nth(1), &self.typ[1..]) { - (_, Some(c), _) if c.is_uppercase() => format!("{}Attribute", self.typ), - (Some(first), _, body) => format!("{}{}Attribute", first.to_lowercase(), body), - _ => format!("{}Attribute", self.typ), - } - } + fn as_type_name(&self) -> String { + to_pascal_case(&self.typ) + } + + fn preserve_inflection(&self) -> String { + match (self.typ.chars().next(), self.typ.chars().nth(1), &self.typ[1..]) { + (_, Some(c), _) if c.is_uppercase() => format!("{}Attribute", self.typ), + (Some(first), _, body) => format!("{}{}Attribute", first.to_lowercase(), body), + _ => format!("{}Attribute", self.typ), + } + } } impl AttributeDef { - pub fn as_scalar_type(&self) -> String { - match (self.typ.chars().next(), self.typ.chars().nth(1), &self.typ[1..]) { - (_, Some(c), _) if c.is_uppercase() => format!("{}Attribute", self.typ), - (Some(first), _, body) => format!("{}{}Attribute", first.to_uppercase(), body), - _ =>
format!("{}Attribute", self.as_type_name()), - } - } - - pub fn as_property(&self) -> String { - to_snake_case(&format!("{}Attribute", self.typ)) - } - - pub fn from_attribute_file_input(external_id: String, attr: AttributeFileInput) -> Self { - AttributeDef { typ: external_id, doc: attr.doc, primitive_type: attr.typ } - } + pub fn as_scalar_type(&self) -> String { + match (self.typ.chars().next(), self.typ.chars().nth(1), &self.typ[1..]) { + (_, Some(c), _) if c.is_uppercase() => format!("{}Attribute", self.typ), + (Some(first), _, body) => format!("{}{}Attribute", first.to_uppercase(), body), + _ => format!("{}Attribute", self.as_type_name()), + } + } + + pub fn as_property(&self) -> String { + to_snake_case(&format!("{}Attribute", self.typ)) + } + + pub fn from_attribute_file_input(external_id: String, attr: AttributeFileInput) -> Self { + AttributeDef { typ: external_id, doc: attr.doc, primitive_type: attr.typ } + } } /// A external_id formatted for CLI use - kebab-case, singular, lowercase pub trait CliName { - fn as_cli_name(&self) -> String; + fn as_cli_name(&self) -> String; } /// A correctly cased and singularized external_id for the type pub trait TypeName { - fn as_type_name(&self) -> String; - fn preserve_inflection(&self) -> String; + fn as_type_name(&self) -> String; + fn preserve_inflection(&self) -> String; - fn as_method_name(&self) -> String { - format!("define{}", self.as_type_name()) - } + fn as_method_name(&self) -> String { + format!("define{}", self.as_type_name()) + } - fn as_domain_type_id(&self) -> DomaintypeId { - DomaintypeId::from_external_id(self.as_type_name()) - } + fn as_domain_type_id(&self) -> DomaintypeId { + DomaintypeId::from_external_id(self.as_type_name()) + } } /// Entities, Activities and Agents have a specific set of attributes. 
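// Illustrative aside, not part of the diff: the casing helpers above are deterministic,
// so their behaviour can be pinned down with a small test. A minimal sketch, assuming it
// sits alongside the definitions in crates/common/src/domain.rs; the attribute name
// "content" is hypothetical.
#[cfg(test)]
mod naming_conventions_sketch {
    use super::{AttributeDef, PrimitiveType, TypeName};

    #[test]
    fn attribute_naming_follows_the_match_arms_above() {
        // hypothetical attribute, for illustration only
        let attr = AttributeDef {
            typ: "content".to_string(),
            doc: None,
            primitive_type: PrimitiveType::String,
        };
        // as_type_name pascal-cases the raw name
        assert_eq!(attr.as_type_name(), "Content");
        // as_scalar_type upper-cases the first character and appends "Attribute"
        assert_eq!(attr.as_scalar_type(), "ContentAttribute");
        // preserve_inflection lower-cases the first character instead
        assert_eq!(attr.preserve_inflection(), "contentAttribute");
        // the inherent as_property snake-cases "contentAttribute"
        assert_eq!(attr.as_property(), "content_attribute");
    }
}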
pub trait AttributesTypeName { - fn attributes_type_name(&self) -> String; - fn attributes_type_name_preserve_inflection(&self) -> String; + fn attributes_type_name(&self) -> String; + fn attributes_type_name_preserve_inflection(&self) -> String; } pub trait Property { - fn as_property(&self) -> String; + fn as_property(&self) -> String; } impl AttributesTypeName for T - where - T: TypeName, +where + T: TypeName, { - fn attributes_type_name(&self) -> String { - to_pascal_case(&format!("{}Attributes", self.as_type_name())) - } + fn attributes_type_name(&self) -> String { + to_pascal_case(&format!("{}Attributes", self.as_type_name())) + } - fn attributes_type_name_preserve_inflection(&self) -> String { - format!("{}Attributes", self.as_type_name()) - } + fn attributes_type_name_preserve_inflection(&self) -> String { + format!("{}Attributes", self.as_type_name()) + } } impl CliName for T - where - T: TypeName, +where + T: TypeName, { - fn as_cli_name(&self) -> String { - to_kebab_case(&self.as_type_name()) - } + fn as_cli_name(&self) -> String { + to_kebab_case(&self.as_type_name()) + } } impl Property for T - where - T: TypeName, +where + T: TypeName, { - fn as_property(&self) -> String { - to_snake_case(&self.as_type_name()) - } + fn as_property(&self) -> String { + to_snake_case(&self.as_type_name()) + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AgentDef { - pub external_id: String, - pub doc: Option, - pub attributes: Vec, + pub external_id: String, + pub doc: Option, + pub attributes: Vec, } impl TypeName for AgentDef { - fn as_type_name(&self) -> String { - type_name_for_kind("Agent", &self.external_id) - } + fn as_type_name(&self) -> String { + type_name_for_kind("Agent", &self.external_id) + } - fn preserve_inflection(&self) -> String { - preserve_inflection_for_kind("Agent", &self.external_id) - } + fn preserve_inflection(&self) -> String { + preserve_inflection_for_kind("Agent", &self.external_id) + } } impl<'a> TypeName for &'a AgentDef { - fn as_type_name(&self) -> String { - TypeName::as_type_name(*self) - } + fn as_type_name(&self) -> String { + TypeName::as_type_name(*self) + } - fn preserve_inflection(&self) -> String { - TypeName::preserve_inflection(*self) - } + fn preserve_inflection(&self) -> String { + TypeName::preserve_inflection(*self) + } } impl<'a> TypeName for &'a EntityDef { - fn as_type_name(&self) -> String { - TypeName::as_type_name(*self) - } + fn as_type_name(&self) -> String { + TypeName::as_type_name(*self) + } - fn preserve_inflection(&self) -> String { - TypeName::preserve_inflection(*self) - } + fn preserve_inflection(&self) -> String { + TypeName::preserve_inflection(*self) + } } impl<'a> TypeName for &'a ActivityDef { - fn as_type_name(&self) -> String { - TypeName::as_type_name(*self) - } + fn as_type_name(&self) -> String { + TypeName::as_type_name(*self) + } - fn preserve_inflection(&self) -> String { - TypeName::preserve_inflection(*self) - } + fn preserve_inflection(&self) -> String { + TypeName::preserve_inflection(*self) + } } impl AgentDef { - pub fn new( - external_id: impl AsRef, - doc: Option, - attributes: Vec, - ) -> Self { - Self { external_id: external_id.as_ref().to_string(), doc, attributes } - } - - pub fn from_input<'a>( - external_id: String, - doc: Option, - attributes: &BTreeMap, - attribute_references: impl Iterator, - ) -> Result { - Ok(Self { - external_id, - doc, - attributes: attribute_references - .map(|x| { - attributes - .get(&*x.0) - .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() 
}) - .map(|attr| AttributeDef { - typ: x.0.to_owned(), - doc: attr.doc.to_owned(), - primitive_type: attr.typ, - }) - }) - .collect::, _>>()?, - }) - } + pub fn new( + external_id: impl AsRef, + doc: Option, + attributes: Vec, + ) -> Self { + Self { external_id: external_id.as_ref().to_string(), doc, attributes } + } + + pub fn from_input<'a>( + external_id: String, + doc: Option, + attributes: &BTreeMap, + attribute_references: impl Iterator, + ) -> Result { + Ok(Self { + external_id, + doc, + attributes: attribute_references + .map(|x| { + attributes + .get(&*x.0) + .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() }) + .map(|attr| AttributeDef { + typ: x.0.to_owned(), + doc: attr.doc.to_owned(), + primitive_type: attr.typ, + }) + }) + .collect::, _>>()?, + }) + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct EntityDef { - pub external_id: String, - pub doc: Option, - pub attributes: Vec, + pub external_id: String, + pub doc: Option, + pub attributes: Vec, } impl TypeName for EntityDef { - fn as_type_name(&self) -> String { - type_name_for_kind("Entity", &self.external_id) - } + fn as_type_name(&self) -> String { + type_name_for_kind("Entity", &self.external_id) + } - fn preserve_inflection(&self) -> String { - preserve_inflection_for_kind("Entity", &self.external_id) - } + fn preserve_inflection(&self) -> String { + preserve_inflection_for_kind("Entity", &self.external_id) + } } impl EntityDef { - pub fn new( - external_id: impl AsRef, - doc: Option, - attributes: Vec, - ) -> Self { - Self { external_id: external_id.as_ref().to_string(), doc, attributes } - } - - pub fn from_input<'a>( - external_id: String, - doc: Option, - attributes: &BTreeMap, - attribute_references: impl Iterator, - ) -> Result { - Ok(Self { - external_id, - doc, - attributes: attribute_references - .map(|x| { - attributes - .get(&*x.0) - .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() }) - .map(|attr| AttributeDef { - typ: x.0.to_owned(), - doc: attr.doc.to_owned(), - primitive_type: attr.typ, - }) - }) - .collect::, _>>()?, - }) - } + pub fn new( + external_id: impl AsRef, + doc: Option, + attributes: Vec, + ) -> Self { + Self { external_id: external_id.as_ref().to_string(), doc, attributes } + } + + pub fn from_input<'a>( + external_id: String, + doc: Option, + attributes: &BTreeMap, + attribute_references: impl Iterator, + ) -> Result { + Ok(Self { + external_id, + doc, + attributes: attribute_references + .map(|x| { + attributes + .get(&*x.0) + .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() }) + .map(|attr| AttributeDef { + typ: x.0.to_owned(), + doc: attr.doc.to_owned(), + primitive_type: attr.typ, + }) + }) + .collect::, _>>()?, + }) + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ActivityDef { - pub external_id: String, - pub doc: Option, - pub attributes: Vec, + pub external_id: String, + pub doc: Option, + pub attributes: Vec, } impl TypeName for ActivityDef { - fn as_type_name(&self) -> String { - type_name_for_kind("Activity", &self.external_id) - } + fn as_type_name(&self) -> String { + type_name_for_kind("Activity", &self.external_id) + } - fn preserve_inflection(&self) -> String { - preserve_inflection_for_kind("Activity", &self.external_id) - } + fn preserve_inflection(&self) -> String { + preserve_inflection_for_kind("Activity", &self.external_id) + } } impl ActivityDef { - pub fn new( - external_id: impl AsRef, - doc: Option, - attributes: Vec, - ) -> Self { - Self { external_id: 
external_id.as_ref().to_string(), doc, attributes } - } - - pub fn from_input<'a>( - external_id: String, - doc: Option, - attributes: &BTreeMap, - attribute_references: impl Iterator, - ) -> Result { - Ok(Self { - external_id, - doc, - attributes: attribute_references - .map(|x| { - attributes - .get(&*x.0) - .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() }) - .map(|attr| AttributeDef { - typ: x.0.to_owned(), - doc: attr.doc.to_owned(), - primitive_type: attr.typ, - }) - }) - .collect::, _>>()?, - }) - } + pub fn new( + external_id: impl AsRef, + doc: Option, + attributes: Vec, + ) -> Self { + Self { external_id: external_id.as_ref().to_string(), doc, attributes } + } + + pub fn from_input<'a>( + external_id: String, + doc: Option, + attributes: &BTreeMap, + attribute_references: impl Iterator, + ) -> Result { + Ok(Self { + external_id, + doc, + attributes: attribute_references + .map(|x| { + attributes + .get(&*x.0) + .ok_or_else(|| ModelError::AttributeNotDefined { attr: x.0.to_owned() }) + .map(|attr| AttributeDef { + typ: x.0.to_owned(), + doc: attr.doc.to_owned(), + primitive_type: attr.typ, + }) + }) + .collect::, _>>()?, + }) + } } #[derive(Debug, Clone, Serialize, Deserialize)] pub struct RoleDef { - pub external_id: String, + pub external_id: String, } impl RoleDef { - pub fn new(external_id: impl AsRef) -> Self { - Self { external_id: external_id.as_ref().to_string() } - } + pub fn new(external_id: impl AsRef) -> Self { + Self { external_id: external_id.as_ref().to_string() } + } - pub fn from_role_file_input(external_id: String) -> Self { - RoleDef { external_id } - } + pub fn from_role_file_input(external_id: String) -> Self { + RoleDef { external_id } + } } impl TypeName for &RoleDef { - fn as_type_name(&self) -> String { - to_pascal_case(&self.external_id) - } + fn as_type_name(&self) -> String { + to_pascal_case(&self.external_id) + } - fn preserve_inflection(&self) -> String { - self.external_id.clone() - } + fn preserve_inflection(&self) -> String { + self.external_id.clone() + } } fn type_name_for_kind(kind: &str, id: &str) -> String { - if id == format!("Prov{kind}") { - id.to_string() - } else { - match (id.chars().next(), id.chars().nth(1), &id[1..]) { - (_, Some(c), _) if c.is_uppercase() => format!("{id}{kind}"), - (Some(first), _, body) => format!("{}{}{}", first.to_uppercase(), body, kind), - _ => format!("{}{}", to_pascal_case(id), kind), - } - } + if id == format!("Prov{kind}") { + id.to_string() + } else { + match (id.chars().next(), id.chars().nth(1), &id[1..]) { + (_, Some(c), _) if c.is_uppercase() => format!("{id}{kind}"), + (Some(first), _, body) => format!("{}{}{}", first.to_uppercase(), body, kind), + _ => format!("{}{}", to_pascal_case(id), kind), + } + } } fn preserve_inflection_for_kind(kind: &str, id: &str) -> String { - match (id.chars().next(), id.chars().nth(1), &id[1..]) { - (_, Some(c), _) if c.is_uppercase() => format!("{id}{kind}"), - (Some(first), _, body) => format!("{}{}{}", first.to_lowercase(), body, kind), - _ => to_camel_case(&format!("{id}{kind}")), - } + match (id.chars().next(), id.chars().nth(1), &id[1..]) { + (_, Some(c), _) if c.is_uppercase() => format!("{id}{kind}"), + (Some(first), _, body) => format!("{}{}{}", first.to_lowercase(), body, kind), + _ => to_camel_case(&format!("{id}{kind}")), + } } #[derive(Debug, Clone, Serialize, Deserialize, Default)] pub struct ChronicleDomainDef { - name: String, - pub attributes: Vec, - pub agents: Vec, - pub entities: Vec, - pub activities: Vec, - pub roles_doc: 
Option, - pub roles: Vec, + name: String, + pub attributes: Vec, + pub agents: Vec, + pub entities: Vec, + pub activities: Vec, + pub roles_doc: Option, + pub roles: Vec, } pub struct AgentBuilder<'a>(&'a ChronicleDomainDef, AgentDef); impl<'a> AgentBuilder<'a> { - pub fn new( - domain: &'a ChronicleDomainDef, - external_id: impl AsRef, - doc: Option, - ) -> Self { - Self(domain, AgentDef::new(external_id, doc, vec![])) - } - - pub fn with_attribute(mut self, typ: impl AsRef) -> Result { - let attr = self - .0 - .attribute(typ.as_ref()) - .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; - self.1.attributes.push(attr); - Ok(self) - } + pub fn new( + domain: &'a ChronicleDomainDef, + external_id: impl AsRef, + doc: Option, + ) -> Self { + Self(domain, AgentDef::new(external_id, doc, vec![])) + } + + pub fn with_attribute(mut self, typ: impl AsRef) -> Result { + let attr = self + .0 + .attribute(typ.as_ref()) + .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; + self.1.attributes.push(attr); + Ok(self) + } } impl<'a> From> for AgentDef { - fn from(val: AgentBuilder<'a>) -> Self { - val.1 - } + fn from(val: AgentBuilder<'a>) -> Self { + val.1 + } } pub struct EntityBuilder<'a>(&'a ChronicleDomainDef, EntityDef); impl<'a> EntityBuilder<'a> { - pub fn new( - domain: &'a ChronicleDomainDef, - external_id: impl AsRef, - doc: Option, - ) -> Self { - Self(domain, EntityDef::new(external_id, doc, vec![])) - } - - pub fn with_attribute(mut self, typ: impl AsRef) -> Result { - let attr = self - .0 - .attribute(typ.as_ref()) - .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; - self.1.attributes.push(attr); - Ok(self) - } + pub fn new( + domain: &'a ChronicleDomainDef, + external_id: impl AsRef, + doc: Option, + ) -> Self { + Self(domain, EntityDef::new(external_id, doc, vec![])) + } + + pub fn with_attribute(mut self, typ: impl AsRef) -> Result { + let attr = self + .0 + .attribute(typ.as_ref()) + .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; + self.1.attributes.push(attr); + Ok(self) + } } impl<'a> From> for EntityDef { - fn from(val: EntityBuilder<'a>) -> Self { - val.1 - } + fn from(val: EntityBuilder<'a>) -> Self { + val.1 + } } pub struct ActivityBuilder<'a>(&'a ChronicleDomainDef, ActivityDef); impl<'a> ActivityBuilder<'a> { - pub fn new( - domain: &'a ChronicleDomainDef, - external_id: impl AsRef, - doc: Option, - ) -> Self { - Self(domain, ActivityDef::new(external_id, doc, vec![])) - } - - pub fn with_attribute(mut self, typ: impl AsRef) -> Result { - let attr = self - .0 - .attribute(typ.as_ref()) - .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; - self.1.attributes.push(attr); - Ok(self) - } + pub fn new( + domain: &'a ChronicleDomainDef, + external_id: impl AsRef, + doc: Option, + ) -> Self { + Self(domain, ActivityDef::new(external_id, doc, vec![])) + } + + pub fn with_attribute(mut self, typ: impl AsRef) -> Result { + let attr = self + .0 + .attribute(typ.as_ref()) + .ok_or(ModelError::AttributeNotDefined { attr: typ.as_ref().to_string() })?; + self.1.attributes.push(attr); + Ok(self) + } } impl<'a> From> for ActivityDef { - fn from(val: ActivityBuilder<'a>) -> Self { - val.1 - } + fn from(val: ActivityBuilder<'a>) -> Self { + val.1 + } } pub struct Builder(ChronicleDomainDef); impl Builder { - pub fn new(name: impl AsRef) -> Self { - Builder(ChronicleDomainDef { name: name.as_ref().to_string(), ..Default::default() }) - } - - pub fn 
with_attribute_type( - mut self, - external_id: impl AsRef, - doc: Option, - typ: PrimitiveType, - ) -> Result { - self.0.attributes.push(AttributeDef { - typ: external_id.as_ref().to_string(), - doc, - primitive_type: typ, - }); - - Ok(self) - } - - pub fn with_agent( - mut self, - external_id: impl AsRef, - doc: Option, - b: impl FnOnce(AgentBuilder<'_>) -> Result, ModelError>, - ) -> Result { - self.0 - .agents - .push(b(AgentBuilder(&self.0, AgentDef::new(external_id, doc, vec![])))?.into()); - Ok(self) - } - - pub fn with_entity( - mut self, - external_id: impl AsRef, - doc: Option, - b: impl FnOnce(EntityBuilder<'_>) -> Result, ModelError>, - ) -> Result { - self.0 - .entities - .push(b(EntityBuilder(&self.0, EntityDef::new(external_id, doc, vec![])))?.into()); - Ok(self) - } - - pub fn with_activity( - mut self, - external_id: impl AsRef, - doc: Option, - b: impl FnOnce(ActivityBuilder<'_>) -> Result, ModelError>, - ) -> Result { - self.0 - .activities - .push(b(ActivityBuilder(&self.0, ActivityDef::new(external_id, doc, vec![])))?.into()); - - Ok(self) - } - - pub fn with_role(mut self, external_id: impl AsRef) -> Result { - self.0.roles.push(RoleDef::new(external_id)); - - Ok(self) - } - - pub fn build(self) -> ChronicleDomainDef { - self.0 - } + pub fn new(name: impl AsRef) -> Self { + Builder(ChronicleDomainDef { name: name.as_ref().to_string(), ..Default::default() }) + } + + pub fn with_attribute_type( + mut self, + external_id: impl AsRef, + doc: Option, + typ: PrimitiveType, + ) -> Result { + self.0.attributes.push(AttributeDef { + typ: external_id.as_ref().to_string(), + doc, + primitive_type: typ, + }); + + Ok(self) + } + + pub fn with_agent( + mut self, + external_id: impl AsRef, + doc: Option, + b: impl FnOnce(AgentBuilder<'_>) -> Result, ModelError>, + ) -> Result { + self.0 + .agents + .push(b(AgentBuilder(&self.0, AgentDef::new(external_id, doc, vec![])))?.into()); + Ok(self) + } + + pub fn with_entity( + mut self, + external_id: impl AsRef, + doc: Option, + b: impl FnOnce(EntityBuilder<'_>) -> Result, ModelError>, + ) -> Result { + self.0 + .entities + .push(b(EntityBuilder(&self.0, EntityDef::new(external_id, doc, vec![])))?.into()); + Ok(self) + } + + pub fn with_activity( + mut self, + external_id: impl AsRef, + doc: Option, + b: impl FnOnce(ActivityBuilder<'_>) -> Result, ModelError>, + ) -> Result { + self.0 + .activities + .push(b(ActivityBuilder(&self.0, ActivityDef::new(external_id, doc, vec![])))?.into()); + + Ok(self) + } + + pub fn with_role(mut self, external_id: impl AsRef) -> Result { + self.0.roles.push(RoleDef::new(external_id)); + + Ok(self) + } + + pub fn build(self) -> ChronicleDomainDef { + self.0 + } } #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributeFileInput { - doc: Option, - #[serde(rename = "type")] - typ: PrimitiveType, + doc: Option, + #[serde(rename = "type")] + typ: PrimitiveType, } impl From<&AttributeDef> for AttributeFileInput { - fn from(attr: &AttributeDef) -> Self { - Self { doc: attr.doc.to_owned(), typ: attr.primitive_type } - } + fn from(attr: &AttributeDef) -> Self { + Self { doc: attr.doc.to_owned(), typ: attr.primitive_type } + } } #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] @@ -552,245 +552,245 @@ pub struct AttributeRef(pub String); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ResourceDef { - pub doc: Option, - pub attributes: Vec, + pub doc: Option, + pub attributes: Vec, } impl From<&AgentDef> for ResourceDef { - fn from(agent: &AgentDef) -> 
Self { - Self { - doc: agent.doc.to_owned(), - attributes: agent - .attributes - .iter() - .map(|attr| AttributeRef(attr.typ.to_owned())) - .collect(), - } - } + fn from(agent: &AgentDef) -> Self { + Self { + doc: agent.doc.to_owned(), + attributes: agent + .attributes + .iter() + .map(|attr| AttributeRef(attr.typ.to_owned())) + .collect(), + } + } } impl From<&EntityDef> for ResourceDef { - fn from(entity: &EntityDef) -> Self { - Self { - doc: entity.doc.to_owned(), - attributes: entity - .attributes - .iter() - .map(|attr| AttributeRef(attr.typ.to_owned())) - .collect(), - } - } + fn from(entity: &EntityDef) -> Self { + Self { + doc: entity.doc.to_owned(), + attributes: entity + .attributes + .iter() + .map(|attr| AttributeRef(attr.typ.to_owned())) + .collect(), + } + } } impl From<&ActivityDef> for ResourceDef { - fn from(activity: &ActivityDef) -> Self { - Self { - doc: activity.doc.to_owned(), - attributes: activity - .attributes - .iter() - .map(|attr| AttributeRef(attr.typ.to_owned())) - .collect(), - } - } + fn from(activity: &ActivityDef) -> Self { + Self { + doc: activity.doc.to_owned(), + attributes: activity + .attributes + .iter() + .map(|attr| AttributeRef(attr.typ.to_owned())) + .collect(), + } + } } #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Default)] pub struct DomainFileInput { - pub name: String, - pub attributes: BTreeMap, - pub agents: BTreeMap, - pub entities: BTreeMap, - pub activities: BTreeMap, - pub roles_doc: Option, - pub roles: Vec, + pub name: String, + pub attributes: BTreeMap, + pub agents: BTreeMap, + pub entities: BTreeMap, + pub activities: BTreeMap, + pub roles_doc: Option, + pub roles: Vec, } impl DomainFileInput { - pub fn new(name: impl AsRef) -> Self { - DomainFileInput { name: name.as_ref().to_string(), ..Default::default() } - } + pub fn new(name: impl AsRef) -> Self { + DomainFileInput { name: name.as_ref().to_string(), ..Default::default() } + } } impl FromStr for DomainFileInput { - type Err = ModelError; - - fn from_str(s: &str) -> Result { - match serde_json::from_str::(s) { - Err(_) => Ok(serde_yaml::from_str::(s)?), - Ok(domain) => Ok(domain), - } - } + type Err = ModelError; + + fn from_str(s: &str) -> Result { + match serde_json::from_str::(s) { + Err(_) => Ok(serde_yaml::from_str::(s)?), + Ok(domain) => Ok(domain), + } + } } impl From<&ChronicleDomainDef> for DomainFileInput { - fn from(domain: &ChronicleDomainDef) -> Self { - let mut file = Self::new(&domain.name); - - for attr in &domain.attributes { - let external_id = attr.typ.to_string(); - file.attributes.insert(external_id, attr.into()); - } - - file.agents = domain - .agents - .iter() - .map(|x| (x.external_id.clone(), ResourceDef::from(x))) - .collect(); - - file.entities = domain - .entities - .iter() - .map(|x| (x.external_id.clone(), ResourceDef::from(x))) - .collect(); - - file.activities = domain - .activities - .iter() - .map(|x| (x.external_id.clone(), ResourceDef::from(x))) - .collect(); - - file.roles_doc = domain.roles_doc.to_owned(); - - file.roles = domain.roles.iter().map(|x| x.as_type_name()).collect(); - - file - } + fn from(domain: &ChronicleDomainDef) -> Self { + let mut file = Self::new(&domain.name); + + for attr in &domain.attributes { + let external_id = attr.typ.to_string(); + file.attributes.insert(external_id, attr.into()); + } + + file.agents = domain + .agents + .iter() + .map(|x| (x.external_id.clone(), ResourceDef::from(x))) + .collect(); + + file.entities = domain + .entities + .iter() + .map(|x| (x.external_id.clone(), 
ResourceDef::from(x))) + .collect(); + + file.activities = domain + .activities + .iter() + .map(|x| (x.external_id.clone(), ResourceDef::from(x))) + .collect(); + + file.roles_doc = domain.roles_doc.to_owned(); + + file.roles = domain.roles.iter().map(|x| x.as_type_name()).collect(); + + file + } } impl ChronicleDomainDef { - pub fn build(external_id: &str) -> Builder { - Builder::new(external_id) - } - - fn attribute(&self, attr: &str) -> Option { - self.attributes.iter().find(|a| a.typ == attr).cloned() - } - - pub fn from_input_string(s: &str) -> Result { - ChronicleDomainDef::from_str(s) - } - - fn from_json(file: &str) -> Result { - let model = serde_json::from_str::(file)?; - Self::from_model(model) - } - - fn from_yaml(file: &str) -> Result { - let model = serde_yaml::from_str::(file)?; - Self::from_model(model) - } - - pub fn from_file(path: impl AsRef) -> Result { - let path = path.as_ref(); - - let file: String = std::fs::read_to_string(path)?; - - match path.extension() { - Some(ext) if ext == "json" => Self::from_json(&file), - _ => Self::from_yaml(&file), - } - } - - fn from_model(model: DomainFileInput) -> Result { - let mut builder = Builder::new(model.name); - - for (external_id, attr) in model.attributes.iter() { - builder = builder.with_attribute_type(external_id, attr.doc.to_owned(), attr.typ)?; - } - - for (external_id, def) in model.agents { - builder.0.agents.push(AgentDef::from_input( - external_id, - def.doc, - &model.attributes, - def.attributes.iter(), - )?) - } - - for (external_id, def) in model.entities { - builder.0.entities.push(EntityDef::from_input( - external_id, - def.doc, - &model.attributes, - def.attributes.iter(), - )?) - } - - for (external_id, def) in model.activities { - builder.0.activities.push(ActivityDef::from_input( - external_id, - def.doc, - &model.attributes, - def.attributes.iter(), - )?) 
- } - - if model.roles_doc.is_some() { - builder.0.roles_doc = model.roles_doc; - } - - for role in model.roles { - builder.0.roles.push(RoleDef::from_role_file_input(role)); - } - - Ok(builder.build()) - } - - pub fn to_json_string(&self) -> Result { - let input: DomainFileInput = self.into(); - let json = serde_json::to_string(&input)?; - Ok(json) - } - - fn to_yaml_string(&self) -> Result { - let input: DomainFileInput = self.into(); - let yaml = serde_yaml::to_string(&input)?; - Ok(yaml) - } + pub fn build(external_id: &str) -> Builder { + Builder::new(external_id) + } + + fn attribute(&self, attr: &str) -> Option { + self.attributes.iter().find(|a| a.typ == attr).cloned() + } + + pub fn from_input_string(s: &str) -> Result { + ChronicleDomainDef::from_str(s) + } + + fn from_json(file: &str) -> Result { + let model = serde_json::from_str::(file)?; + Self::from_model(model) + } + + fn from_yaml(file: &str) -> Result { + let model = serde_yaml::from_str::(file)?; + Self::from_model(model) + } + + pub fn from_file(path: impl AsRef) -> Result { + let path = path.as_ref(); + + let file: String = std::fs::read_to_string(path)?; + + match path.extension() { + Some(ext) if ext == "json" => Self::from_json(&file), + _ => Self::from_yaml(&file), + } + } + + fn from_model(model: DomainFileInput) -> Result { + let mut builder = Builder::new(model.name); + + for (external_id, attr) in model.attributes.iter() { + builder = builder.with_attribute_type(external_id, attr.doc.to_owned(), attr.typ)?; + } + + for (external_id, def) in model.agents { + builder.0.agents.push(AgentDef::from_input( + external_id, + def.doc, + &model.attributes, + def.attributes.iter(), + )?) + } + + for (external_id, def) in model.entities { + builder.0.entities.push(EntityDef::from_input( + external_id, + def.doc, + &model.attributes, + def.attributes.iter(), + )?) + } + + for (external_id, def) in model.activities { + builder.0.activities.push(ActivityDef::from_input( + external_id, + def.doc, + &model.attributes, + def.attributes.iter(), + )?) 
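// Illustrative aside, not part of the diff: from_model drives the same fluent Builder
// API that callers can use directly. A minimal sketch, assuming the types above are in
// scope; the domain name "evidence" and the attribute/entity/role names are hypothetical.
fn example_domain() -> Result<ChronicleDomainDef, ModelError> {
    Ok(ChronicleDomainDef::build("evidence")
        // register the attribute first so later lookups can resolve it
        .with_attribute_type("Content", None, PrimitiveType::String)?
        // the closure resolves "Content" against the attributes registered so far
        .with_entity("Question", None, |e| e.with_attribute("Content"))?
        .with_role("EDITOR")?
        .build())
}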
+ } + + if model.roles_doc.is_some() { + builder.0.roles_doc = model.roles_doc; + } + + for role in model.roles { + builder.0.roles.push(RoleDef::from_role_file_input(role)); + } + + Ok(builder.build()) + } + + pub fn to_json_string(&self) -> Result { + let input: DomainFileInput = self.into(); + let json = serde_json::to_string(&input)?; + Ok(json) + } + + fn to_yaml_string(&self) -> Result { + let input: DomainFileInput = self.into(); + let yaml = serde_yaml::to_string(&input)?; + Ok(yaml) + } } /// Parse from a yaml formatted string impl FromStr for ChronicleDomainDef { - type Err = ModelError; + type Err = ModelError; - fn from_str(s: &str) -> Result { - Self::from_yaml(s) - } + fn from_str(s: &str) -> Result { + Self::from_yaml(s) + } } #[cfg(test)] pub mod test { - use super::{ChronicleDomainDef, DomainFileInput, EntityDef}; + use super::{ChronicleDomainDef, DomainFileInput, EntityDef}; - use std::cmp::Ordering; + use std::cmp::Ordering; - impl PartialEq for EntityDef { - fn eq(&self, other: &Self) -> bool { - self.external_id == other.external_id - } - } + impl PartialEq for EntityDef { + fn eq(&self, other: &Self) -> bool { + self.external_id == other.external_id + } + } - impl Eq for EntityDef {} + impl Eq for EntityDef {} - impl PartialOrd for EntityDef { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } - } + impl PartialOrd for EntityDef { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } + } - impl Ord for EntityDef { - fn cmp(&self, other: &Self) -> Ordering { - self.external_id.cmp(&other.external_id) - } - } + impl Ord for EntityDef { + fn cmp(&self, other: &Self) -> Ordering { + self.external_id.cmp(&other.external_id) + } + } - use assert_fs::prelude::*; + use assert_fs::prelude::*; - fn create_test_yaml_file() -> Result> { - let file = assert_fs::NamedTempFile::new("test.yml")?; - file.write_str( - r#" + fn create_test_yaml_file() -> Result> { + let file = assert_fs::NamedTempFile::new("test.yml")?; + file.write_str( + r#" name: "chronicle" attributes: String: @@ -830,15 +830,16 @@ pub mod test { roles: - drummer "#, - )?; - Ok(file) - } - - // more than one entity will be in no particular order - fn create_test_yaml_file_single_entity() -> Result> { - let file = assert_fs::NamedTempFile::new("test.yml")?; - file.write_str( - r#" + )?; + Ok(file) + } + + // more than one entity will be in no particular order + fn create_test_yaml_file_single_entity( + ) -> Result> { + let file = assert_fs::NamedTempFile::new("test.yml")?; + file.write_str( + r#" name: "test" attributes: String: @@ -858,14 +859,14 @@ pub mod test { roles: - drummer "#, - )?; - Ok(file) - } - - fn create_test_json_file() -> Result> { - let file = assert_fs::NamedTempFile::new("test.json")?; - file.write_str( - r#" { + )?; + Ok(file) + } + + fn create_test_json_file() -> Result> { + let file = assert_fs::NamedTempFile::new("test.json")?; + file.write_str( + r#" { "name": "chronicle", "attributes": { "String": { @@ -901,19 +902,19 @@ pub mod test { "roles" : ["drummer"] } "#, - )?; - Ok(file) - } + )?; + Ok(file) + } - #[test] - fn json_from_file() -> Result<(), Box> { - let file = create_test_json_file()?; + #[test] + fn json_from_file() -> Result<(), Box> { + let file = create_test_json_file()?; - let mut domain = ChronicleDomainDef::from_file(file.path()).unwrap(); + let mut domain = ChronicleDomainDef::from_file(file.path()).unwrap(); - domain.entities.sort(); + domain.entities.sort(); - insta::assert_yaml_snapshot!(domain, @r###" + 
insta::assert_yaml_snapshot!(domain, @r###" --- name: chronicle attributes: @@ -952,18 +953,18 @@ pub mod test { - external_id: drummer "###); - Ok(()) - } + Ok(()) + } - #[test] - fn yaml_from_file() -> Result<(), Box> { - let file = create_test_yaml_file()?; + #[test] + fn yaml_from_file() -> Result<(), Box> { + let file = create_test_yaml_file()?; - let mut domain = ChronicleDomainDef::from_file(file.path()).unwrap(); + let mut domain = ChronicleDomainDef::from_file(file.path()).unwrap(); - domain.entities.sort(); + domain.entities.sort(); - insta::assert_yaml_snapshot!(domain, @r###" + insta::assert_yaml_snapshot!(domain, @r###" --- name: chronicle attributes: @@ -1044,18 +1045,18 @@ pub mod test { - external_id: drummer "###); - Ok(()) - } + Ok(()) + } - use std::str::FromStr; + use std::str::FromStr; - #[test] - fn test_chronicle_domain_def_from_str() -> Result<(), Box> { - let file = create_test_yaml_file()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; + #[test] + fn test_chronicle_domain_def_from_str() -> Result<(), Box> { + let file = create_test_yaml_file()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; - insta::assert_yaml_snapshot!(domain, @r###" + insta::assert_yaml_snapshot!(domain, @r###" --- name: chronicle attributes: @@ -1136,17 +1137,17 @@ pub mod test { - external_id: drummer "###); - Ok(()) - } + Ok(()) + } - #[test] - fn test_from_domain_for_file_input() -> Result<(), Box> { - let file = create_test_yaml_file_single_entity()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; - let input = DomainFileInput::from(&domain); + #[test] + fn test_from_domain_for_file_input() -> Result<(), Box> { + let file = create_test_yaml_file_single_entity()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; + let input = DomainFileInput::from(&domain); - insta::assert_yaml_snapshot!(input, @r###" + insta::assert_yaml_snapshot!(input, @r###" --- name: test attributes: @@ -1173,33 +1174,33 @@ pub mod test { - Drummer "###); - Ok(()) - } + Ok(()) + } - use super::{AttributeDef, AttributeFileInput, PrimitiveType}; + use super::{AttributeDef, AttributeFileInput, PrimitiveType}; - #[test] - fn test_from_attribute_def_for_attribute_file_input() { - let attr = AttributeDef { - typ: "string".to_string(), - doc: None, - primitive_type: PrimitiveType::String, - }; - let input = AttributeFileInput::from(&attr); - insta::assert_yaml_snapshot!(input, @r###" + #[test] + fn test_from_attribute_def_for_attribute_file_input() { + let attr = AttributeDef { + typ: "string".to_string(), + doc: None, + primitive_type: PrimitiveType::String, + }; + let input = AttributeFileInput::from(&attr); + insta::assert_yaml_snapshot!(input, @r###" --- doc: ~ type: String "###); - } + } - #[test] - fn test_to_json_string() -> Result<(), Box> { - let file = create_test_yaml_file_single_entity()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; + #[test] + fn test_to_json_string() -> Result<(), Box> { + let file = create_test_yaml_file_single_entity()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; - insta::assert_yaml_snapshot!(domain, @r###" + insta::assert_yaml_snapshot!(domain, @r###" --- name: test attributes: @@ -1232,16 +1233,16 @@ pub mod test { - external_id: 
drummer "###); - Ok(()) - } + Ok(()) + } - #[test] - fn test_to_yaml_string() -> Result<(), Box> { - let file = create_test_yaml_file_single_entity()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; + #[test] + fn test_to_yaml_string() -> Result<(), Box> { + let file = create_test_yaml_file_single_entity()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; - insta::assert_yaml_snapshot!(domain, @r###" + insta::assert_yaml_snapshot!(domain, @r###" --- name: test attributes: @@ -1274,13 +1275,14 @@ pub mod test { - external_id: drummer "###); - Ok(()) - } + Ok(()) + } - fn create_test_yaml_file_with_acronyms() -> Result> { - let file = assert_fs::NamedTempFile::new("test.yml")?; - file.write_str( - r#" + fn create_test_yaml_file_with_acronyms( + ) -> Result> { + let file = assert_fs::NamedTempFile::new("test.yml")?; + file.write_str( + r#" name: "evidence" attributes: Content: @@ -1336,19 +1338,19 @@ pub mod test { - RESEARCHER - EDITOR "#, - )?; - Ok(file) - } - - #[test] - fn test_from_domain_for_file_input_with_inflections() -> Result<(), Box> - { - let file = create_test_yaml_file_with_acronyms()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; - let input = DomainFileInput::from(&domain); - - insta::assert_yaml_snapshot!(input, @r###" + )?; + Ok(file) + } + + #[test] + fn test_from_domain_for_file_input_with_inflections() -> Result<(), Box> + { + let file = create_test_yaml_file_with_acronyms()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; + let input = DomainFileInput::from(&domain); + + insta::assert_yaml_snapshot!(input, @r###" --- name: evidence attributes: @@ -1422,12 +1424,13 @@ pub mod test { - Researcher - Editor "###); - Ok(()) - } + Ok(()) + } - fn create_test_yaml_file_with_docs() -> Result> { - let file = assert_fs::NamedTempFile::new("test.yml")?; - file.write_str( + fn create_test_yaml_file_with_docs( + ) -> Result> { + let file = assert_fs::NamedTempFile::new("test.yml")?; + file.write_str( r#" name: Artworld attributes: @@ -1556,17 +1559,17 @@ pub mod test { - CREATOR "#, )?; - Ok(file) - } + Ok(file) + } - #[test] - fn test_from_domain_for_file_input_with_docs() -> Result<(), Box> { - let file = create_test_yaml_file_with_docs()?; - let s: String = std::fs::read_to_string(file.path())?; - let domain = ChronicleDomainDef::from_str(&s)?; - let input = DomainFileInput::from(&domain); + #[test] + fn test_from_domain_for_file_input_with_docs() -> Result<(), Box> { + let file = create_test_yaml_file_with_docs()?; + let s: String = std::fs::read_to_string(file.path())?; + let domain = ChronicleDomainDef::from_str(&s)?; + let input = DomainFileInput::from(&domain); - insta::assert_yaml_snapshot!(input, @r###" + insta::assert_yaml_snapshot!(input, @r###" --- name: Artworld attributes: @@ -1627,6 +1630,6 @@ pub mod test { - Seller - Creator "###); - Ok(()) - } + Ok(()) + } } diff --git a/crates/common/src/identity.rs b/crates/common/src/identity.rs index b3419678a..d9cc95fa1 100644 --- a/crates/common/src/identity.rs +++ b/crates/common/src/identity.rs @@ -6,8 +6,8 @@ use std::collections::BTreeSet; use k256::sha2::{Digest, Sha512}; #[cfg(not(feature = "std"))] use parity_scale_codec::{ - alloc::collections::BTreeMap, alloc::collections::BTreeSet, alloc::string::String, - alloc::vec::Vec + alloc::collections::BTreeMap, 
alloc::collections::BTreeSet, alloc::string::String, + alloc::vec::Vec, }; #[cfg(not(feature = "std"))] use scale_info::{prelude::borrow::ToOwned, prelude::string::ToString}; @@ -22,44 +22,44 @@ use crate::prov::AgentId; #[derive(Error, Debug)] pub enum IdentityError { - #[error("Failed to get agent id from JWT claims")] - JwtClaims, - - #[error("Signer : {0}")] - Signing( - #[from] - #[source] - anyhow::Error, - ), - - #[error("Malformed JSON: {0}")] - SerdeJson( - #[from] - #[source] - serde_json::Error, - ), - - #[error("Serialization error: {0}")] - SerdeJsonSerialize(String), + #[error("Failed to get agent id from JWT claims")] + JwtClaims, + + #[error("Signer : {0}")] + Signing( + #[from] + #[source] + anyhow::Error, + ), + + #[error("Malformed JSON: {0}")] + SerdeJson( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Serialization error: {0}")] + SerdeJsonSerialize(String), } /// Contains the scalar ID and identity claims for a user established via JWT #[derive(Serialize, Deserialize, PartialEq, Eq, Clone)] pub struct JwtId { - pub id: AgentId, - pub claims: Value, + pub id: AgentId, + pub claims: Value, } impl JwtId { - fn new(external_id: &str, claims: Value) -> Self { - Self { id: AgentId::from_external_id(external_id), claims } - } + fn new(external_id: &str, claims: Value) -> Self { + Self { id: AgentId::from_external_id(external_id), claims } + } } impl core::fmt::Debug for JwtId { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - f.debug_struct("JwtId").field("id", &self.id).finish_non_exhaustive() - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { + f.debug_struct("JwtId").field("id", &self.id).finish_non_exhaustive() + } } /// Claims from a JWT, referenced in creating an AgentId for a Chronicle user @@ -67,217 +67,217 @@ impl core::fmt::Debug for JwtId { pub struct JwtClaims(pub Map<String, Value>); impl core::fmt::Debug for JwtClaims { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { - let claims = self.0.iter().map(|(k, _v)| (k, "***SECRET***")).collect::<BTreeMap<_, _>>(); - write!(f, "JwtClaims({:?})", claims) - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> { + let claims = self.0.iter().map(|(k, _v)| (k, "***SECRET***")).collect::<BTreeMap<_, _>>(); + write!(f, "JwtClaims({:?})", claims) + } } /// Chronicle identity object for authorization #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] #[serde(rename_all = "lowercase", tag = "type")] pub enum AuthId { - Anonymous, - Chronicle, - JWT(JwtId), + Anonymous, + Chronicle, + JWT(JwtId), } impl core::fmt::Display for AuthId { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Anonymous => write!(f, "Anonymous"), - Self::Chronicle => write!(f, "Chronicle"), - Self::JWT(jwt_id) => write!(f, "{}", jwt_id.id), - } - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Anonymous => write!(f, "Anonymous"), + Self::Chronicle => write!(f, "Chronicle"), + Self::JWT(jwt_id) => write!(f, "{}", jwt_id.id), + } + } } impl TryFrom<&str> for AuthId { - type Error = serde_json::Error; + type Error = serde_json::Error; - fn try_from(s: &str) -> Result<Self, Self::Error> { - serde_json::from_str(s) - } + fn try_from(s: &str) -> Result<Self, Self::Error> { + serde_json::from_str(s) + } } impl AuthId { - /// Establish a Chronicle user via JWT using a provided pointer into the JWT claims, - /// caching the claims with the JWT user identity - pub fn from_jwt_claims(
JwtClaims(claims): &JwtClaims, - id_keys: &BTreeSet<String>, - ) -> Result<Self, IdentityError> { - const ZERO: [u8; 1] = [0]; - - let mut hasher = Sha512::new(); - - for id_key in id_keys { - if let Some(Value::String(claim_value)) = claims.get(id_key) { - hasher.update(id_key.as_bytes()); - hasher.update(ZERO); - hasher.update(claim_value.as_bytes()); - hasher.update(ZERO); - } else { - let keys_available: Vec<&String> = claims.keys().collect(); - warn!( + JwtClaims(claims): &JwtClaims, + id_keys: &BTreeSet<String>, + ) -> Result<Self, IdentityError> { + const ZERO: [u8; 1] = [0]; + + let mut hasher = Sha512::new(); + + for id_key in id_keys { + if let Some(Value::String(claim_value)) = claims.get(id_key) { + hasher.update(id_key.as_bytes()); + hasher.update(ZERO); + hasher.update(claim_value.as_bytes()); + hasher.update(ZERO); + } else { + let keys_available: Vec<&String> = claims.keys().collect(); + warn!( "For constructing Chronicle identity no {id_key:?} field among JWT claims: {keys_available:?}" ); - return Err(IdentityError::JwtClaims); - } - } - - Ok(Self::JWT(JwtId::new(&hex::encode(hasher.finalize()), Value::Object(claims.to_owned())))) - } - - /// Create an Anonymous Chronicle user - pub fn anonymous() -> Self { - Self::Anonymous - } - - /// Create a Chronicle super user - pub fn chronicle() -> Self { - Self::Chronicle - } - - /// Serialize identity to a JSON object containing "type" ("Anonymous", "Chronicle", or "JWT"), - /// and, in the case of a JWT identity, "id" fields - the Input for an OPA check - pub fn identity(&self) -> Result<Value, IdentityError> { - serde_json::to_value(self).map_err(|e| IdentityError::SerdeJsonSerialize(e.to_string())) - } + return Err(IdentityError::JwtClaims); + } + } + + Ok(Self::JWT(JwtId::new(&hex::encode(hasher.finalize()), Value::Object(claims.to_owned())))) + } + + /// Create an Anonymous Chronicle user + pub fn anonymous() -> Self { + Self::Anonymous + } + + /// Create a Chronicle super user + pub fn chronicle() -> Self { + Self::Chronicle + } + + /// Serialize identity to a JSON object containing "type" ("Anonymous", "Chronicle", or "JWT"), + /// and, in the case of a JWT identity, "id" fields - the Input for an OPA check + pub fn identity(&self) -> Result<Value, IdentityError> { + serde_json::to_value(self).map_err(|e| IdentityError::SerdeJsonSerialize(e.to_string())) + } } /// Context data for an OPA check - `operation` and `state` fields are /// equivalent to GraphQL parent type and path node #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone, Default)] struct Context { - operation: Value, - state: Value, + operation: Value, + state: Value, } /// Identity and Context data for an OPA check #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct IdentityContext { - identity: AuthId, - context: Context, + identity: AuthId, + context: Context, } impl IdentityContext { - pub fn new(identity: AuthId, operation: Value, state: Value) -> Self { - Self { identity, context: Context { operation, state } } - } + pub fn new(identity: AuthId, operation: Value, state: Value) -> Self { + Self { identity, context: Context { operation, state } } + } } /// Contextual data for OPA created either via GraphQL or in the Transaction Processor #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] #[serde(rename_all = "lowercase", tag = "type")] pub enum OpaData { - GraphQL(IdentityContext), - Operation(IdentityContext), + GraphQL(IdentityContext), +
Operation(IdentityContext), } impl OpaData { - pub fn graphql(identity: &AuthId, parent_type: &Value, resolve_path: &Value) -> Self { - Self::GraphQL(IdentityContext::new( - identity.to_owned(), - parent_type.to_owned(), - resolve_path.to_owned(), - )) - } - - pub fn operation(identity: &AuthId, operation: &Value, state: &Value) -> Self { - Self::Operation(IdentityContext::new( - identity.to_owned(), - operation.to_owned(), - state.to_owned(), - )) - } + pub fn graphql(identity: &AuthId, parent_type: &Value, resolve_path: &Value) -> Self { + Self::GraphQL(IdentityContext::new( + identity.to_owned(), + parent_type.to_owned(), + resolve_path.to_owned(), + )) + } + + pub fn operation(identity: &AuthId, operation: &Value, state: &Value) -> Self { + Self::Operation(IdentityContext::new( + identity.to_owned(), + operation.to_owned(), + state.to_owned(), + )) + } } /// Signed user identity containing the serialized identity, signature, and /// verifying key. Implements `TryFrom` to deserialize to the user identity object #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Clone, Debug, Eq, PartialEq)] pub struct SignedIdentity { - pub identity: String, - pub signature: Option<Vec<u8>>, - pub verifying_key: Option<Vec<u8>>, + pub identity: String, + pub signature: Option<Vec<u8>>, + pub verifying_key: Option<Vec<u8>>, } impl SignedIdentity { - fn new(id: &AuthId, signature: Vec<u8>, verifying_key: Vec<u8>) -> Result<Self, serde_json::Error> { - Ok(Self { - identity: serde_json::to_string(&id)?, - signature: Some(signature), - verifying_key: Some(verifying_key), - }) - } - - pub fn new_no_identity() -> Self { - Self { identity: "none".to_string(), signature: None, verifying_key: None } - } + fn new(id: &AuthId, signature: Vec<u8>, verifying_key: Vec<u8>) -> Result<Self, serde_json::Error> { + Ok(Self { + identity: serde_json::to_string(&id)?, + signature: Some(signature), + verifying_key: Some(verifying_key), + }) + } + + pub fn new_no_identity() -> Self { + Self { identity: "none".to_string(), signature: None, verifying_key: None } + } } impl TryFrom<&SignedIdentity> for AuthId { - type Error = serde_json::Error; + type Error = serde_json::Error; - fn try_from(signed_identity: &SignedIdentity) -> Result<Self, Self::Error> { - serde_json::from_str(&signed_identity.identity) - } + fn try_from(signed_identity: &SignedIdentity) -> Result<Self, Self::Error> { + serde_json::from_str(&signed_identity.identity) + } } #[cfg(test)] mod tests { - use serde_json::json; - - use crate::prov::{ExternalId, ExternalIdPart}; - - use super::*; - - fn external_id_from_jwt_claims<'a>(claim_strings: impl Iterator<Item = &'a str>) -> ExternalId { - const ZERO: [u8; 1] = [0]; - let mut hasher = Sha512::new(); - claim_strings.for_each(|s| { - hasher.update(s.as_bytes()); - hasher.update(ZERO); - }); - hex::encode(hasher.finalize()).into() - } - - #[test] - fn test_auth_id_serialization() { + use serde_json::json; + + use crate::prov::{ExternalId, ExternalIdPart}; + + use super::*; + + fn external_id_from_jwt_claims<'a>(claim_strings: impl Iterator<Item = &'a str>) -> ExternalId { + const ZERO: [u8; 1] = [0]; + let mut hasher = Sha512::new(); + claim_strings.for_each(|s| { + hasher.update(s.as_bytes()); + hasher.update(ZERO); + }); + hex::encode(hasher.finalize()).into() + } + + #[test] + fn test_auth_id_serialization() {
+ let auth_id = AuthId::anonymous(); + insta::assert_json_snapshot!(auth_id, @r###" { "type": "anonymous" } "###); - let auth_id = AuthId::chronicle(); - insta::assert_json_snapshot!(auth_id, @r###" + let auth_id = AuthId::chronicle(); + insta::assert_json_snapshot!(auth_id, @r###" { "type": "chronicle" } "###); - let claims = JwtClaims( - json!({ + let claims = JwtClaims( + json!({ "name": "abcdef", }) - .as_object() - .unwrap() - .to_owned(), - ); - let auth_id = - AuthId::from_jwt_claims(&claims, &BTreeSet::from(["name".to_string()])).unwrap(); - insta::assert_json_snapshot!(auth_id, @r###" + .as_object() + .unwrap() + .to_owned(), + ); + let auth_id = + AuthId::from_jwt_claims(&claims, &BTreeSet::from(["name".to_string()])).unwrap(); + insta::assert_json_snapshot!(auth_id, @r###" { "type": "jwt", "id": "6e7f57aeab5edb9bf5863ba2d749715b6f9a9079f3b8c81b6207d437c005b5b9f6f14de53c34c38ee0b1cc77fa6e02b5cef694faf5aaf028b58c15b3c4ee1cb0", @@ -287,59 +287,59 @@ mod tests { } "###); - if let AuthId::JWT(JwtId { id, .. }) = auth_id { - assert_eq!( - &external_id_from_jwt_claims(vec!["name", "abcdef"].into_iter()), - id.external_id_part() - ); - } else { - panic!("did not receive expected JWT identity: {auth_id}"); - } - } - - #[test] - fn test_auth_id_deserialization() { - let serialized = r#"{"type":"anonymous"}"#; - let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); - assert_eq!(deserialized, AuthId::Anonymous); - - let serialized = r#"{"type":"chronicle"}"#; - let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); - assert_eq!(deserialized, AuthId::Chronicle); - - let serialized = r#"{ + if let AuthId::JWT(JwtId { id, .. }) = auth_id { + assert_eq!( + &external_id_from_jwt_claims(vec!["name", "abcdef"].into_iter()), + id.external_id_part() + ); + } else { + panic!("did not receive expected JWT identity: {auth_id}"); + } + } + + #[test] + fn test_auth_id_deserialization() { + let serialized = r#"{"type":"anonymous"}"#; + let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); + assert_eq!(deserialized, AuthId::Anonymous); + + let serialized = r#"{"type":"chronicle"}"#; + let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); + assert_eq!(deserialized, AuthId::Chronicle); + + let serialized = r#"{ "type": "jwt", "id": "abcdef", "claims": { "name": "abcdef" } }"#; - let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); - assert_eq!( - deserialized, - AuthId::JWT(JwtId { - id: AgentId::from_external_id("abcdef"), - claims: json!({ + let deserialized: AuthId = serde_json::from_str(serialized).unwrap(); + assert_eq!( + deserialized, + AuthId::JWT(JwtId { + id: AgentId::from_external_id("abcdef"), + claims: json!({ "name": "abcdef" }), - }) - ); - } - - #[test] - fn test_auth_id_from_jwt_claims() { - let claims = JwtClaims( - json!({ + }) + ); + } + + #[test] + fn test_auth_id_from_jwt_claims() { + let claims = JwtClaims( + json!({ "sub": "John Doe" }) - .as_object() - .unwrap() - .to_owned(), - ); - let auth_id = - AuthId::from_jwt_claims(&claims, &BTreeSet::from(["sub".to_string()])).unwrap(); - - insta::assert_json_snapshot!(auth_id, @r###" + .as_object() + .unwrap() + .to_owned(), + ); + let auth_id = + AuthId::from_jwt_claims(&claims, &BTreeSet::from(["sub".to_string()])).unwrap(); + + insta::assert_json_snapshot!(auth_id, @r###" { "type": "jwt", "id": "13cc0854e3c226984a47e3159be9d71dae9796586ae15c493a7dcb79c2c511be7b311a238439a6922b779014b2bc71f351ff388fcac012d4f20f161720fa0dcf", @@ -349,47 +349,47 @@ mod tests { } 
"###); - if let AuthId::JWT(JwtId { id, .. }) = auth_id { - assert_eq!( - &external_id_from_jwt_claims(vec!["sub", "John Doe"].into_iter()), - id.external_id_part() - ); - } else { - panic!("did not receive expected JWT identity: {auth_id}"); - } - } - - #[test] - fn test_auth_id_from_jwt_claims_failure() { - let claims = JwtClaims( - json!({ + if let AuthId::JWT(JwtId { id, .. }) = auth_id { + assert_eq!( + &external_id_from_jwt_claims(vec!["sub", "John Doe"].into_iter()), + id.external_id_part() + ); + } else { + panic!("did not receive expected JWT identity: {auth_id}"); + } + } + + #[test] + fn test_auth_id_from_jwt_claims_failure() { + let claims = JwtClaims( + json!({ "sub": "John Doe" }) - .as_object() - .unwrap() - .to_owned(), - ); - let auth_id_result = - AuthId::from_jwt_claims(&claims, &BTreeSet::from(["externalId".to_string()])); - assert!(auth_id_result.is_err()); - assert_eq!(auth_id_result.unwrap_err().to_string(), IdentityError::JwtClaims.to_string()); - } - - #[test] - fn test_opa_data_serialization() { - let identity = AuthId::Chronicle; - let operation = json!({ + .as_object() + .unwrap() + .to_owned(), + ); + let auth_id_result = + AuthId::from_jwt_claims(&claims, &BTreeSet::from(["externalId".to_string()])); + assert!(auth_id_result.is_err()); + assert_eq!(auth_id_result.unwrap_err().to_string(), IdentityError::JwtClaims.to_string()); + } + + #[test] + fn test_opa_data_serialization() { + let identity = AuthId::Chronicle; + let operation = json!({ "resource": "users", "action": "read" }); - let state = json!([{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}]); - let context = OpaData::graphql(&identity, &operation, &state); + let state = json!([{"id": 1, "name": "Alice"}, {"id": 2, "name": "Bob"}]); + let context = OpaData::graphql(&identity, &operation, &state); - let json = serde_json::to_string(&context).unwrap(); - let deserialized_context: OpaData = serde_json::from_str(&json).unwrap(); + let json = serde_json::to_string(&context).unwrap(); + let deserialized_context: OpaData = serde_json::from_str(&json).unwrap(); - assert!(context == deserialized_context); - insta::assert_json_snapshot!(context, @r###" + assert!(context == deserialized_context); + insta::assert_json_snapshot!(context, @r###" { "type": "graphql", "identity": { @@ -413,30 +413,30 @@ mod tests { } } "###); - } + } - #[test] - fn test_jwt_claims_custom_debug() { - let claims = JwtClaims( - json!({ + #[test] + fn test_jwt_claims_custom_debug() { + let claims = JwtClaims( + json!({ "key": "value", }) - .as_object() - .unwrap() - .to_owned(), - ); - insta::assert_debug_snapshot!(claims, @r###"JwtClaims({"key": "***SECRET***"})"###); - } - - #[test] - fn test_jwt_id_custom_debug() { - let jwt_id = AuthId::JWT(JwtId { - id: AgentId::from_external_id("abcdef"), - claims: json!({ + .as_object() + .unwrap() + .to_owned(), + ); + insta::assert_debug_snapshot!(claims, @r###"JwtClaims({"key": "***SECRET***"})"###); + } + + #[test] + fn test_jwt_id_custom_debug() { + let jwt_id = AuthId::JWT(JwtId { + id: AgentId::from_external_id("abcdef"), + claims: json!({ "key": "value" }), - }); - insta::assert_debug_snapshot!(jwt_id, @r###" + }); + insta::assert_debug_snapshot!(jwt_id, @r###" JWT( JwtId { id: AgentId( @@ -448,5 +448,5 @@ mod tests { }, ) "###); - } + } } diff --git a/crates/common/src/ledger.rs b/crates/common/src/ledger.rs index 1d7d5b9da..6c65ecc63 100644 --- a/crates/common/src/ledger.rs +++ b/crates/common/src/ledger.rs @@ -2,634 +2,634 @@ use tracing::instrument; use uuid::Uuid; use crate::{ - 
identity::SignedIdentity, - prov::{ - operations::{ - ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation, - CreateNamespace, EndActivity, EntityDerive, EntityExists, SetAttributes, StartActivity, - WasAssociatedWith, WasAttributedTo, WasGeneratedBy, WasInformedBy, - }, - ChronicleIri, ChronicleTransactionId, Contradiction, NamespaceId, ProcessorError, - ProvModel, - }, + identity::SignedIdentity, + prov::{ + operations::{ + ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation, + CreateNamespace, EndActivity, EntityDerive, EntityExists, SetAttributes, StartActivity, + WasAssociatedWith, WasAttributedTo, WasGeneratedBy, WasInformedBy, + }, + ChronicleIri, ChronicleTransactionId, Contradiction, NamespaceId, ProcessorError, + ProvModel, + }, }; #[cfg(not(feature = "std"))] use parity_scale_codec::{ - alloc::boxed::Box, alloc::collections::btree_map::Entry, alloc::collections::BTreeMap, - alloc::collections::BTreeSet, alloc::string::String, alloc::sync::Arc, alloc::vec::Vec + alloc::boxed::Box, alloc::collections::btree_map::Entry, alloc::collections::BTreeMap, + alloc::collections::BTreeSet, alloc::string::String, alloc::sync::Arc, alloc::vec::Vec, }; #[cfg(not(feature = "std"))] use scale_info::prelude::*; #[cfg(feature = "std")] use std::{ - boxed::Box, collections::btree_map::Entry, collections::BTreeMap, collections::BTreeSet, - sync::Arc, + boxed::Box, collections::btree_map::Entry, collections::BTreeMap, collections::BTreeSet, + sync::Arc, }; #[derive(Debug, Clone)] pub enum SubmissionError { - Communication { source: Arc, tx_id: ChronicleTransactionId }, - Processor { source: Arc, tx_id: ChronicleTransactionId }, - Contradiction { source: Contradiction, tx_id: ChronicleTransactionId }, + Communication { source: Arc, tx_id: ChronicleTransactionId }, + Processor { source: Arc, tx_id: ChronicleTransactionId }, + Contradiction { source: Contradiction, tx_id: ChronicleTransactionId }, } #[cfg(feature = "std")] impl std::error::Error for SubmissionError {} impl SubmissionError { - pub fn tx_id(&self) -> &ChronicleTransactionId { - match self { - SubmissionError::Communication { tx_id, .. } => tx_id, - SubmissionError::Processor { tx_id, .. } => tx_id, - SubmissionError::Contradiction { tx_id, .. } => tx_id, - } - } - - pub fn processor(tx_id: &ChronicleTransactionId, source: ProcessorError) -> SubmissionError { - SubmissionError::Processor { source: Arc::new(source), tx_id: *tx_id } - } - - pub fn contradiction(tx_id: &ChronicleTransactionId, source: Contradiction) -> SubmissionError { - SubmissionError::Contradiction { source, tx_id: *tx_id } - } - - pub fn communication(tx_id: &ChronicleTransactionId, source: anyhow::Error) -> SubmissionError { - SubmissionError::Communication { source: Arc::new(source), tx_id: *tx_id } - } + pub fn tx_id(&self) -> &ChronicleTransactionId { + match self { + SubmissionError::Communication { tx_id, .. } => tx_id, + SubmissionError::Processor { tx_id, .. } => tx_id, + SubmissionError::Contradiction { tx_id, .. 
} => tx_id, + } + } + + pub fn processor(tx_id: &ChronicleTransactionId, source: ProcessorError) -> SubmissionError { + SubmissionError::Processor { source: Arc::new(source), tx_id: *tx_id } + } + + pub fn contradiction(tx_id: &ChronicleTransactionId, source: Contradiction) -> SubmissionError { + SubmissionError::Contradiction { source, tx_id: *tx_id } + } + + pub fn communication(tx_id: &ChronicleTransactionId, source: anyhow::Error) -> SubmissionError { + SubmissionError::Communication { source: Arc::new(source), tx_id: *tx_id } + } } #[derive(Debug)] pub enum SubscriptionError { - Implementation { source: anyhow::Error }, + Implementation { source: anyhow::Error }, } impl core::fmt::Display for SubscriptionError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Implementation { .. } => write!(f, "Subscription error"), - } - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Implementation { .. } => write!(f, "Subscription error"), + } + } } impl core::fmt::Display for SubmissionError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - Self::Communication { source, .. } => write!(f, "Ledger error {source} "), - Self::Processor { source, .. } => write!(f, "Processor error {source} "), - Self::Contradiction { source, .. } => write!(f, "Contradiction: {source}"), - } - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Communication { source, .. } => write!(f, "Ledger error {source} "), + Self::Processor { source, .. } => write!(f, "Processor error {source} "), + Self::Contradiction { source, .. } => write!(f, "Contradiction: {source}"), + } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - scale_encode::EncodeAsType, - parity_scale_codec::Encode, - parity_scale_codec::Decode - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + scale_encode::EncodeAsType, + parity_scale_codec::Encode, + parity_scale_codec::Decode + ) )] #[derive(Debug, Clone, Eq, PartialEq)] pub struct OperationSubmission { - pub correlation_id: [u8; 16], - pub items: Arc<Vec<ChronicleOperation>>, - pub identity: Arc<SignedIdentity>, + pub correlation_id: [u8; 16], + pub items: Arc<Vec<ChronicleOperation>>, + pub identity: Arc<SignedIdentity>, } impl OperationSubmission { - pub fn new(uuid: Uuid, identity: SignedIdentity, operations: Vec<ChronicleOperation>) -> Self { - OperationSubmission { - correlation_id: uuid.into_bytes(), - identity: identity.into(), - items: operations.into(), - } - } - - pub fn new_anonymous(uuid: Uuid, operations: Vec<ChronicleOperation>) -> Self { - Self::new(uuid, SignedIdentity::new_no_identity(), operations) - } + pub fn new(uuid: Uuid, identity: SignedIdentity, operations: Vec<ChronicleOperation>) -> Self { + OperationSubmission { + correlation_id: uuid.into_bytes(), + identity: identity.into(), + items: operations.into(), + } + } + + pub fn new_anonymous(uuid: Uuid, operations: Vec<ChronicleOperation>) -> Self { + Self::new(uuid, SignedIdentity::new_no_identity(), operations) + } } pub type SubmitResult = Result<ChronicleTransactionId, SubmissionError>; #[derive(Debug, Clone)] pub struct Commit { - pub tx_id: ChronicleTransactionId, - pub block_id: String, - pub delta: Box<ProvModel>, + pub tx_id: ChronicleTransactionId, + pub block_id: String, + pub delta: Box<ProvModel>, } impl Commit { - pub fn new(tx_id: ChronicleTransactionId, block_id: String, delta: Box<ProvModel>) -> Self { - Commit { tx_id, block_id, delta } - } + pub fn new(tx_id: ChronicleTransactionId, block_id: String, delta: Box<ProvModel>) -> Self { + Commit { tx_id, block_id, delta } + } } pub type CommitResult = Result<Commit, (ChronicleTransactionId, Contradiction)>;
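// Minimal usage sketches, assuming the items in this hunk are in scope and
// the `uuid` crate's `v4` feature is enabled. An anonymous submission wraps
// the operations with the "none" identity, and the raw UUID bytes serve as a
// correlation id for matching SubmissionStage events (defined just below);
// Rust item order within a module does not matter, so the forward reference
// is legal.
fn example_submission(ops: Vec<ChronicleOperation>) -> OperationSubmission {
    OperationSubmission::new_anonymous(Uuid::new_v4(), ops)
}

fn example_report_stage(stage: &SubmissionStage) {
    // Mirrors the `tx_id` accessor: every stage carries a transaction id.
    match stage {
        SubmissionStage::Submitted(Ok(tx)) => tracing::info!(?tx, "submitted"),
        SubmissionStage::Submitted(Err(e)) =>
            tracing::warn!(tx = ?e.tx_id(), "rejected: {e}"),
        SubmissionStage::Committed(commit, _) =>
            tracing::info!(tx = ?commit.tx_id, block = %commit.block_id, "committed"),
        SubmissionStage::NotCommitted((tx, contradiction, _)) =>
            tracing::warn!(?tx, "contradiction: {contradiction}"),
    }
}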
#[derive(Debug, Clone)] pub enum SubmissionStage { - Submitted(SubmitResult), - Committed(Commit, Box<SignedIdentity>), - NotCommitted((ChronicleTransactionId, Contradiction, Box<SignedIdentity>)), + Submitted(SubmitResult), + Committed(Commit, Box<SignedIdentity>), + NotCommitted((ChronicleTransactionId, Contradiction, Box<SignedIdentity>)), } impl SubmissionStage { - pub fn submitted_error(r: &SubmissionError) -> Self { - SubmissionStage::Submitted(Err(r.clone())) - } - - pub fn submitted(r: &ChronicleTransactionId) -> Self { - SubmissionStage::Submitted(Ok(*r)) - } - - pub fn committed(commit: Commit, identity: SignedIdentity) -> Self { - SubmissionStage::Committed(commit, identity.into()) - } - - pub fn not_committed( - tx: ChronicleTransactionId, - contradiction: Contradiction, - identity: SignedIdentity, - ) -> Self { - SubmissionStage::NotCommitted((tx, contradiction, identity.into())) - } - - pub fn tx_id(&self) -> &ChronicleTransactionId { - match self { - Self::Submitted(tx_id) => match tx_id { - Ok(tx_id) => tx_id, - Err(e) => e.tx_id(), - }, - Self::Committed(commit, _) => &commit.tx_id, - Self::NotCommitted((tx_id, _, _)) => tx_id, - } - } + pub fn submitted_error(r: &SubmissionError) -> Self { + SubmissionStage::Submitted(Err(r.clone())) + } + + pub fn submitted(r: &ChronicleTransactionId) -> Self { + SubmissionStage::Submitted(Ok(*r)) + } + + pub fn committed(commit: Commit, identity: SignedIdentity) -> Self { + SubmissionStage::Committed(commit, identity.into()) + } + + pub fn not_committed( + tx: ChronicleTransactionId, + contradiction: Contradiction, + identity: SignedIdentity, + ) -> Self { + SubmissionStage::NotCommitted((tx, contradiction, identity.into())) + } + + pub fn tx_id(&self) -> &ChronicleTransactionId { + match self { + Self::Submitted(tx_id) => match tx_id { + Ok(tx_id) => tx_id, + Err(e) => e.tx_id(), + }, + Self::Committed(commit, _) => &commit.tx_id, + Self::NotCommitted((tx_id, _, _)) => tx_id, + } + } } #[derive(PartialEq, Eq, PartialOrd, Ord, Debug, Clone)] #[cfg_attr( - feature = "parity-encoding", - derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode) + feature = "parity-encoding", + derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode) )] pub struct ChronicleAddress { - // Namespaces do not have a namespace - namespace: Option<NamespaceId>, - resource: ChronicleIri, + // Namespaces do not have a namespace + namespace: Option<NamespaceId>, + resource: ChronicleIri, } #[cfg(feature = "parity-encoding")] impl parity_scale_codec::MaxEncodedLen for ChronicleAddress { - fn max_encoded_len() -> usize { - 2048usize - } + fn max_encoded_len() -> usize { + 2048usize + } } impl core::fmt::Display for ChronicleAddress { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - if let Some(namespace) = &self.namespace { - write!(f, "{}:{}", namespace, self.resource) - } else { - write!(f, "{}", self.resource) - } - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + if let Some(namespace) = &self.namespace { + write!(f, "{}:{}", namespace, self.resource) + } else { + write!(f, "{}", self.resource) + } + } } pub trait NameSpacePart { - fn namespace_part(&self) -> Option<NamespaceId>; + fn namespace_part(&self) -> Option<NamespaceId>; } impl NameSpacePart for ChronicleAddress { - fn namespace_part(&self) -> Option<NamespaceId> { - self.namespace.clone() - } + fn namespace_part(&self) -> Option<NamespaceId> { + self.namespace.clone() + } } pub trait ResourcePart { - fn resource_part(&self) -> ChronicleIri; + fn resource_part(&self) -> ChronicleIri; } impl ResourcePart for ChronicleAddress { - fn resource_part(&self)
-> ChronicleIri { - self.resource.clone() - } + fn resource_part(&self) -> ChronicleIri { + self.resource.clone() + } } impl ChronicleAddress { - fn namespace(ns: &NamespaceId) -> Self { - Self { namespace: None, resource: ns.clone().into() } - } + fn namespace(ns: &NamespaceId) -> Self { + Self { namespace: None, resource: ns.clone().into() } + } - fn in_namespace(ns: &NamespaceId, resource: impl Into) -> Self { - Self { namespace: Some(ns.clone()), resource: resource.into() } - } + fn in_namespace(ns: &NamespaceId, resource: impl Into) -> Self { + Self { namespace: Some(ns.clone()), resource: resource.into() } + } } // Split a ProvModel into a snapshot list of its components - Namespaces, Entities, Activities and // Agents pub trait ProvSnapshot { - fn to_snapshot(&self) -> Vec<((Option, ChronicleIri), ProvModel)>; + fn to_snapshot(&self) -> Vec<((Option, ChronicleIri), ProvModel)>; } impl ProvSnapshot for ProvModel { - fn to_snapshot(&self) -> Vec<((Option, ChronicleIri), ProvModel)> { - let mut snapshot = Vec::new(); - - for (namespace_id, namespace) in &self.namespaces { - snapshot.push(( - (None, namespace_id.clone().into()), - ProvModel { - namespaces: vec![(namespace_id.clone(), namespace.clone())] - .into_iter() - .collect(), - ..Default::default() - }, - )); - } - - for ((ns, agent_id), agent) in &self.agents { - let mut delegation = BTreeMap::new(); - if let Some(delegation_set) = self.delegation.get(&(ns.clone(), agent_id.clone())) { - delegation.insert((ns.clone(), agent_id.clone()), delegation_set.clone()); - } - let mut acted_on_behalf_of = BTreeMap::new(); - if let Some(acted_on_behalf_of_set) = - self.acted_on_behalf_of.get(&(ns.clone(), agent_id.clone())) - { - acted_on_behalf_of - .insert((ns.clone(), agent_id.clone()), acted_on_behalf_of_set.clone()); - } - snapshot.push(( - (Some(ns.clone()), agent_id.clone().into()), - ProvModel { - agents: vec![((ns.clone(), agent_id.clone()), agent.clone())] - .into_iter() - .collect(), - delegation, - acted_on_behalf_of, - ..Default::default() - }, - )); - } - - for ((ns, activity_id), activity) in &self.activities { - let mut was_informed_by = BTreeMap::new(); - if let Some(was_informed_by_set) = - self.was_informed_by.get(&(ns.clone(), activity_id.clone())) - { - was_informed_by - .insert((ns.clone(), activity_id.clone()), was_informed_by_set.clone()); - } - let mut generated = BTreeMap::new(); - if let Some(generated_set) = self.generated.get(&(ns.clone(), activity_id.clone())) { - generated.insert((ns.clone(), activity_id.clone()), generated_set.clone()); - } - let mut usage = BTreeMap::new(); - if let Some(usage_set) = self.usage.get(&(ns.clone(), activity_id.clone())) { - usage.insert((ns.clone(), activity_id.clone()), usage_set.clone()); - } - let mut association = BTreeMap::new(); - if let Some(association_set) = self.association.get(&(ns.clone(), activity_id.clone())) - { - association.insert((ns.clone(), activity_id.clone()), association_set.clone()); - } - - snapshot.push(( - (Some(ns.clone()), activity_id.clone().into()), - ProvModel { - activities: vec![((ns.clone(), activity_id.clone()), activity.clone())] - .into_iter() - .collect(), - was_informed_by, - usage, - generated, - association, - ..Default::default() - }, - )); - } - - for ((ns, entity_id), entity) in &self.entities { - let mut derivation = BTreeMap::new(); - if let Some(derivation_set) = self.derivation.get(&(ns.clone(), entity_id.clone())) { - derivation.insert((ns.clone(), entity_id.clone()), derivation_set.clone()); - } - let mut generation = 
BTreeMap::new(); - if let Some(generation_set) = self.generation.get(&(ns.clone(), entity_id.clone())) { - generation.insert((ns.clone(), entity_id.clone()), generation_set.clone()); - } - let mut attribution = BTreeMap::new(); - if let Some(attribution_set) = self.attribution.get(&(ns.clone(), entity_id.clone())) { - attribution.insert((ns.clone(), entity_id.clone()), attribution_set.clone()); - } - snapshot.push(( - (Some(ns.clone()), entity_id.clone().into()), - ProvModel { - entities: vec![((ns.clone(), entity_id.clone()), entity.clone())] - .into_iter() - .collect(), - derivation, - generation, - attribution, - ..Default::default() - }, - )); - } - - snapshot - } + fn to_snapshot(&self) -> Vec<((Option, ChronicleIri), ProvModel)> { + let mut snapshot = Vec::new(); + + for (namespace_id, namespace) in &self.namespaces { + snapshot.push(( + (None, namespace_id.clone().into()), + ProvModel { + namespaces: vec![(namespace_id.clone(), namespace.clone())] + .into_iter() + .collect(), + ..Default::default() + }, + )); + } + + for ((ns, agent_id), agent) in &self.agents { + let mut delegation = BTreeMap::new(); + if let Some(delegation_set) = self.delegation.get(&(ns.clone(), agent_id.clone())) { + delegation.insert((ns.clone(), agent_id.clone()), delegation_set.clone()); + } + let mut acted_on_behalf_of = BTreeMap::new(); + if let Some(acted_on_behalf_of_set) = + self.acted_on_behalf_of.get(&(ns.clone(), agent_id.clone())) + { + acted_on_behalf_of + .insert((ns.clone(), agent_id.clone()), acted_on_behalf_of_set.clone()); + } + snapshot.push(( + (Some(ns.clone()), agent_id.clone().into()), + ProvModel { + agents: vec![((ns.clone(), agent_id.clone()), agent.clone())] + .into_iter() + .collect(), + delegation, + acted_on_behalf_of, + ..Default::default() + }, + )); + } + + for ((ns, activity_id), activity) in &self.activities { + let mut was_informed_by = BTreeMap::new(); + if let Some(was_informed_by_set) = + self.was_informed_by.get(&(ns.clone(), activity_id.clone())) + { + was_informed_by + .insert((ns.clone(), activity_id.clone()), was_informed_by_set.clone()); + } + let mut generated = BTreeMap::new(); + if let Some(generated_set) = self.generated.get(&(ns.clone(), activity_id.clone())) { + generated.insert((ns.clone(), activity_id.clone()), generated_set.clone()); + } + let mut usage = BTreeMap::new(); + if let Some(usage_set) = self.usage.get(&(ns.clone(), activity_id.clone())) { + usage.insert((ns.clone(), activity_id.clone()), usage_set.clone()); + } + let mut association = BTreeMap::new(); + if let Some(association_set) = self.association.get(&(ns.clone(), activity_id.clone())) + { + association.insert((ns.clone(), activity_id.clone()), association_set.clone()); + } + + snapshot.push(( + (Some(ns.clone()), activity_id.clone().into()), + ProvModel { + activities: vec![((ns.clone(), activity_id.clone()), activity.clone())] + .into_iter() + .collect(), + was_informed_by, + usage, + generated, + association, + ..Default::default() + }, + )); + } + + for ((ns, entity_id), entity) in &self.entities { + let mut derivation = BTreeMap::new(); + if let Some(derivation_set) = self.derivation.get(&(ns.clone(), entity_id.clone())) { + derivation.insert((ns.clone(), entity_id.clone()), derivation_set.clone()); + } + let mut generation = BTreeMap::new(); + if let Some(generation_set) = self.generation.get(&(ns.clone(), entity_id.clone())) { + generation.insert((ns.clone(), entity_id.clone()), generation_set.clone()); + } + let mut attribution = BTreeMap::new(); + if let Some(attribution_set) = 
self.attribution.get(&(ns.clone(), entity_id.clone())) { + attribution.insert((ns.clone(), entity_id.clone()), attribution_set.clone()); + } + snapshot.push(( + (Some(ns.clone()), entity_id.clone().into()), + ProvModel { + entities: vec![((ns.clone(), entity_id.clone()), entity.clone())] + .into_iter() + .collect(), + derivation, + generation, + attribution, + ..Default::default() + }, + )); + } + + snapshot + } } #[derive(Debug, Clone)] pub struct StateInput { - data: ProvModel, + data: ProvModel, } impl StateInput { - pub fn new(data: ProvModel) -> Self { - Self { data } - } + pub fn new(data: ProvModel) -> Self { + Self { data } + } - pub fn data(&self) -> &ProvModel { - &self.data - } + pub fn data(&self) -> &ProvModel { + &self.data + } } #[derive(Debug)] pub struct StateOutput { - pub address: ChronicleAddress, - pub data: ProvModel, + pub address: ChronicleAddress, + pub data: ProvModel, } impl StateOutput { - pub fn new(address: ChronicleAddress, data: ProvModel) -> Self { - Self { address, data } - } + pub fn new(address: ChronicleAddress, data: ProvModel) -> Self { + Self { address, data } + } - pub fn address(&self) -> &ChronicleAddress { - &self.address - } + pub fn address(&self) -> &ChronicleAddress { + &self.address + } - pub fn data(&self) -> &ProvModel { - &self.data - } + pub fn data(&self) -> &ProvModel { + &self.data + } } #[derive(Debug, Clone)] pub struct Version { - pub(crate) version: u32, - pub(crate) value: Option, + pub(crate) version: u32, + pub(crate) value: Option, } impl Version { - pub fn write(&mut self, value: Option) { - if value != self.value { - self.version += 1; - self.value = value - } - } + pub fn write(&mut self, value: Option) { + if value != self.value { + self.version += 1; + self.value = value + } + } } /// Hold a cache of `LedgerWriter::submit` input and output address data pub struct OperationState { - state: BTreeMap, + state: BTreeMap, } impl Default for OperationState { - fn default() -> Self { - Self::new() - } + fn default() -> Self { + Self::new() + } } impl OperationState { - pub fn new() -> Self { - Self { state: BTreeMap::new() } - } - - pub fn update_state_from_output(&mut self, output: impl Iterator) { - self.update_state(output.map(|output| (output.address, Some(output.data)))) - } - - /// Load input values into `OperationState` - pub fn update_state( - &mut self, - input: impl Iterator)>, - ) { - input.for_each(|(address, value)| { - let entry = self.state.entry(address); - if let Entry::Vacant(e) = entry { - e.insert(Version { version: 0, value }); - } else if let Entry::Occupied(mut e) = entry { - e.get_mut().write(value); - } - }); - } - - /// Return the input data held in `OperationState` - /// as a vector of `StateInput`s - pub fn input(&self) -> Vec { - self.state - .values() - .cloned() - .filter_map(|v| v.value.map(StateInput::new)) - .collect() - } - - /// Check if the data associated with an address has changed in processing - /// while outputting a stream of dirty `StateOutput`s - pub fn dirty(self) -> impl Iterator { - self.state - .into_iter() - .filter_map(|(addr, data)| { - if data.version > 0 { - data.value.map(|value| (StateOutput::new(addr, value))) - } else { - None - } - }) - .collect::>() - .into_iter() - } - - /// Return the input data held in `OperationState` for `addresses` as a vector of `StateInput`s - pub fn opa_context(&self, addresses: BTreeSet) -> Vec { - self.state - .iter() - .filter(|(addr, _data)| addresses.iter().any(|a| &a == addr)) - .map(|(_, data)| data.clone()) - .filter_map(|v| 
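// Sketch of the versioning rule above (crate-internal, since the fields are
// pub(crate)): the counter advances only when the stored value actually
// changes, which is exactly what `dirty` keys off further down.
fn example_version_semantics(model: ProvModel) {
    let mut slot = Version { version: 0, value: None };
    slot.write(Some(model.clone())); // value changed: version 0 -> 1
    slot.write(Some(model)); // same value again: version stays 1
    assert_eq!(slot.version, 1);
}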
v.value.map(StateInput::new)) - .collect() - } + pub fn new() -> Self { + Self { state: BTreeMap::new() } + } + + pub fn update_state_from_output(&mut self, output: impl Iterator) { + self.update_state(output.map(|output| (output.address, Some(output.data)))) + } + + /// Load input values into `OperationState` + pub fn update_state( + &mut self, + input: impl Iterator)>, + ) { + input.for_each(|(address, value)| { + let entry = self.state.entry(address); + if let Entry::Vacant(e) = entry { + e.insert(Version { version: 0, value }); + } else if let Entry::Occupied(mut e) = entry { + e.get_mut().write(value); + } + }); + } + + /// Return the input data held in `OperationState` + /// as a vector of `StateInput`s + pub fn input(&self) -> Vec { + self.state + .values() + .cloned() + .filter_map(|v| v.value.map(StateInput::new)) + .collect() + } + + /// Check if the data associated with an address has changed in processing + /// while outputting a stream of dirty `StateOutput`s + pub fn dirty(self) -> impl Iterator { + self.state + .into_iter() + .filter_map(|(addr, data)| { + if data.version > 0 { + data.value.map(|value| (StateOutput::new(addr, value))) + } else { + None + } + }) + .collect::>() + .into_iter() + } + + /// Return the input data held in `OperationState` for `addresses` as a vector of `StateInput`s + pub fn opa_context(&self, addresses: BTreeSet) -> Vec { + self.state + .iter() + .filter(|(addr, _data)| addresses.iter().any(|a| &a == addr)) + .map(|(_, data)| data.clone()) + .filter_map(|v| v.value.map(StateInput::new)) + .collect() + } } impl ChronicleOperation { - /// Compute dependencies for a chronicle operation, input and output addresses are always - /// symmetric - pub fn dependencies(&self) -> Vec { - match self { - ChronicleOperation::CreateNamespace(CreateNamespace { id, .. }) => { - vec![ChronicleAddress::namespace(id)] - } - ChronicleOperation::AgentExists(AgentExists { namespace, id, .. }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::ActivityExists(ActivityExists { namespace, id, .. }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::StartActivity(StartActivity { namespace, id, .. }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::WasAssociatedWith(WasAssociatedWith { - id, - namespace, - activity_id, - agent_id, - .. - }) => vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ChronicleAddress::in_namespace(namespace, activity_id.clone()), - ChronicleAddress::in_namespace(namespace, agent_id.clone()), - ], - ChronicleOperation::WasAttributedTo(WasAttributedTo { - id, - namespace, - entity_id, - agent_id, - .. - }) => vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ChronicleAddress::in_namespace(namespace, entity_id.clone()), - ChronicleAddress::in_namespace(namespace, agent_id.clone()), - ], - ChronicleOperation::EndActivity(EndActivity { namespace, id, .. 
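// Sketch of the intended cache round trip, assuming `prior` holds whatever
// was previously stored at the operation's dependency addresses: seed the
// cache at version 0, overlay the processing output, then persist only the
// entries whose version advanced.
fn example_state_roundtrip(
    prior: Vec<(ChronicleAddress, Option<ProvModel>)>,
    outputs: Vec<StateOutput>,
) -> Vec<StateOutput> {
    let mut state = OperationState::new();
    state.update_state(prior.into_iter()); // version 0 for untouched entries
    state.update_state_from_output(outputs.into_iter()); // bumps changed entries
    state.dirty().collect() // only version > 0 survives
}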
}) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, activity.clone()), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::EntityExists(EntityExists { namespace, id }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, activity.clone()), - ChronicleAddress::in_namespace(namespace, id.clone()), - ], - ChronicleOperation::WasInformedBy(WasInformedBy { - namespace, - activity, - informing_activity, - }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, activity.clone()), - ChronicleAddress::in_namespace(namespace, informing_activity.clone()), - ] - } - ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { - namespace, - id, - delegate_id, - activity_id, - responsible_id, - .. - }) => vec![ - Some(ChronicleAddress::namespace(namespace)), - activity_id.as_ref().map(|activity_id| { - ChronicleAddress::in_namespace(namespace, activity_id.clone()) - }), - Some(ChronicleAddress::in_namespace(namespace, delegate_id.clone())), - Some(ChronicleAddress::in_namespace(namespace, responsible_id.clone())), - Some(ChronicleAddress::in_namespace(namespace, id.clone())), - ] - .into_iter() - .flatten() - .collect(), - ChronicleOperation::EntityDerive(EntityDerive { - namespace, - id, - used_id, - activity_id, - .. - }) => vec![ - Some(ChronicleAddress::namespace(namespace)), - activity_id.as_ref().map(|activity_id| { - ChronicleAddress::in_namespace(namespace, activity_id.clone()) - }), - Some(ChronicleAddress::in_namespace(namespace, used_id.clone())), - Some(ChronicleAddress::in_namespace(namespace, id.clone())), - ] - .into_iter() - .flatten() - .collect(), - ChronicleOperation::SetAttributes(SetAttributes::Agent { id, namespace, .. }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::SetAttributes(SetAttributes::Entity { id, namespace, .. }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - ChronicleOperation::SetAttributes(SetAttributes::Activity { - id, namespace, .. 
- }) => { - vec![ - ChronicleAddress::namespace(namespace), - ChronicleAddress::in_namespace(namespace, id.clone()), - ] - } - } - } - - /// Apply an operation's input states to the prov model - /// Take input states and apply them to the prov model, then apply transaction, - /// then return a snapshot of output state for diff calculation - #[instrument(level = "debug", skip(self, model, input))] - pub fn process( - &self, - mut model: ProvModel, - input: Vec, - ) -> Result<(Vec, ProvModel), ProcessorError> { - for input in input.iter() { - model.combine(input.data()) - } - model.apply(self).map_err(ProcessorError::Contradiction)?; - Ok(( - model - .to_snapshot() - .into_iter() - .map(|((namespace, resource), prov)| { - StateOutput::new(ChronicleAddress { namespace, resource }, prov) - }) - .collect::>(), - model, - )) - } + /// Compute dependencies for a chronicle operation, input and output addresses are always + /// symmetric + pub fn dependencies(&self) -> Vec { + match self { + ChronicleOperation::CreateNamespace(CreateNamespace { id, .. }) => { + vec![ChronicleAddress::namespace(id)] + }, + ChronicleOperation::AgentExists(AgentExists { namespace, id, .. }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::ActivityExists(ActivityExists { namespace, id, .. }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::StartActivity(StartActivity { namespace, id, .. }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::WasAssociatedWith(WasAssociatedWith { + id, + namespace, + activity_id, + agent_id, + .. + }) => vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ChronicleAddress::in_namespace(namespace, activity_id.clone()), + ChronicleAddress::in_namespace(namespace, agent_id.clone()), + ], + ChronicleOperation::WasAttributedTo(WasAttributedTo { + id, + namespace, + entity_id, + agent_id, + .. + }) => vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ChronicleAddress::in_namespace(namespace, entity_id.clone()), + ChronicleAddress::in_namespace(namespace, agent_id.clone()), + ], + ChronicleOperation::EndActivity(EndActivity { namespace, id, .. 
}) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, activity.clone()), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::EntityExists(EntityExists { namespace, id }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, activity.clone()), + ChronicleAddress::in_namespace(namespace, id.clone()), + ], + ChronicleOperation::WasInformedBy(WasInformedBy { + namespace, + activity, + informing_activity, + }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, activity.clone()), + ChronicleAddress::in_namespace(namespace, informing_activity.clone()), + ] + }, + ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { + namespace, + id, + delegate_id, + activity_id, + responsible_id, + .. + }) => vec![ + Some(ChronicleAddress::namespace(namespace)), + activity_id.as_ref().map(|activity_id| { + ChronicleAddress::in_namespace(namespace, activity_id.clone()) + }), + Some(ChronicleAddress::in_namespace(namespace, delegate_id.clone())), + Some(ChronicleAddress::in_namespace(namespace, responsible_id.clone())), + Some(ChronicleAddress::in_namespace(namespace, id.clone())), + ] + .into_iter() + .flatten() + .collect(), + ChronicleOperation::EntityDerive(EntityDerive { + namespace, + id, + used_id, + activity_id, + .. + }) => vec![ + Some(ChronicleAddress::namespace(namespace)), + activity_id.as_ref().map(|activity_id| { + ChronicleAddress::in_namespace(namespace, activity_id.clone()) + }), + Some(ChronicleAddress::in_namespace(namespace, used_id.clone())), + Some(ChronicleAddress::in_namespace(namespace, id.clone())), + ] + .into_iter() + .flatten() + .collect(), + ChronicleOperation::SetAttributes(SetAttributes::Agent { id, namespace, .. }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::SetAttributes(SetAttributes::Entity { id, namespace, .. }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + ChronicleOperation::SetAttributes(SetAttributes::Activity { + id, namespace, .. 
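// Sketch: because an operation's input and output addresses are symmetric,
// the storage footprint of a whole batch is simply the union of each
// operation's dependency list.
fn example_batch_footprint(ops: &[ChronicleOperation]) -> BTreeSet<ChronicleAddress> {
    ops.iter().flat_map(|op| op.dependencies()).collect()
}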
+ }) => { + vec![ + ChronicleAddress::namespace(namespace), + ChronicleAddress::in_namespace(namespace, id.clone()), + ] + }, + } + } + + /// Apply an operation's input states to the prov model + /// Take input states and apply them to the prov model, then apply transaction, + /// then return a snapshot of output state for diff calculation + #[instrument(level = "debug", skip(self, model, input))] + pub fn process( + &self, + mut model: ProvModel, + input: Vec, + ) -> Result<(Vec, ProvModel), ProcessorError> { + for input in input.iter() { + model.combine(input.data()) + } + model.apply(self).map_err(ProcessorError::Contradiction)?; + Ok(( + model + .to_snapshot() + .into_iter() + .map(|((namespace, resource), prov)| { + StateOutput::new(ChronicleAddress { namespace, resource }, prov) + }) + .collect::>(), + model, + )) + } } diff --git a/crates/common/src/opa/core.rs b/crates/common/src/opa/core.rs index ad541b4cb..818ce2f8b 100644 --- a/crates/common/src/opa/core.rs +++ b/crates/common/src/opa/core.rs @@ -6,126 +6,126 @@ use parity_scale_codec::alloc::string::String; use scale_info::{prelude::vec, prelude::vec::Vec}; #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - scale_decode::DecodeAsType, - parity_scale_codec::MaxEncodedLen - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + scale_decode::DecodeAsType, + parity_scale_codec::MaxEncodedLen + ) )] -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub struct H128([u8; 16]); impl H128 { - pub fn new(value: [u8; 16]) -> Self { - H128(value) - } + pub fn new(value: [u8; 16]) -> Self { + H128(value) + } - pub fn into(self) -> [u8; 16] { - self.0 - } + pub fn into(self) -> [u8; 16] { + self.0 + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - scale_decode::DecodeAsType, - parity_scale_codec::MaxEncodedLen - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + scale_decode::DecodeAsType, + parity_scale_codec::MaxEncodedLen + ) )] -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub struct PolicyAddress(H128); impl fmt::Display for PolicyAddress { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "PolicyAddress({})", hex::encode(self.0.into())) - } + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "PolicyAddress({})", hex::encode(self.0.into())) + } } impl From<[u8; 16]> for PolicyAddress { - fn from(value: [u8; 16]) -> Self { - tracing::debug!("Converting [u8; 16] to PolicyAddress"); - PolicyAddress(H128::new(value)) - } + fn from(value: [u8; 16]) -> Self { + tracing::debug!("Converting [u8; 16] to PolicyAddress"); + PolicyAddress(H128::new(value)) + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - parity_scale_codec::MaxEncodedLen 
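// End-to-end sketch of `process` above, starting from an empty model:
// combine the inputs, apply the operation (a Contradiction surfaces as a
// ProcessorError), and snapshot the per-address outputs for diffing.
fn example_process(
    op: &ChronicleOperation,
    inputs: Vec<StateInput>,
) -> Result<Vec<StateOutput>, ProcessorError> {
    let (outputs, _model) = op.process(ProvModel::default(), inputs)?;
    Ok(outputs)
}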
- ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + parity_scale_codec::MaxEncodedLen + ) )] #[derive(Debug, PartialEq, Eq, Clone)] pub struct PolicyMetaAddress(H128); impl PolicyMetaAddress { - pub fn new(value: H128) -> Self { - PolicyMetaAddress(value) - } + pub fn new(value: H128) -> Self { + PolicyMetaAddress(value) + } - pub fn into(self) -> H128 { - self.0 - } + pub fn into(self) -> H128 { + self.0 + } } impl From<[u8; 16]> for H128 { - fn from(value: [u8; 16]) -> Self { - H128(value) - } + fn from(value: [u8; 16]) -> Self { + H128(value) + } } impl From<[u8; 16]> for PolicyMetaAddress { - fn from(value: [u8; 16]) -> Self { - PolicyMetaAddress(H128::new(value)) - } + fn from(value: [u8; 16]) -> Self { + PolicyMetaAddress(H128::new(value)) + } } impl From<[u8; 16]> for KeyAddress { - fn from(value: [u8; 16]) -> Self { - KeyAddress(H128::new(value)) - } + fn from(value: [u8; 16]) -> Self { + KeyAddress(H128::new(value)) + } } impl From for PolicyMetaAddress { - fn from(value: H128) -> Self { - PolicyMetaAddress(value) - } + fn from(value: H128) -> Self { + PolicyMetaAddress(value) + } } impl From for PolicyAddress { - fn from(value: H128) -> Self { - PolicyAddress(value) - } + fn from(value: H128) -> Self { + PolicyAddress(value) + } } impl From for KeyAddress { - fn from(value: H128) -> Self { - KeyAddress(value) - } + fn from(value: H128) -> Self { + KeyAddress(value) + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - parity_scale_codec::MaxEncodedLen - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + parity_scale_codec::MaxEncodedLen + ) )] #[derive(Debug, PartialEq, Eq, Clone)] pub struct KeyAddress(H128); @@ -134,56 +134,56 @@ pub struct KeyAddress(H128); // This message is used to bootstrap the root key for a newly created authz tp, // it can only be executed once pub struct BootstrapRoot { - pub public_key: PemEncoded, + pub public_key: PemEncoded, } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - scale_decode::DecodeAsType, - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + scale_decode::DecodeAsType, + ) )] -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, Clone, Eq, PartialEq)] pub struct PemEncoded(String); impl PemEncoded { - pub fn as_str(&self) -> &str { - &self.0 - } + pub fn as_str(&self) -> &str { + &self.0 + } - pub fn as_bytes(&self) -> &[u8] { - self.0.as_bytes() - } + pub fn as_bytes(&self) -> &[u8] { + self.0.as_bytes() + } } impl PemEncoded { - pub fn new(encoded: String) -> Self { - PemEncoded(encoded) - } + pub fn new(encoded: String) -> Self { + PemEncoded(encoded) + } } impl From for PemEncoded { - fn from(encoded: String) -> Self { - PemEncoded(encoded) - } + fn from(encoded: String) -> Self { + PemEncoded(encoded) + } } #[derive(Debug, Clone, Eq, PartialEq)] pub struct RegisterKey { - pub public_key: PemEncoded, - pub id: String, - pub overwrite_existing: bool, + pub 
public_key: PemEncoded, + pub id: String, + pub overwrite_existing: bool, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct NewPublicKey { - pub public_key: PemEncoded, - pub id: String, + pub public_key: PemEncoded, + pub id: String, } #[derive(Debug, Clone, Eq, PartialEq)] @@ -191,120 +191,120 @@ pub struct NewPublicKey { // message must be signed by the old key. The signature must be valid for // the new one, to demonstrate ownership of both keys pub struct RotateKey { - pub payload: NewPublicKey, - pub previous_signing_key: PemEncoded, - pub previous_signature: Vec, - pub new_signing_key: PemEncoded, - pub new_signature: Vec, + pub payload: NewPublicKey, + pub previous_signing_key: PemEncoded, + pub previous_signature: Vec, + pub new_signing_key: PemEncoded, + pub new_signature: Vec, } #[derive(Debug, Clone, Eq, PartialEq)] // Set the policy with name to the new policy, the SignedOperation for this must // be signed by the root key pub struct SetPolicy { - pub id: String, - pub policy: Policy, + pub id: String, + pub policy: Policy, } #[derive(Debug, Clone, Eq, PartialEq)] pub struct SignedOperationPayload { - pub operation: Operation, + pub operation: Operation, } #[derive(Debug, Clone, Eq, PartialEq)] // An OPA TP operation and its signature pub struct SignedOperation { - pub payload: SignedOperationPayload, - pub verifying_key: PemEncoded, - pub signature: Vec, + pub payload: SignedOperationPayload, + pub verifying_key: PemEncoded, + pub signature: Vec, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum Operation { - RegisterKey(RegisterKey), - RotateKey(RotateKey), - SetPolicy(SetPolicy), + RegisterKey(RegisterKey), + RotateKey(RotateKey), + SetPolicy(SetPolicy), } #[derive(Debug, Clone, Eq, PartialEq)] pub struct OpaSubmission { - pub version: String, - pub correlation_id: [u8; 16], - pub span_id: u64, - pub payload: Payload, + pub version: String, + pub correlation_id: [u8; 16], + pub span_id: u64, + pub payload: Payload, } #[derive(Debug, Clone, Eq, PartialEq)] pub enum Payload { - BootstrapRoot(BootstrapRoot), - SignedOperation(SignedOperation), + BootstrapRoot(BootstrapRoot), + SignedOperation(SignedOperation), } -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, Clone, PartialEq, Eq)] pub struct KeyRegistration { - pub key: PemEncoded, - pub version: u64, + pub key: PemEncoded, + pub version: u64, } -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, Clone, PartialEq, Eq)] pub struct Keys { - pub id: String, - pub current: KeyRegistration, - pub expired: Option, + pub id: String, + pub current: KeyRegistration, + pub expired: Option, } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - scale_decode::DecodeAsType, - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + scale_decode::DecodeAsType, + ) )] #[derive(Debug, Clone, PartialEq, Eq)] pub struct Policy(Vec); impl Policy { - pub fn new(data: Vec) -> Self { - Policy(data) - } + pub fn new(data: Vec) -> Self { + Policy(data) + } - pub fn as_bytes(&self) -> &[u8] { - &self.0 - } + pub fn as_bytes(&self) -> &[u8] { + &self.0 + } - pub fn into_vec(self) -> Vec { - self.0 
- } + pub fn into_vec(self) -> Vec { + self.0 + } } -#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize, ))] +#[cfg_attr(feature = "std", derive(serde::Serialize, serde::Deserialize,))] #[derive(Debug, Clone, PartialEq, Eq)] pub struct PolicyMeta { - pub id: String, - pub hash: H128, - pub policy_address: PolicyAddress, + pub id: String, + pub hash: H128, + pub policy_address: PolicyAddress, } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType, - scale_decode::DecodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType, + scale_decode::DecodeAsType + ) )] #[derive(Debug, Clone)] pub struct OpaSettings { - pub policy_address: PolicyAddress, - pub policy_name: String, - pub entrypoint: String, + pub policy_address: PolicyAddress, + pub policy_name: String, + pub entrypoint: String, } #[cfg(feature = "parity-encoding")] @@ -312,381 +312,381 @@ use parity_scale_codec::MaxEncodedLen; #[cfg(feature = "parity-encoding")] impl MaxEncodedLen for OpaSettings { - fn max_encoded_len() -> usize { - PolicyAddress::max_encoded_len() + 1024 - } + fn max_encoded_len() -> usize { + PolicyAddress::max_encoded_len() + 1024 + } } #[cfg(feature = "parity-encoding")] pub mod codec { - use super::*; - use parity_scale_codec::{Decode, Encode}; - - use scale_decode::DecodeAsType; - use scale_encode::EncodeAsType; - #[cfg(not(feature = "std"))] - use scale_info::prelude::vec::Vec; - - use scale_info::TypeInfo; - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Eq, PartialEq, TypeInfo, Clone)] - pub struct KeysV1 { - pub id: String, - pub current: KeyRegistrationV1, - pub expired: Option, - } - - impl MaxEncodedLen for KeysV1 { - fn max_encoded_len() -> usize { - 1024 + KeyRegistrationV1::max_encoded_len() + - Option::::max_encoded_len() - } - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Eq, PartialEq, TypeInfo, Clone)] - pub struct KeyRegistrationV1 { - // Der encoded public key - pub key: PemEncoded, - pub version: u64, - } - - impl MaxEncodedLen for KeyRegistrationV1 { - fn max_encoded_len() -> usize { - 1024 + u64::max_encoded_len() - } - } - - impl From for KeysV1 { - fn from(keys: super::Keys) -> Self { - Self { - id: keys.id, - current: KeyRegistrationV1 { key: keys.current.key, version: keys.current.version }, - expired: keys.expired.map(|expired_key| KeyRegistrationV1 { - key: expired_key.key, - version: expired_key.version, - }), - } - } - } - - impl core::convert::TryFrom for super::Keys { - type Error = core::convert::Infallible; - - fn try_from(keys_v1: KeysV1) -> Result { - Ok(Self { - id: keys_v1.id, - current: super::KeyRegistration { - key: keys_v1.current.key, - version: keys_v1.current.version, - }, - expired: keys_v1.expired.map(|expired_key_v1| super::KeyRegistration { - key: expired_key_v1.key, - version: expired_key_v1.version, - }), - }) - } - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct BootstrapRootV1 { - pub public_key: PemEncoded, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct RegisterKeyV1 { - pub public_key: PemEncoded, - pub id: String, - pub overwrite_existing: bool, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct 
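// Sketch: Policy is an opaque byte container (in practice a compiled OPA
// policy, though the type itself does not interpret the bytes); the buffer
// moves in and out unchanged.
fn example_policy_bytes(raw: Vec<u8>) {
    let policy = Policy::new(raw.clone());
    assert_eq!(policy.as_bytes(), raw.as_slice());
    assert_eq!(policy.into_vec(), raw);
}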
NewPublicKeyV1 { - pub public_key: PemEncoded, - pub id: String, - } - - impl From for NewPublicKeyV1 { - fn from(new_public_key: super::NewPublicKey) -> Self { - Self { public_key: new_public_key.public_key, id: new_public_key.id } - } - } - - impl core::convert::TryFrom for super::NewPublicKey { - type Error = core::convert::Infallible; - - fn try_from(new_public_key_v1: NewPublicKeyV1) -> Result { - Ok(Self { public_key: new_public_key_v1.public_key, id: new_public_key_v1.id }) - } - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct RotateKeyV1 { - pub payload: NewPublicKeyV1, - pub previous_signing_key: PemEncoded, - pub previous_signature: Vec, - pub new_signing_key: PemEncoded, - pub new_signature: Vec, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct SetPolicyV1 { - pub id: String, - pub policy: Policy, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct SignedOperationPayloadV1 { - pub operation: OperationV1, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct SignedOperationV1 { - pub payload: SignedOperationPayloadV1, - pub verifying_key: PemEncoded, - pub signature: Vec, - } - - impl From for SignedOperationV1 { - fn from(signed_operation: super::SignedOperation) -> Self { - Self { - payload: SignedOperationPayloadV1 { - operation: signed_operation.payload.operation.into(), - }, - verifying_key: signed_operation.verifying_key, - signature: signed_operation.signature, - } - } - } - - impl From for OperationV1 { - fn from(operation: super::Operation) -> Self { - match operation { - super::Operation::RegisterKey(register_key) => - OperationV1::RegisterKey(register_key.into()), - super::Operation::RotateKey(rotate_key) => - OperationV1::RotateKey(rotate_key.into()), - super::Operation::SetPolicy(set_policy) => - OperationV1::SetPolicy(set_policy.into()), - } - } - } - - impl From for RegisterKeyV1 { - fn from(register_key: super::RegisterKey) -> Self { - Self { - public_key: register_key.public_key, - id: register_key.id, - overwrite_existing: register_key.overwrite_existing, - } - } - } - - impl From for RotateKeyV1 { - fn from(rotate_key: super::RotateKey) -> Self { - Self { - payload: rotate_key.payload.into(), - previous_signing_key: rotate_key.previous_signing_key, - previous_signature: rotate_key.previous_signature, - new_signing_key: rotate_key.new_signing_key, - new_signature: rotate_key.new_signature, - } - } - } - - impl From for SetPolicyV1 { - fn from(set_policy: super::SetPolicy) -> Self { - Self { id: set_policy.id, policy: set_policy.policy } - } - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Clone, TypeInfo, PartialEq, Eq)] - pub enum OperationV1 { - RegisterKey(RegisterKeyV1), - RotateKey(RotateKeyV1), - SetPolicy(SetPolicyV1), - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub struct OpaSubmissionV1 { - pub version: String, - pub correlation_id: [u8; 16], - pub span_id: u64, - pub payload: PayloadV1, - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)] - pub enum PayloadV1 { - BootstrapRoot(BootstrapRootV1), - SignedOperation(SignedOperationV1), - } - - #[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq)] - pub struct PolicyV1(Vec); - - impl PolicyV1 { - pub fn 
into_vec(self) -> Vec<u8> {
-        self.0
-    }
-}
-
-impl MaxEncodedLen for PolicyV1 {
-    fn max_encoded_len() -> usize {
-        1024 * 1024 * 10
-    }
-}
-
-impl From<Policy> for codec::PolicyV1 {
-    fn from(item: Policy) -> Self {
-        Self(item.0)
-    }
-}
-
-impl core::convert::TryFrom<codec::PolicyV1> for Policy {
-    type Error = core::convert::Infallible;
-
-    fn try_from(value: codec::PolicyV1) -> Result<Self, Self::Error> {
-        Ok(Self(value.0))
-    }
-}
-
-#[derive(Encode, Decode, Debug, TypeInfo, Clone, PartialEq)]
-pub struct PolicyMetaV1 {
-    pub id: String,
-    pub hash: H128,
-    pub policy_address: PolicyAddress,
-}
-
-impl MaxEncodedLen for PolicyMetaV1 {
-    fn max_encoded_len() -> usize {
-        1024 + H128::max_encoded_len() + PolicyAddress::max_encoded_len()
-    }
-}
-
-impl From<super::PolicyMeta> for codec::PolicyMetaV1 {
-    fn from(item: super::PolicyMeta) -> Self {
-        Self { id: item.id, hash: item.hash, policy_address: item.policy_address }
-    }
-}
-
-impl core::convert::TryFrom<codec::PolicyMetaV1> for super::PolicyMeta {
-    type Error = core::convert::Infallible;
-
-    fn try_from(value: codec::PolicyMetaV1) -> Result<Self, Self::Error> {
-        Ok(Self { id: value.id, hash: value.hash, policy_address: value.policy_address })
-    }
-}
-
-impl From<codec::BootstrapRootV1> for BootstrapRoot {
-    fn from(item: codec::BootstrapRootV1) -> Self {
-        Self { public_key: item.public_key }
-    }
-}
-
-impl From<BootstrapRoot> for codec::BootstrapRootV1 {
-    fn from(item: BootstrapRoot) -> Self {
-        tracing::debug!(target: "codec_conversion", "Converting BootstrapRoot to BootstrapRootV1");
-        Self { public_key: item.public_key }
-    }
-}
-
-impl From<codec::PayloadV1> for Payload {
-    fn from(item: codec::PayloadV1) -> Self {
-        match item {
-            codec::PayloadV1::BootstrapRoot(v) => Self::BootstrapRoot(v.into()),
-            codec::PayloadV1::SignedOperation(v) => Self::SignedOperation(v.into()),
-        }
-    }
-}
-
-impl From<codec::OpaSubmissionV1> for OpaSubmission {
-    fn from(item: codec::OpaSubmissionV1) -> Self {
-        Self {
-            correlation_id: item.correlation_id,
-            version: item.version,
-            span_id: item.span_id,
-            payload: item.payload.into(),
-        }
-    }
-}
-
-impl From<OpaSubmission> for codec::OpaSubmissionV1 {
-    fn from(item: OpaSubmission) -> Self {
-        tracing::debug!(target: "codec_conversion", "Converting OpaSubmission to OpaSubmissionV1");
-        Self {
-            version: item.version,
-            correlation_id: item.correlation_id,
-            span_id: item.span_id,
-            payload: match item.payload {
-                Payload::BootstrapRoot(v) => {
-                    tracing::trace!(target: "codec_conversion", "Payload is BootstrapRoot");
-                    codec::PayloadV1::BootstrapRoot(v.into())
-                }
-                Payload::SignedOperation(v) => {
-                    tracing::trace!(target: "codec_conversion", "Payload is SignedOperation");
-                    codec::PayloadV1::SignedOperation(v.into())
-                }
-            },
-        }
-    }
-}
-
-impl From<codec::OperationV1> for Operation {
-    fn from(item: codec::OperationV1) -> Self {
-        match item {
-            codec::OperationV1::RegisterKey(v) => Self::RegisterKey(v.into()),
-            codec::OperationV1::RotateKey(v) => Self::RotateKey(v.into()),
-            codec::OperationV1::SetPolicy(v) => Self::SetPolicy(v.into()),
-        }
-    }
-}
-
-impl From<codec::SignedOperationV1> for SignedOperation {
-    fn from(item: codec::SignedOperationV1) -> Self {
-        Self {
-            payload: SignedOperationPayload { operation: item.payload.operation.into() },
-            verifying_key: item.verifying_key,
-            signature: item.signature,
-        }
-    }
-}
-
-impl From<codec::RotateKeyV1> for RotateKey {
-    fn from(item: codec::RotateKeyV1) -> Self {
-        Self {
-            payload: NewPublicKey { public_key: item.payload.public_key, id: item.payload.id },
-            previous_signing_key: item.previous_signing_key,
-            previous_signature: item.previous_signature,
-            new_signing_key: item.new_signing_key,
-            new_signature: item.new_signature,
-        }
-    }
-}
-
-impl From<codec::RegisterKeyV1> for RegisterKey {
-    fn from(item: codec::RegisterKeyV1) -> Self {
-        Self {
-            public_key: item.public_key,
-            id: item.id,
-            overwrite_existing: item.overwrite_existing,
-        }
-    }
-}
-
-impl From<codec::SetPolicyV1> for SetPolicy {
-    fn from(item: codec::SetPolicyV1) -> Self {
-        Self { id: item.id, policy: item.policy }
-    }
-}
-
-impl From<SignedOperationPayload> for codec::SignedOperationPayloadV1 {
-    fn from(item: SignedOperationPayload) -> Self {
-        codec::SignedOperationPayloadV1 {
-            operation: match item.operation {
-                Operation::RegisterKey(v) => codec::OperationV1::RegisterKey(v.into()),
-                Operation::RotateKey(v) => codec::OperationV1::RotateKey(v.into()),
-                Operation::SetPolicy(v) => codec::OperationV1::SetPolicy(v.into()),
-            },
-        }
-    }
-}
+use super::*;
+use parity_scale_codec::{Decode, Encode};
+
+use scale_decode::DecodeAsType;
+use scale_encode::EncodeAsType;
+#[cfg(not(feature = "std"))]
+use scale_info::prelude::vec::Vec;
+
+use scale_info::TypeInfo;
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Eq, PartialEq, TypeInfo, Clone)]
+pub struct KeysV1 {
+    pub id: String,
+    pub current: KeyRegistrationV1,
+    pub expired: Option<KeyRegistrationV1>,
+}
+
+impl MaxEncodedLen for KeysV1 {
+    fn max_encoded_len() -> usize {
+        1024 + KeyRegistrationV1::max_encoded_len() +
+            Option::<KeyRegistrationV1>::max_encoded_len()
+    }
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Eq, PartialEq, TypeInfo, Clone)]
+pub struct KeyRegistrationV1 {
+    // Der encoded public key
+    pub key: PemEncoded,
+    pub version: u64,
+}
+
+impl MaxEncodedLen for KeyRegistrationV1 {
+    fn max_encoded_len() -> usize {
+        1024 + u64::max_encoded_len()
+    }
+}
+
+impl From<super::Keys> for KeysV1 {
+    fn from(keys: super::Keys) -> Self {
+        Self {
+            id: keys.id,
+            current: KeyRegistrationV1 { key: keys.current.key, version: keys.current.version },
+            expired: keys.expired.map(|expired_key| KeyRegistrationV1 {
+                key: expired_key.key,
+                version: expired_key.version,
+            }),
+        }
+    }
+}
+
+impl core::convert::TryFrom<KeysV1> for super::Keys {
+    type Error = core::convert::Infallible;
+
+    fn try_from(keys_v1: KeysV1) -> Result<Self, Self::Error> {
+        Ok(Self {
+            id: keys_v1.id,
+            current: super::KeyRegistration {
+                key: keys_v1.current.key,
+                version: keys_v1.current.version,
+            },
+            expired: keys_v1.expired.map(|expired_key_v1| super::KeyRegistration {
+                key: expired_key_v1.key,
+                version: expired_key_v1.version,
+            }),
+        })
+    }
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct BootstrapRootV1 {
+    pub public_key: PemEncoded,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct RegisterKeyV1 {
+    pub public_key: PemEncoded,
+    pub id: String,
+    pub overwrite_existing: bool,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct NewPublicKeyV1 {
+    pub public_key: PemEncoded,
+    pub id: String,
+}
+
+impl From<super::NewPublicKey> for NewPublicKeyV1 {
+    fn from(new_public_key: super::NewPublicKey) -> Self {
+        Self { public_key: new_public_key.public_key, id: new_public_key.id }
+    }
+}
+
+impl core::convert::TryFrom<NewPublicKeyV1> for super::NewPublicKey {
+    type Error = core::convert::Infallible;
+
+    fn try_from(new_public_key_v1: NewPublicKeyV1) -> Result<Self, Self::Error> {
+        Ok(Self { public_key: new_public_key_v1.public_key, id: new_public_key_v1.id })
+    }
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct RotateKeyV1 {
+    pub payload: NewPublicKeyV1,
+    pub previous_signing_key: PemEncoded,
+    pub previous_signature: Vec<u8>,
+    pub new_signing_key: PemEncoded,
+    pub new_signature: Vec<u8>,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct SetPolicyV1 {
+    pub id: String,
+    pub policy: Policy,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct SignedOperationPayloadV1 {
+    pub operation: OperationV1,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct SignedOperationV1 {
+    pub payload: SignedOperationPayloadV1,
+    pub verifying_key: PemEncoded,
+    pub signature: Vec<u8>,
+}
+
+impl From<super::SignedOperation> for SignedOperationV1 {
+    fn from(signed_operation: super::SignedOperation) -> Self {
+        Self {
+            payload: SignedOperationPayloadV1 {
+                operation: signed_operation.payload.operation.into(),
+            },
+            verifying_key: signed_operation.verifying_key,
+            signature: signed_operation.signature,
+        }
+    }
+}
+
+impl From<super::Operation> for OperationV1 {
+    fn from(operation: super::Operation) -> Self {
+        match operation {
+            super::Operation::RegisterKey(register_key) =>
+                OperationV1::RegisterKey(register_key.into()),
+            super::Operation::RotateKey(rotate_key) =>
+                OperationV1::RotateKey(rotate_key.into()),
+            super::Operation::SetPolicy(set_policy) =>
+                OperationV1::SetPolicy(set_policy.into()),
+        }
+    }
+}
+
+impl From<super::RegisterKey> for RegisterKeyV1 {
+    fn from(register_key: super::RegisterKey) -> Self {
+        Self {
+            public_key: register_key.public_key,
+            id: register_key.id,
+            overwrite_existing: register_key.overwrite_existing,
+        }
+    }
+}
+
+impl From<super::RotateKey> for RotateKeyV1 {
+    fn from(rotate_key: super::RotateKey) -> Self {
+        Self {
+            payload: rotate_key.payload.into(),
+            previous_signing_key: rotate_key.previous_signing_key,
+            previous_signature: rotate_key.previous_signature,
+            new_signing_key: rotate_key.new_signing_key,
+            new_signature: rotate_key.new_signature,
+        }
+    }
+}
+
+impl From<super::SetPolicy> for SetPolicyV1 {
+    fn from(set_policy: super::SetPolicy) -> Self {
+        Self { id: set_policy.id, policy: set_policy.policy }
+    }
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, Clone, TypeInfo, PartialEq, Eq)]
+pub enum OperationV1 {
+    RegisterKey(RegisterKeyV1),
+    RotateKey(RotateKeyV1),
+    SetPolicy(SetPolicyV1),
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub struct OpaSubmissionV1 {
+    pub version: String,
+    pub correlation_id: [u8; 16],
+    pub span_id: u64,
+    pub payload: PayloadV1,
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq, Eq)]
+pub enum PayloadV1 {
+    BootstrapRoot(BootstrapRootV1),
+    SignedOperation(SignedOperationV1),
+}
+
+#[derive(Encode, EncodeAsType, DecodeAsType, Decode, Debug, TypeInfo, Clone, PartialEq)]
+pub struct PolicyV1(Vec<u8>);
+
+impl PolicyV1 {
+    pub fn into_vec(self) -> Vec<u8> {
+        self.0
+    }
+}
+
+impl MaxEncodedLen for PolicyV1 {
+    fn max_encoded_len() -> usize {
+        1024 * 1024 * 10
+    }
+}
+
+impl From<Policy> for codec::PolicyV1 {
+    fn from(item: Policy) -> Self {
+        Self(item.0)
+    }
+}
+
+impl core::convert::TryFrom<codec::PolicyV1> for Policy {
+    type Error = core::convert::Infallible;
+
+    fn try_from(value: codec::PolicyV1) -> Result<Self, Self::Error> {
+        Ok(Self(value.0))
+    }
+}
+
+#[derive(Encode, Decode, Debug, TypeInfo, Clone, PartialEq)]
+pub struct PolicyMetaV1 {
+    pub id: String,
+    pub hash: H128,
+    pub policy_address: PolicyAddress,
+}
+
+impl MaxEncodedLen for PolicyMetaV1 {
+    fn max_encoded_len() -> usize {
+        1024 + H128::max_encoded_len() + PolicyAddress::max_encoded_len()
+    }
+}
+
+impl From<super::PolicyMeta> for codec::PolicyMetaV1 {
+    fn from(item: super::PolicyMeta) -> Self {
+        Self { id: item.id, hash: item.hash, policy_address: item.policy_address }
+    }
+}
+
+impl core::convert::TryFrom<codec::PolicyMetaV1> for super::PolicyMeta {
+    type Error = core::convert::Infallible;
+
+    fn try_from(value: codec::PolicyMetaV1) -> Result<Self, Self::Error> {
+        Ok(Self { id: value.id, hash: value.hash, policy_address: value.policy_address })
+    }
+}
+
+impl From<codec::BootstrapRootV1> for BootstrapRoot {
+    fn from(item: codec::BootstrapRootV1) -> Self {
+        Self { public_key: item.public_key }
+    }
+}
+
+impl From<BootstrapRoot> for codec::BootstrapRootV1 {
+    fn from(item: BootstrapRoot) -> Self {
+        tracing::debug!(target: "codec_conversion", "Converting BootstrapRoot to BootstrapRootV1");
+        Self { public_key: item.public_key }
+    }
+}
+
+impl From<codec::PayloadV1> for Payload {
+    fn from(item: codec::PayloadV1) -> Self {
+        match item {
+            codec::PayloadV1::BootstrapRoot(v) => Self::BootstrapRoot(v.into()),
+            codec::PayloadV1::SignedOperation(v) => Self::SignedOperation(v.into()),
+        }
+    }
+}
+
+impl From<codec::OpaSubmissionV1> for OpaSubmission {
+    fn from(item: codec::OpaSubmissionV1) -> Self {
+        Self {
+            correlation_id: item.correlation_id,
+            version: item.version,
+            span_id: item.span_id,
+            payload: item.payload.into(),
+        }
+    }
+}
+
+impl From<OpaSubmission> for codec::OpaSubmissionV1 {
+    fn from(item: OpaSubmission) -> Self {
+        tracing::debug!(target: "codec_conversion", "Converting OpaSubmission to OpaSubmissionV1");
+        Self {
+            version: item.version,
+            correlation_id: item.correlation_id,
+            span_id: item.span_id,
+            payload: match item.payload {
+                Payload::BootstrapRoot(v) => {
+                    tracing::trace!(target: "codec_conversion", "Payload is BootstrapRoot");
+                    codec::PayloadV1::BootstrapRoot(v.into())
+                },
+                Payload::SignedOperation(v) => {
+                    tracing::trace!(target: "codec_conversion", "Payload is SignedOperation");
+                    codec::PayloadV1::SignedOperation(v.into())
+                },
+            },
+        }
+    }
+}
+
+impl From<codec::OperationV1> for Operation {
+    fn from(item: codec::OperationV1) -> Self {
+        match item {
+            codec::OperationV1::RegisterKey(v) => Self::RegisterKey(v.into()),
+            codec::OperationV1::RotateKey(v) => Self::RotateKey(v.into()),
+            codec::OperationV1::SetPolicy(v) => Self::SetPolicy(v.into()),
+        }
+    }
+}
+
+impl From<codec::SignedOperationV1> for SignedOperation {
+    fn from(item: codec::SignedOperationV1) -> Self {
+        Self {
+            payload: SignedOperationPayload { operation: item.payload.operation.into() },
+            verifying_key: item.verifying_key,
+            signature: item.signature,
+        }
+    }
+}
+
+impl From<codec::RotateKeyV1> for RotateKey {
+    fn from(item: codec::RotateKeyV1) -> Self {
+        Self {
+            payload: NewPublicKey { public_key: item.payload.public_key, id: item.payload.id },
+            previous_signing_key: item.previous_signing_key,
+            previous_signature: item.previous_signature,
+            new_signing_key: item.new_signing_key,
+            new_signature: item.new_signature,
+        }
+    }
+}
+
+impl From<codec::RegisterKeyV1> for RegisterKey {
+    fn from(item: codec::RegisterKeyV1) -> Self {
+        Self {
+            public_key: item.public_key,
+            id: item.id,
+            overwrite_existing: item.overwrite_existing,
+        }
+    }
+}
+
+impl From<codec::SetPolicyV1> for SetPolicy {
+    fn from(item: codec::SetPolicyV1) -> Self {
+        Self { id: item.id, policy: item.policy }
+    }
+}
+
+impl From<SignedOperationPayload> for codec::SignedOperationPayloadV1 {
+    fn from(item: SignedOperationPayload) -> Self {
+        codec::SignedOperationPayloadV1 {
+            operation: match item.operation {
+                Operation::RegisterKey(v) => codec::OperationV1::RegisterKey(v.into()),
+                Operation::RotateKey(v) => codec::OperationV1::RotateKey(v.into()),
+                Operation::SetPolicy(v) => codec::OperationV1::SetPolicy(v.into()),
+            },
+        }
+    }
+}
 }
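Taken together, the paired `From`/`TryFrom` impls above give a symmetric mapping between the domain types and their versioned SCALE wire types. A minimal round-trip sketch, assuming the `codec` module above is in scope and `submission` is any domain `OpaSubmission` value:

```rust
use parity_scale_codec::{Decode, Encode};

fn roundtrip(submission: OpaSubmission) -> OpaSubmission {
    // Domain type -> versioned wire type
    let v1: codec::OpaSubmissionV1 = submission.into();
    // SCALE-encode for transport
    let bytes = v1.encode();
    // Decode the wire type back and convert to the domain type
    let decoded = codec::OpaSubmissionV1::decode(&mut &bytes[..])
        .expect("round-trip of a freshly encoded value");
    decoded.into()
}
```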
diff --git a/crates/common/src/opa/std.rs b/crates/common/src/opa/std.rs
index 6e1b25250..7efaf546b 100644
--- a/crates/common/src/opa/std.rs
+++ b/crates/common/src/opa/std.rs
@@ -18,236 +18,236 @@ pub struct EmbeddedOpaPolicies;

 // Prefer these functions over the core ones in std, as they are more efficient
 pub fn policy_address(id: impl AsRef<str>) -> PolicyAddress {
+    sp_core::blake2_128(format!("opa:policy:binary:{}", id.as_ref()).as_bytes()).into()
 }

 // Prefer these functions over the core ones in std, as they are more efficient
 pub fn policy_meta_address(id: impl AsRef<str>) -> PolicyMetaAddress {
+    sp_core::blake2_128(format!("opa:policy:meta:{}", id.as_ref()).as_bytes()).into()
 }

 // Prefer these functions over the core ones in std, as they are more efficient
 pub fn key_address(id: impl AsRef<str>) -> KeyAddress {
+    sp_core::blake2_128(format!("opa:keys:{}", id.as_ref()).as_bytes()).into()
 }

 #[derive(Error, Debug)]
 pub enum FromUrlError {
+    #[error("HTTP error while attempting to read from URL: {0}")]
+    HTTP(
+        #[from]
+        #[source]
+        reqwest::Error,
+    ),
+
+    #[error("Invalid URL scheme: {0}")]
+    InvalidUrlScheme(String),
+
+    #[error("IO error while attempting to read from URL: {0}")]
+    IO(
+        #[from]
+        #[source]
+        std::io::Error,
+    ),
 }

 pub enum PathOrUrl {
+    File(PathBuf),
+    Url(Url),
 }

 pub async fn load_bytes_from_url(url: &str) -> Result<Vec<u8>, FromUrlError> {
+    let path_or_url = match url.parse::<Url>() {
+        Ok(url) => PathOrUrl::Url(url),
+        Err(_) => PathOrUrl::File(PathBuf::from(url)),
+    };
+
+    let content = match path_or_url {
+        PathOrUrl::File(path) => {
+            let mut file = File::open(path)?;
+            let mut buf = Vec::new();
+            file.read_to_end(&mut buf)?;
+            Ok(buf)
+        },
+        PathOrUrl::Url(url) => match url.scheme() {
+            "file" => {
+                let mut file = File::open(url.path())?;
+                let mut buf = Vec::new();
+                file.read_to_end(&mut buf)?;
+                Ok(buf)
+            },
+            "http" | "https" => Ok(reqwest::get(url).await?.bytes().await?.into()),
+            _ => Err(FromUrlError::InvalidUrlScheme(url.scheme().to_owned())),
+        },
+    }?;
+
+    Ok(content)
 }

 pub fn load_bytes_from_stdin() -> Result<Vec<u8>, std::io::Error> {
+    let mut buffer = Vec::new();
+    let mut stdin = std::io::stdin();
+    let _ = stdin.read_to_end(&mut buffer)?;
+    Ok(buffer)
 }
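The address helpers and the loader compose into a single fetch step. A sketch under the assumption of a Tokio runtime; the policy id and URL below are illustrative only:

```rust
async fn fetch_policy_bytes() -> Result<(PolicyAddress, Vec<u8>), FromUrlError> {
    // 16-byte blake2_128 address derived from the "opa:policy:binary:{id}" key layout
    let address = policy_address("allow_transactions");
    // load_bytes_from_url accepts a bare filesystem path, a file:// URL, or an http(s):// URL
    let bytes = load_bytes_from_url("https://example.com/policies/policy.wasm").await?;
    Ok((address, bytes))
}
```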
 #[derive(Debug, Error)]
 pub enum OpaExecutorError {
+    #[error("Access denied")]
+    AccessDenied,
+
+    #[error("Identity error: {0}")]
+    IdentityError(
+        #[from]
+        #[source]
+        IdentityError,
+    ),
+
+    #[error("Error loading OPA policy: {0}")]
+    PolicyLoaderError(
+        #[from]
+        #[source]
+        PolicyLoaderError,
+    ),
+
+    #[error("Error evaluating OPA policy: {0}")]
+    OpaEvaluationError(
+        #[from]
+        #[source]
+        anyhow::Error,
+    ),
 }

 #[async_trait::async_trait]
 pub trait OpaExecutor {
+    /// Evaluate the loaded OPA instance against the provided identity and context
+    async fn evaluate(&mut self, id: &AuthId, context: &OpaData) -> Result<(), OpaExecutorError>;
 }

 #[derive(Clone, Debug)]
 pub struct ExecutorContext {
+    executor: Arc<Mutex<WasmtimeOpaExecutor>>,
+    hash: String,
 }

 impl ExecutorContext {
+    #[instrument(skip(self), level = "trace", ret(Debug))]
+    pub async fn evaluate(&self, id: &AuthId, context: &OpaData) -> Result<(), OpaExecutorError> {
+        self.executor.lock().await.evaluate(id, context).await
+    }
+
+    pub fn from_loader<L: PolicyLoader>(loader: &L) -> Result<Self, OpaExecutorError> {
+        Ok(Self {
+            executor: Arc::new(Mutex::new(WasmtimeOpaExecutor::from_loader(loader)?)),
+            hash: loader.hash(),
+        })
+    }
+
+    pub fn hash(&self) -> &str {
+        &self.hash
+    }
 }

 #[derive(Debug)]
 pub struct WasmtimeOpaExecutor {
+    opa: Opa,
+    entrypoint: String,
 }

 impl WasmtimeOpaExecutor {
+    /// Build a `WasmtimeOpaExecutor` from the `PolicyLoader` provided
+    pub fn from_loader<L: PolicyLoader>(loader: &L) -> Result<Self, OpaExecutorError> {
+        Ok(Self { opa: loader.build_opa()?, entrypoint: loader.get_entrypoint().to_owned() })
+    }
 }

 #[async_trait::async_trait]
 impl OpaExecutor for WasmtimeOpaExecutor {
+    #[instrument(level = "trace", skip(self))]
+    async fn evaluate(&mut self, id: &AuthId, context: &OpaData) -> Result<(), OpaExecutorError> {
+        self.opa.set_data(context)?;
+        let input = id.identity()?;
+        match self.opa.eval(&self.entrypoint, &input)? {
+            true => Ok(()),
+            false => Err(OpaExecutorError::AccessDenied),
+        }
+    }
 }

 #[derive(Debug, Error)]
 pub enum PolicyLoaderError {
+    #[error("Failed to read embedded OPA policies")]
+    EmbeddedOpaPolicies,
+
+    #[error("Policy not found: {0}")]
+    MissingPolicy(String),
+
+    #[error("OPA bundle I/O error: {0}")]
+    OpaBundleError(
+        #[from]
+        #[source]
+        opa::bundle::Error,
+    ),
+
+    #[error("Error loading OPA policy: {0}")]
+    Substrate(
+        #[from]
+        #[source]
+        anyhow::Error,
+    ),
+
+    #[error("Error loading from URL: {0}")]
+    FromUrl(
+        #[from]
+        #[source]
+        FromUrlError,
+    ),
 }

 #[async_trait::async_trait]
 pub trait PolicyLoader {
+    /// Set address of OPA policy
+    fn set_address(&mut self, address: &str);
+
+    /// Set OPA policy
+    fn set_rule_name(&mut self, policy: &str);
+
+    /// Set entrypoint for OPA policy
+    fn set_entrypoint(&mut self, entrypoint: &str);
+
+    fn get_address(&self) -> &str;
+
+    fn get_rule_name(&self) -> &str;
+
+    fn get_entrypoint(&self) -> &str;
+
+    fn get_policy(&self) -> &[u8];
+
+    /// Load OPA policy from address set in `PolicyLoader`
+    async fn load_policy(&mut self) -> Result<(), PolicyLoaderError>;
+
+    /// Load OPA policy from provided bytes
+    fn load_policy_from_bytes(&mut self, policy: &[u8]);
+
+    /// Return a built OPA instance from the cached policy
+    #[instrument(level = "trace", skip(self), ret)]
+    fn build_opa(&self) -> Result<Opa, PolicyLoaderError> {
+        Ok(Opa::new().build(self.get_policy())?)
+    }
+
+    /// Load OPA policy from provided policy bundle
+    fn load_policy_from_bundle(&mut self, bundle: &Bundle) -> Result<(), PolicyLoaderError> {
+        let rule = self.get_rule_name();
+        self.load_policy_from_bytes(
+            bundle
+                .wasm_policies
+                .iter()
+                .find(|p| p.entrypoint == rule)
+                .map(|p| p.bytes.as_ref())
+                .ok_or(PolicyLoaderError::MissingPolicy(rule.to_string()))?,
+        );
+        Ok(())
+    }
+
+    fn hash(&self) -> String;
 }
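Because `build_opa` and `load_policy_from_bundle` are default methods, a conforming loader only has to store its configuration and bytes. A minimal in-memory sketch, useful for tests, which plugs straight into `ExecutorContext::from_loader`; the blake2_128-over-bytes hash and the `hex` crate are assumptions for illustration, not necessarily how this crate's real loaders derive their hash:

```rust
#[derive(Default)]
struct InMemoryLoader {
    address: String,
    rule_name: String,
    entrypoint: String,
    policy: Vec<u8>,
}

#[async_trait::async_trait]
impl PolicyLoader for InMemoryLoader {
    fn set_address(&mut self, address: &str) { self.address = address.to_owned() }
    fn set_rule_name(&mut self, policy: &str) { self.rule_name = policy.to_owned() }
    fn set_entrypoint(&mut self, entrypoint: &str) { self.entrypoint = entrypoint.to_owned() }
    fn get_address(&self) -> &str { &self.address }
    fn get_rule_name(&self) -> &str { &self.rule_name }
    fn get_entrypoint(&self) -> &str { &self.entrypoint }
    fn get_policy(&self) -> &[u8] { &self.policy }

    async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> {
        // Nothing to fetch: the policy is supplied up front via bytes
        Ok(())
    }

    fn load_policy_from_bytes(&mut self, policy: &[u8]) {
        self.policy = policy.to_vec()
    }

    fn hash(&self) -> String {
        // Assumed hash scheme for this sketch only
        hex::encode(sp_core::blake2_128(&self.policy))
    }
}
```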
diff --git a/crates/common/src/prov/id/diesel_bindings.rs b/crates/common/src/prov/id/diesel_bindings.rs
index d5d0722f3..ddb472c4e 100644
--- a/crates/common/src/prov/id/diesel_bindings.rs
+++ b/crates/common/src/prov/id/diesel_bindings.rs
@@ -1,47 +1,47 @@
 use super::*;
 use diesel::{
+    backend::Backend,
+    deserialize::FromSql,
+    serialize::{Output, ToSql},
+    sql_types::Text,
 };

 impl<DB> ToSql<Text, DB> for Role
-    where
-        DB: Backend,
-        String: ToSql<Text, DB>,
+where
+    DB: Backend,
+    String: ToSql<Text, DB>,
 {
+    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> diesel::serialize::Result {
+        self.0.to_sql(out)
+    }
 }

 impl<DB> FromSql<Text, DB> for Role
-    where
-        DB: Backend,
-        String: FromSql<Text, DB>,
+where
+    DB: Backend,
+    String: FromSql<Text, DB>,
 {
+    fn from_sql(bytes: <DB as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> {
+        Ok(Self(String::from_sql(bytes)?))
+    }
 }

 impl<DB> ToSql<Text, DB> for ExternalId
-    where
-        DB: Backend,
-        String: ToSql<Text, DB>,
+where
+    DB: Backend,
+    String: ToSql<Text, DB>,
 {
+    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> diesel::serialize::Result {
+        self.0.to_sql(out)
+    }
 }

 impl<DB> FromSql<Text, DB> for ExternalId
-    where
-        DB: Backend,
-        String: FromSql<Text, DB>,
+where
+    DB: Backend,
+    String: FromSql<Text, DB>,
 {
+    fn from_sql(bytes: <DB as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> {
+        Ok(Self(String::from_sql(bytes)?))
+    }
 }
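Because these newtypes delegate to `String`'s `ToSql`/`FromSql`, they can back plain `Text` columns in any diesel backend. A sketch only; the `agents` table and row struct below are hypothetical, not this crate's real schema:

```rust
use diesel::prelude::*;

diesel::table! {
    agents (id) {
        id -> Integer,
        external_id -> Text,
    }
}

#[derive(Queryable)]
struct AgentRow {
    id: i32,
    external_id: ExternalId, // Text column decoded straight into the newtype
}
```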
diff --git a/crates/common/src/prov/id/graphlql_scalars.rs b/crates/common/src/prov/id/graphlql_scalars.rs
index 325901240..d6d9b1706 100644
--- a/crates/common/src/prov/id/graphlql_scalars.rs
+++ b/crates/common/src/prov/id/graphlql_scalars.rs
@@ -8,74 +8,74 @@ async_graphql::scalar!(ChronicleJSON);

 /// Derived from an `Activity`'s or `Agent`'s or `Entity`'s subtype.
 /// The built-in GraphQL field `__TypeName` should be used for union queries.
 impl ScalarType for DomaintypeId {
+    fn parse(value: Value) -> InputValueResult<Self> {
+        if let Value::String(value) = value {
+            // Parse the string value
+            Ok(DomaintypeId::try_from(value)?)
+        } else {
+            // If the type does not match
+            Err(InputValueError::expected_type(value))
+        }
+    }
+
+    fn to_value(&self) -> Value {
+        Value::String(self.to_string())
+    }
 }

 #[Scalar(name = "EntityID")]
 /// This is derived from an `Entity`'s externalId, but clients
 /// should not attempt to synthesize it themselves.
 impl ScalarType for EntityId {
+    fn parse(value: Value) -> InputValueResult<Self> {
+        if let Value::String(value) = value {
+            // Parse the string value
+            Ok(EntityId::try_from(value)?)
+        } else {
+            // If the type does not match
+            Err(InputValueError::expected_type(value))
+        }
+    }
+
+    fn to_value(&self) -> Value {
+        Value::String(self.to_string())
+    }
 }

 #[Scalar(name = "AgentID")]
 /// This is derived from an `Agent`'s externalId, but clients
 /// should not attempt to synthesize it themselves.
 impl ScalarType for AgentId {
+    fn parse(value: Value) -> InputValueResult<Self> {
+        if let Value::String(value) = value {
+            // Parse the string value
+            Ok(AgentId::try_from(value)?)
+        } else {
+            // If the type does not match
+            Err(InputValueError::expected_type(value))
+        }
+    }
+
+    fn to_value(&self) -> Value {
+        Value::String(self.to_string())
+    }
 }

 #[Scalar(name = "ActivityID")]
 /// This is derived from an `Activity`'s externalId, but clients
 /// should not attempt to synthesize it themselves.
 impl ScalarType for ActivityId {
+    fn parse(value: Value) -> InputValueResult<Self> {
+        if let Value::String(value) = value {
+            // Parse the string value
+            Ok(ActivityId::try_from(value)?)
+        } else {
+            // If the type does not match
+            Err(InputValueError::expected_type(value))
+        }
+    }
+
+    fn to_value(&self) -> Value {
+        Value::String(self.to_string())
+    }
 }
diff --git a/crates/common/src/prov/id/mod.rs b/crates/common/src/prov/id/mod.rs
index dd38d80b4..f27fc34af 100644
--- a/crates/common/src/prov/id/mod.rs
+++ b/crates/common/src/prov/id/mod.rs
@@ -16,9 +16,7 @@ mod diesel_bindings;

 use parity_scale_codec::{alloc::string::String, alloc::vec::Vec};
 #[cfg(not(feature = "std"))]
-use scale_info::{
-    prelude::borrow::ToOwned, prelude::string::ToString, prelude::*,
-};
+use scale_info::{prelude::borrow::ToOwned, prelude::string::ToString, prelude::*};

 #[cfg(feature = "diesel-bindings")]
 use diesel::{AsExpression, FromSqlRow};
@@ -33,16 +31,16 @@ use thiserror_no_std::Error;

 #[derive(Debug, Error)]
 pub enum ParseIriError {
+    #[error("Not an IRI")]
+    NotAnIri(String),
+    #[error("Unparsable Chronicle IRI")]
+    UnparsableIri(String),
+    #[error("Unparsable UUID")]
+    UnparsableUuid(uuid::Error),
+    #[error("Unexpected IRI type")]
+    NotAChronicleUri(String),
+    #[error("Expected {component}")]
+    MissingComponent { component: String },
 }

 // Percent decoded, and has the correct authority
@@ -51,980 +49,980 @@ pub enum ParseIriError {
 pub struct ProbableChronicleCURIE(Vec<String>);
 impl ProbableChronicleCURIE {
+    fn from_string(str: String) -> Result<Self, ParseIriError> {
+        let uri = iri_string::types::UriString::try_from(str)
+            .map_err(|e| ParseIriError::NotAnIri(e.into_source()))?;
+
+        Self::from_uri(uri)
+    }
+
+    // Take long or short form uris and return a short form iri
+    fn from_uri(uri: UriString) -> Result<Self, ParseIriError> {
+        let mut uri = uri;
+
+        if uri.as_str().starts_with(Chronicle::LONG_PREFIX) {
+            uri = UriString::from_str(
+                &uri.as_str()
+                    .replace(Chronicle::LONG_PREFIX, &(Chronicle::PREFIX.to_owned() + ":")),
+            )
+            .unwrap()
+        }
+
+        for prefix in Chronicle::LEGACY_PREFIXES {
+            if uri.as_str().starts_with(prefix) {
+                uri = UriString::from_str(
+                    &uri.as_str().replace(prefix, &(Chronicle::PREFIX.to_owned() + ":")),
+                )
+                .unwrap()
+            }
+        }
+
+        let iri: IriString = uri.into();
+
+        if iri.scheme_str() != Chronicle::PREFIX {
+            return Err(ParseIriError::NotAChronicleUri(iri.to_string()));
+        }
+
+        Ok(Self(
+            iri.path_str()
+                .split(':')
+                .map(|x| percent_encoding::percent_decode_str(x).decode_utf8_lossy().to_string())
+                .collect::<Vec<String>>(),
+        ))
+    }
+
+    fn path_components(&self) -> impl Iterator<Item = &str> {
+        self.0.iter().map(|x| x.as_ref())
+    }
 }

 impl core::fmt::Display for ProbableChronicleCURIE {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{}", self.0.join(":"))
+    }
 }
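A module-internal sketch of the normalization above (`from_string` is private): long-form and compact spellings converge on the same percent-decoded component list, with `Chronicle::LONG_PREFIX` taken from the vocabulary module and not repeated here:

```rust
fn curie_normalization() -> Result<(), ParseIriError> {
    let short = ProbableChronicleCURIE::from_string("chronicle:agent:bob".to_owned())?;
    let long =
        ProbableChronicleCURIE::from_string(format!("{}agent:bob", Chronicle::LONG_PREFIX))?;
    // Display joins the percent-decoded components with ':'
    assert_eq!(short.to_string(), long.to_string());
    Ok(())
}
```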
 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 #[cfg_attr(feature = "diesel-bindings", derive(AsExpression, FromSqlRow))]
 #[cfg_attr(feature = "diesel-bindings", diesel(sql_type = diesel::sql_types::Text))]
 pub struct Role(pub String);

 impl core::fmt::Display for Role {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{}", self.0)
+    }
 }

 impl<T> From<T> for Role
-    where
-        T: AsRef<str>,
+where
+    T: AsRef<str>,
 {
+    fn from(s: T) -> Self {
+        Role(s.as_ref().to_owned())
+    }
 }

 impl Role {
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
 }

 impl AsRef<str> for &Role {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
 }

 #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 #[cfg_attr(feature = "diesel-bindings", derive(AsExpression, FromSqlRow))]
 #[cfg_attr(feature = "diesel-bindings", diesel(sql_type = diesel::sql_types::Text))]
 pub struct ExternalId(String);

 impl core::fmt::Display for ExternalId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        write!(f, "{}", self.0)
+    }
 }

 #[cfg(feature = "graphql-bindings")]
 async_graphql::scalar!(ExternalId);

 impl<T> From<T> for ExternalId
-    where
-        T: AsRef<str>,
+where
+    T: AsRef<str>,
 {
+    fn from(s: T) -> Self {
+        ExternalId(s.as_ref().to_owned())
+    }
 }

 impl ExternalId {
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
 }

 impl AsRef<str> for &ExternalId {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
 }

 pub trait ExternalIdPart {
+    fn external_id_part(&self) -> &ExternalId;
 }

 pub trait UuidPart {
+    fn uuid_part(&self) -> Uuid;
 }

 /// Transform a chronicle IRI into its long-form representation
 pub trait FromCompact {
+    fn de_compact(&self) -> String;
 }

 impl<T: core::fmt::Display> FromCompact for T {
+    fn de_compact(&self) -> String {
+        let replace = Chronicle::PREFIX.to_string() + ":";
+        self.to_string().replace(&replace, Chronicle::LONG_PREFIX)
+    }
 }
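Since `FromCompact` is blanket-implemented for every `Display` type here, any id can be expanded from its compact `chronicle:` spelling to the long-form prefix. A small sketch:

```rust
fn expand_agent_id() {
    let agent = AgentId::from_external_id("bob");
    // "chronicle:agent:bob" -> "<Chronicle::LONG_PREFIX>agent:bob"
    let long_form = agent.de_compact();
    assert!(long_form.starts_with(Chronicle::LONG_PREFIX));
    assert!(long_form.ends_with("agent:bob"));
}
```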
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub enum ChronicleIri {
+    Namespace(NamespaceId),
+    Domaintype(DomaintypeId),
+    Entity(EntityId),
+    Agent(AgentId),
+    Activity(ActivityId),
+    Association(AssociationId),
+    Attribution(AttributionId),
+    Delegation(DelegationId),
 }

 #[cfg(feature = "parity-encoding")]
 impl parity_scale_codec::MaxEncodedLen for ChronicleIri {
+    fn max_encoded_len() -> usize {
+        2048usize
+    }
 }

 impl core::fmt::Display for ChronicleIri {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        match self {
+            ChronicleIri::Namespace(id) => write!(f, "{id}"),
+            ChronicleIri::Domaintype(id) => write!(f, "{id}"),
+            ChronicleIri::Entity(id) => write!(f, "{id}"),
+            ChronicleIri::Agent(id) => write!(f, "{id}"),
+            ChronicleIri::Activity(id) => write!(f, "{id}"),
+            ChronicleIri::Association(id) => write!(f, "{id}"),
+            ChronicleIri::Attribution(id) => write!(f, "{id}"),
+            ChronicleIri::Delegation(id) => write!(f, "{id}"),
+        }
+    }
 }

 impl From<NamespaceId> for ChronicleIri {
+    fn from(val: NamespaceId) -> Self {
+        ChronicleIri::Namespace(val)
+    }
 }

 impl From<DomaintypeId> for ChronicleIri {
+    fn from(val: DomaintypeId) -> Self {
+        ChronicleIri::Domaintype(val)
+    }
 }

 impl From<EntityId> for ChronicleIri {
+    fn from(val: EntityId) -> Self {
+        ChronicleIri::Entity(val)
+    }
 }

 impl From<AgentId> for ChronicleIri {
+    fn from(val: AgentId) -> Self {
+        ChronicleIri::Agent(val)
+    }
 }

 impl From<ActivityId> for ChronicleIri {
+    fn from(val: ActivityId) -> Self {
+        ChronicleIri::Activity(val)
+    }
 }

 impl From<AssociationId> for ChronicleIri {
+    fn from(val: AssociationId) -> Self {
+        ChronicleIri::Association(val)
+    }
 }

 impl From<AttributionId> for ChronicleIri {
+    fn from(val: AttributionId) -> Self {
+        ChronicleIri::Attribution(val)
+    }
 }

 impl From<DelegationId> for ChronicleIri {
+    fn from(val: DelegationId) -> Self {
+        ChronicleIri::Delegation(val)
+    }
 }

 impl core::str::FromStr for ChronicleIri {
+    type Err = ParseIriError;

+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        trace!(parsing_iri = %s);
+        //Compacted form, expand

+        let iri = ProbableChronicleCURIE::from_string(s.to_owned())?;

+        //TODO: this just needs to extract the first path component
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            ["agent", ..] => Ok(AgentId::try_from(iri)?.into()),
+            ["ns", ..] => Ok(NamespaceId::try_from(iri)?.into()),
+            ["activity", ..] => Ok(ActivityId::try_from(iri)?.into()),
+            ["entity", ..] => Ok(EntityId::try_from(iri)?.into()),
+            ["domaintype", ..] => Ok(DomaintypeId::try_from(iri)?.into()),
+            ["association", ..] => Ok(AssociationId::try_from(iri)?.into()),
+            ["attribution", ..] => Ok(AttributionId::try_from(iri)?.into()),
+            ["delegation", ..] => Ok(DelegationId::try_from(iri)?.into()),
+            _ => Err(ParseIriError::UnparsableIri(s.to_string())),
+        }
+    }
 }
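The `FromStr` impl dispatches on the first path component, so a single parse call covers every id family. A small sketch:

```rust
use core::str::FromStr;

fn classify(s: &str) -> Result<&'static str, ParseIriError> {
    Ok(match ChronicleIri::from_str(s)? {
        ChronicleIri::Agent(_) => "agent",
        ChronicleIri::Activity(_) => "activity",
        ChronicleIri::Entity(_) => "entity",
        _ => "other",
    })
}

// classify("chronicle:agent:bob") == Ok("agent")
```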
 impl ChronicleIri {
+    // Coerce this to a `NamespaceId`, if possible
+    pub fn namespace(self) -> Result<NamespaceId, ParseIriError> {
+        match self {
+            ChronicleIri::Namespace(ns) => Ok(ns),
+            _ => Err(ParseIriError::NotAChronicleUri(self.to_string())),
+        }
+    }
 }

 #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)]
 pub struct ChronicleJSON(pub serde_json::Value);

 fn optional_component(external_id: &str, component: &str) -> Result<Option<String>, ParseIriError> {
+    let kv = format!("{external_id}=");
+    if !component.starts_with(&*kv) {
+        return Err(ParseIriError::MissingComponent { component: external_id.to_string() });
+    }
+
+    match component.replace(&*kv, "") {
+        s if s.is_empty() => Ok(None),
+        s => Ok(Some(s)),
+    }
 }
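A module-internal sketch of `optional_component`'s contract: the `key=` tag must be present, while an empty value decodes to `None`:

```rust
fn optional_component_examples() -> Result<(), ParseIriError> {
    assert_eq!(optional_component("role", "role=")?, None);
    assert_eq!(
        optional_component("role", "role=publisher")?,
        Some("publisher".to_string())
    );
    // A different key is an error, not None
    assert!(optional_component("role", "activity=writing").is_err());
    Ok(())
}
```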
 // A composite identifier of agent, activity and role
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub struct DelegationId {
+    delegate: ExternalId,
+    responsible: ExternalId,
+    activity: Option<ExternalId>,
+    role: Option<Role>,
 }

 impl core::fmt::Display for DelegationId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.write_str(Into::<UriString>::into(self).as_str())
+    }
 }

 impl DelegationId {
+    pub fn from_component_ids(
+        delegate: &AgentId,
+        responsible: &AgentId,
+        activity: Option<&ActivityId>,
+        role: Option<impl AsRef<str>>,
+    ) -> Self {
+        Self {
+            delegate: delegate.external_id_part().clone(),
+            responsible: responsible.external_id_part().clone(),
+            activity: activity.map(|x| ExternalIdPart::external_id_part(x).to_owned()),
+            role: role.map(|x| Role::from(x.as_ref())),
+        }
+    }
+
+    pub fn delegate(&self) -> AgentId {
+        AgentId::from_external_id(&self.delegate)
+    }
+
+    pub fn responsible(&self) -> AgentId {
+        AgentId::from_external_id(&self.responsible)
+    }
+
+    pub fn activity(&self) -> Option<ActivityId> {
+        self.activity.as_ref().map(ActivityId::from_external_id)
+    }
+
+    pub fn role(&self) -> &Option<Role> {
+        &self.role
+    }
 }

 impl TryFrom<String> for DelegationId {
+    type Error = ParseIriError;

+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value)?.try_into()
+    }
 }

 impl TryFrom<UriString> for DelegationId {
+    type Error = ParseIriError;

+    fn try_from(value: UriString) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_uri(value)?.try_into()
+    }
 }

 impl TryFrom<ProbableChronicleCURIE> for DelegationId {
+    type Error = ParseIriError;

+    fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            [_, delegate, responsible, role, activity] => Ok(Self {
+                delegate: ExternalId::from(delegate),
+                responsible: ExternalId::from(responsible),
+                role: optional_component("role", role)?.map(Role::from),
+                activity: optional_component("activity", activity)?.map(ExternalId::from),
+            }),

+            _ => Err(ParseIriError::UnparsableIri(iri.to_string())),
+        }
+    }
 }

 impl From<&DelegationId> for UriString {
+    fn from(val: &DelegationId) -> Self {
+        Chronicle::delegation(
+            &AgentId::from_external_id(&val.delegate),
+            &AgentId::from_external_id(&val.responsible),
+            &val.activity().map(|n| ActivityId::from_external_id(n.external_id_part())),
+            &val.role,
+        )
+        .unwrap()
+    }
 }
 // A composite identifier of agent, activity and role
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub struct AssociationId {
+    agent: ExternalId,
+    activity: ExternalId,
+    role: Option<Role>,
 }

 impl core::fmt::Display for AssociationId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.write_str(Into::<UriString>::into(self).as_str())
+    }
 }

 impl AssociationId {
+    pub fn from_component_ids(
+        agent: &AgentId,
+        activity: &ActivityId,
+        role: Option<impl AsRef<str>>,
+    ) -> Self {
+        Self {
+            agent: agent.external_id_part().clone(),
+            activity: activity.external_id_part().clone(),
+            role: role.map(|x| Role::from(x.as_ref())),
+        }
+    }
+
+    pub fn agent(&self) -> AgentId {
+        AgentId::from_external_id(&self.agent)
+    }
+
+    pub fn activity(&self) -> ActivityId {
+        ActivityId::from_external_id(&self.activity)
+    }
 }

 impl TryFrom<String> for AssociationId {
+    type Error = ParseIriError;

+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value)?.try_into()
+    }
 }

 impl TryFrom<UriString> for AssociationId {
+    type Error = ParseIriError;

+    fn try_from(value: UriString) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_uri(value)?.try_into()
+    }
 }

 impl TryFrom<ProbableChronicleCURIE> for AssociationId {
+    type Error = ParseIriError;

+    fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            [_, agent, activity, role] => Ok(Self {
+                agent: ExternalId::from(agent),
+                activity: ExternalId::from(activity),
+                role: optional_component("role", role)?.map(Role::from),
+            }),

+            _ => Err(ParseIriError::UnparsableIri(iri.to_string())),
+        }
+    }
 }

 impl From<&AssociationId> for UriString {
+    fn from(val: &AssociationId) -> Self {
+        Chronicle::association(
+            &AgentId::from_external_id(&val.agent),
+            &ActivityId::from_external_id(&val.activity),
+            &val.role,
+        )
+        .unwrap()
+    }
 }
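Composite ids embed their optional role as a `role=...` component, so they survive a full string round-trip. A sketch using the association type just defined:

```rust
fn association_roundtrip() -> Result<(), ParseIriError> {
    let id = AssociationId::from_component_ids(
        &AgentId::from_external_id("bob"),
        &ActivityId::from_external_id("publication"),
        Some("editor"),
    );
    // Display renders the Chronicle IRI; TryFrom<String> parses it back
    let parsed = AssociationId::try_from(id.to_string())?;
    assert_eq!(id, parsed);
    Ok(())
}
```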
 // A composite identifier of agent, entity, and role
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub struct AttributionId {
+    agent: ExternalId,
+    entity: ExternalId,
+    role: Option<Role>,
 }

 impl core::fmt::Display for AttributionId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.write_str(Into::<UriString>::into(self).as_str())
+    }
 }

 impl AttributionId {
+    pub fn from_component_ids(
+        agent: &AgentId,
+        entity: &EntityId,
+        role: Option<impl AsRef<str>>,
+    ) -> Self {
+        Self {
+            agent: agent.external_id_part().clone(),
+            entity: entity.external_id_part().clone(),
+            role: role.map(|x| Role::from(x.as_ref())),
+        }
+    }
+
+    pub fn agent(&self) -> AgentId {
+        AgentId::from_external_id(&self.agent)
+    }
+
+    pub fn entity(&self) -> EntityId {
+        EntityId::from_external_id(&self.entity)
+    }
 }

 impl TryFrom<String> for AttributionId {
+    type Error = ParseIriError;

+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value)?.try_into()
+    }
 }

 impl TryFrom<UriString> for AttributionId {
+    type Error = ParseIriError;

+    fn try_from(value: UriString) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_uri(value)?.try_into()
+    }
 }

 impl TryFrom<ProbableChronicleCURIE> for AttributionId {
+    type Error = ParseIriError;

+    fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            [_, agent, entity, role] => Ok(Self {
+                agent: ExternalId::from(agent),
+                entity: ExternalId::from(entity),
+                role: optional_component("role", role)?.map(Role::from),
+            }),

+            _ => Err(ParseIriError::UnparsableIri(iri.to_string())),
+        }
+    }
 }

 impl From<&AttributionId> for UriString {
+    fn from(val: &AttributionId) -> Self {
+        Chronicle::attribution(
+            &AgentId::from_external_id(&val.agent),
+            &EntityId::from_external_id(&val.entity),
+            &val.role,
+        )
+        .unwrap()
+    }
 }
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub struct DomaintypeId(ExternalId);

 impl core::fmt::Display for DomaintypeId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.write_str(Into::<UriString>::into(self).as_str())
+    }
 }

 impl ExternalIdPart for DomaintypeId {
+    fn external_id_part(&self) -> &ExternalId {
+        &self.0
+    }
 }

 impl DomaintypeId {
+    pub fn from_external_id(external_id: impl AsRef<str>) -> Self {
+        Self(external_id.as_ref().into())
+    }
 }

 impl TryFrom<String> for DomaintypeId {
+    type Error = ParseIriError;

+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value)?.try_into()
+    }
 }

 impl TryFrom<UriString> for DomaintypeId {
+    type Error = ParseIriError;

+    fn try_from(value: UriString) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_uri(value)?.try_into()
+    }
 }

 impl TryFrom<ProbableChronicleCURIE> for DomaintypeId {
+    type Error = ParseIriError;

+    fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            [_, external_id] => Ok(Self(ExternalId::from(external_id))),
+            _ => Err(ParseIriError::UnparsableIri(iri.to_string())),
+        }
+    }
 }

 impl From<&DomaintypeId> for UriString {
+    fn from(val: &DomaintypeId) -> Self {
+        Chronicle::domaintype(&val.0).unwrap()
+    }
 }
 #[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Clone, Ord, PartialOrd)]
 #[cfg_attr(
+    feature = "parity-encoding",
+    derive(
+        scale_info::TypeInfo,
+        parity_scale_codec::Encode,
+        parity_scale_codec::Decode,
+        scale_encode::EncodeAsType
+    )
 )]
 pub struct NamespaceId {
+    external_id: ExternalId,
+    uuid: [u8; 16],
 }

 impl core::fmt::Display for NamespaceId {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        f.write_str(Into::<UriString>::into(self).as_str())
+    }
 }

 impl core::fmt::Debug for NamespaceId {
+    fn fmt(
+        &self,
+        f: &mut scale_info::prelude::fmt::Formatter<'_>,
+    ) -> scale_info::prelude::fmt::Result {
+        f.debug_struct("NamespaceId")
+            .field("external_id", &self.external_id)
+            .field("uuid", &Uuid::from_bytes(self.uuid))
+            .finish()
+    }
 }

 impl NamespaceId {
+    pub fn from_external_id(external_id: impl AsRef<str>, uuid: Uuid) -> Self {
+        Self { external_id: external_id.as_ref().into(), uuid: uuid.into_bytes() }
+    }
 }

 impl ExternalIdPart for NamespaceId {
+    fn external_id_part(&self) -> &ExternalId {
+        &self.external_id
+    }
 }

 impl UuidPart for NamespaceId {
+    fn uuid_part(&self) -> Uuid {
+        Uuid::from_bytes(self.uuid)
+    }
 }

 impl TryFrom<&'_ str> for NamespaceId {
+    type Error = ParseIriError;

+    fn try_from(value: &str) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value.to_owned())?.try_into()
+    }
 }

 impl TryFrom<String> for NamespaceId {
+    type Error = ParseIriError;

+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_string(value)?.try_into()
+    }
 }

 impl TryFrom<UriString> for NamespaceId {
+    type Error = ParseIriError;

+    fn try_from(value: UriString) -> Result<Self, Self::Error> {
+        ProbableChronicleCURIE::from_uri(value)?.try_into()
+    }
 }

 impl TryFrom<ProbableChronicleCURIE> for NamespaceId {
+    type Error = ParseIriError;

+    fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
+        match iri.path_components().collect::<Vec<_>>().as_slice() {
+            [_, external_id, uuid] => Ok(Self {
+                external_id: ExternalId::from(external_id),
+                uuid: Uuid::parse_str(uuid).map_err(ParseIriError::UnparsableUuid)?.into_bytes(),
+            }),

+            _ => Err(ParseIriError::UnparsableIri(format!("{:?}", iri))),
+        }
+    }
 }

 impl From<&NamespaceId> for UriString {
+    fn from(val: &NamespaceId) -> Self {
+        Chronicle::namespace(&val.external_id, &val.uuid_part()).unwrap()
+    }
 }
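A namespace id pairs an external id with a UUID, and both components are recoverable from the parsed form. A sketch; the UUID below is illustrative:

```rust
fn namespace_example() -> Result<(), ParseIriError> {
    let uuid = Uuid::parse_str("5a0ab5b8-eeb7-4812-9fe3-6dd69bd20cea")
        .map_err(ParseIriError::UnparsableUuid)?;
    let ns = NamespaceId::from_external_id("default", uuid);
    // String round-trip preserves both components
    let parsed = NamespaceId::try_from(ns.to_string())?;
    assert_eq!(parsed.uuid_part(), uuid);
    assert_eq!(parsed.external_id_part().as_str(), "default");
    Ok(())
}
```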
#[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
#[cfg_attr(
	feature = "parity-encoding",
	derive(
		scale_info::TypeInfo,
		parity_scale_codec::Encode,
		parity_scale_codec::Decode,
		scale_encode::EncodeAsType
	)
)]
pub struct EntityId(ExternalId);

impl core::fmt::Display for EntityId {
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
		f.write_str(Into::<UriString>::into(self).as_str())
	}
}

impl EntityId {
	pub fn from_external_id(external_id: impl AsRef<str>) -> Self {
		Self(external_id.as_ref().into())
	}
}

impl ExternalIdPart for EntityId {
	fn external_id_part(&self) -> &ExternalId {
		&self.0
	}
}

impl TryFrom<String> for EntityId {
	type Error = ParseIriError;

	fn try_from(value: String) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_string(value)?.try_into()
	}
}

impl TryFrom<UriString> for EntityId {
	type Error = ParseIriError;

	fn try_from(value: UriString) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_uri(value)?.try_into()
	}
}

impl TryFrom<ProbableChronicleCURIE> for EntityId {
	type Error = ParseIriError;

	fn try_from(value: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
		match value.path_components().collect::<Vec<_>>().as_slice() {
			[_, external_id] => Ok(Self(ExternalId::from(external_id))),
			_ => Err(ParseIriError::UnparsableIri(value.to_string())),
		}
	}
}

impl From<&EntityId> for UriString {
	fn from(val: &EntityId) -> Self {
		Chronicle::entity(&val.0).unwrap()
	}
}

/// Input either a short-form `externalId`, e.g. "agreement",
/// or long-form Chronicle `id`, e.g. "chronicle:entity:agreement"
#[cfg_attr(feature = "graphql-bindings", derive(async_graphql::OneofObject))]
pub enum EntityIdOrExternal {
	ExternalId(String),
	Id(EntityId),
}

impl From<EntityIdOrExternal> for EntityId {
	fn from(input: EntityIdOrExternal) -> Self {
		match input {
			EntityIdOrExternal::ExternalId(external_id) => Self::from_external_id(external_id),
			EntityIdOrExternal::Id(id) => id,
		}
	}
}
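Per the doc comment, the two input forms are interchangeable: the short form is wrapped directly, while the long form is parsed through `ProbableChronicleCURIE` and reduced to its final path component. A sketch under the same assumptions (illustrative crate path):

use common::prov::{EntityId, ParseIriError}; // illustrative paths

fn both_forms_agree() -> Result<(), ParseIriError> {
	let short = EntityId::from_external_id("agreement");
	// The long form keeps only the last path component, so the two are equal
	let long = EntityId::try_from("chronicle:entity:agreement".to_string())?;
	assert_eq!(short, long);
	Ok(())
}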
#[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
#[cfg_attr(
	feature = "parity-encoding",
	derive(
		scale_info::TypeInfo,
		parity_scale_codec::Encode,
		parity_scale_codec::Decode,
		scale_encode::EncodeAsType
	)
)]
pub struct AgentId(ExternalId);

impl core::fmt::Display for AgentId {
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
		f.write_str(Into::<UriString>::into(self).as_str())
	}
}

impl AgentId {
	pub fn from_external_id(external_id: impl AsRef<str>) -> Self {
		Self(external_id.as_ref().into())
	}
}

impl ExternalIdPart for AgentId {
	fn external_id_part(&self) -> &ExternalId {
		&self.0
	}
}

impl TryFrom<String> for AgentId {
	type Error = ParseIriError;

	fn try_from(value: String) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_string(value)?.try_into()
	}
}

impl TryFrom<UriString> for AgentId {
	type Error = ParseIriError;

	fn try_from(value: UriString) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_uri(value)?.try_into()
	}
}

impl TryFrom<ProbableChronicleCURIE> for AgentId {
	type Error = ParseIriError;

	fn try_from(value: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
		match value.path_components().collect::<Vec<_>>().as_slice() {
			[_, external_id] => Ok(Self(ExternalId::from(external_id))),
			_ => Err(ParseIriError::UnparsableIri(value.to_string())),
		}
	}
}

impl From<&AgentId> for UriString {
	fn from(val: &AgentId) -> Self {
		Chronicle::agent(&val.0).unwrap()
	}
}

/// Input either a short-form `externalId`, e.g. "bob",
/// or long-form Chronicle `id`, e.g. "chronicle:agent:bob"
#[cfg_attr(feature = "graphql-bindings", derive(OneofObject))]
pub enum AgentIdOrExternal {
	ExternalId(String),
	Id(AgentId),
}

impl From<AgentIdOrExternal> for AgentId {
	fn from(input: AgentIdOrExternal) -> Self {
		match input {
			AgentIdOrExternal::ExternalId(external_id) => Self::from_external_id(external_id),
			AgentIdOrExternal::Id(id) => id,
		}
	}
}
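The `From<AgentIdOrExternal>` impl is what lets an API boundary accept the oneof input and collapse it immediately to the canonical id. A hypothetical handler-side sketch (names are illustrative):

use common::prov::{AgentId, AgentIdOrExternal}; // illustrative paths

// Whichever variant the client sent, downstream code works with AgentId
fn resolve_agent(input: AgentIdOrExternal) -> AgentId {
	input.into()
}

fn demo() {
	let by_name = resolve_agent(AgentIdOrExternal::ExternalId("bob".to_string()));
	let by_id = resolve_agent(AgentIdOrExternal::Id(AgentId::from_external_id("bob")));
	assert_eq!(by_name, by_id);
}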
#[derive(Serialize, Deserialize, PartialEq, Eq, Hash, Debug, Clone, Ord, PartialOrd)]
#[cfg_attr(
	feature = "parity-encoding",
	derive(
		scale_info::TypeInfo,
		parity_scale_codec::Encode,
		parity_scale_codec::Decode,
		scale_encode::EncodeAsType
	)
)]
pub struct ActivityId(ExternalId);

impl core::fmt::Display for ActivityId {
	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
		f.write_str(UriString::from(self).as_str())
	}
}

impl ActivityId {
	pub fn from_external_id(external_id: impl AsRef<str>) -> Self {
		Self(external_id.as_ref().into())
	}
}

impl ExternalIdPart for ActivityId {
	fn external_id_part(&self) -> &ExternalId {
		&self.0
	}
}

impl TryFrom<String> for ActivityId {
	type Error = ParseIriError;

	fn try_from(value: String) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_string(value)?.try_into()
	}
}

impl TryFrom<UriString> for ActivityId {
	type Error = ParseIriError;

	fn try_from(value: UriString) -> Result<Self, Self::Error> {
		ProbableChronicleCURIE::from_uri(value)?.try_into()
	}
}

impl TryFrom<ProbableChronicleCURIE> for ActivityId {
	type Error = ParseIriError;

	fn try_from(iri: ProbableChronicleCURIE) -> Result<Self, Self::Error> {
		match iri.path_components().collect::<Vec<_>>().as_slice() {
			[_, external_id] => Ok(Self(ExternalId::from(external_id))),
			_ => Err(ParseIriError::UnparsableIri(iri.to_string())),
		}
	}
}

impl From<&ActivityId> for UriString {
	fn from(val: &ActivityId) -> Self {
		Chronicle::activity(&val.0).unwrap()
	}
}

/// Input either a short-form `externalId`, e.g. "record",
/// or long-form Chronicle `id`, e.g. "chronicle:activity:record"
#[cfg_attr(feature = "graphql-bindings", derive(OneofObject))]
pub enum ActivityIdOrExternal {
	ExternalId(String),
	Id(ActivityId),
}

impl From<ActivityIdOrExternal> for ActivityId {
	fn from(input: ActivityIdOrExternal) -> Self {
		match input {
			ActivityIdOrExternal::ExternalId(external_id) => Self::from_external_id(external_id),
			ActivityIdOrExternal::Id(id) => id,
		}
	}
}

/// A `Namespace` ID reserved for Chronicle system use.
diff --git a/crates/common/src/prov/mod.rs b/crates/common/src/prov/mod.rs
index f8a95a7b9..29cfd0e9d 100644
--- a/crates/common/src/prov/mod.rs
+++ b/crates/common/src/prov/mod.rs
@@ -5,4 +5,3 @@ mod id;
 mod model;
 pub mod operations;
 pub mod vocab;
-
{value} {attempted}")?; + }, + ContradictionDetail::InvalidRange { start, end } => { + write!(f, "invalid range: {start} {end}")?; + }, + } + } + write!(f, " }}") + } } impl Contradiction { - pub fn start_date_alteration( - id: ChronicleIri, - namespace: NamespaceId, - value: DateTime, - attempted: DateTime, - ) -> Self { - Self { - id, - namespace, - contradiction: vec![ContradictionDetail::StartAlteration { - value: value.into(), - attempted: attempted.into(), - }], - } - } + pub fn start_date_alteration( + id: ChronicleIri, + namespace: NamespaceId, + value: DateTime, + attempted: DateTime, + ) -> Self { + Self { + id, + namespace, + contradiction: vec![ContradictionDetail::StartAlteration { + value: value.into(), + attempted: attempted.into(), + }], + } + } - pub fn end_date_alteration( - id: ChronicleIri, - namespace: NamespaceId, - value: DateTime, - attempted: DateTime, - ) -> Self { - Self { - id, - namespace, - contradiction: vec![ContradictionDetail::EndAlteration { - value: value.into(), - attempted: attempted.into(), - }], - } - } + pub fn end_date_alteration( + id: ChronicleIri, + namespace: NamespaceId, + value: DateTime, + attempted: DateTime, + ) -> Self { + Self { + id, + namespace, + contradiction: vec![ContradictionDetail::EndAlteration { + value: value.into(), + attempted: attempted.into(), + }], + } + } - pub fn invalid_range( - id: ChronicleIri, - namespace: NamespaceId, - start: DateTime, - end: DateTime, - ) -> Self { - Self { - id, - namespace, - contradiction: vec![ContradictionDetail::InvalidRange { - start: start.into(), - end: end.into(), - }], - } - } + pub fn invalid_range( + id: ChronicleIri, + namespace: NamespaceId, + start: DateTime, + end: DateTime, + ) -> Self { + Self { + id, + namespace, + contradiction: vec![ContradictionDetail::InvalidRange { + start: start.into(), + end: end.into(), + }], + } + } - pub fn attribute_value_change( - id: ChronicleIri, - namespace: NamespaceId, - changes: Vec<(String, Attribute, Attribute)>, - ) -> Self { - Self { - id, - namespace, - contradiction: changes - .into_iter() - .map(|(name, value, attempted)| ContradictionDetail::AttributeValueChange { - name, - value, - attempted, - }) - .collect(), - } - } + pub fn attribute_value_change( + id: ChronicleIri, + namespace: NamespaceId, + changes: Vec<(String, Attribute, Attribute)>, + ) -> Self { + Self { + id, + namespace, + contradiction: changes + .into_iter() + .map(|(name, value, attempted)| ContradictionDetail::AttributeValueChange { + name, + value, + attempted, + }) + .collect(), + } + } } #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] #[cfg_attr( - feature = "parity-encoding", - derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode) + feature = "parity-encoding", + derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode) )] pub enum ContradictionDetail { - AttributeValueChange { name: String, value: Attribute, attempted: Attribute }, - StartAlteration { value: TimeWrapper, attempted: TimeWrapper }, - EndAlteration { value: TimeWrapper, attempted: TimeWrapper }, - InvalidRange { start: TimeWrapper, end: TimeWrapper }, + AttributeValueChange { name: String, value: Attribute, attempted: Attribute }, + StartAlteration { value: TimeWrapper, attempted: TimeWrapper }, + EndAlteration { value: TimeWrapper, attempted: TimeWrapper }, + InvalidRange { start: TimeWrapper, end: TimeWrapper }, } diff --git a/crates/common/src/prov/model/json_ld/from_json_ld.rs 
diff --git a/crates/common/src/prov/model/json_ld/from_json_ld.rs b/crates/common/src/prov/model/json_ld/from_json_ld.rs
index 9e38f6e2f..4054acc26 100644
--- a/crates/common/src/prov/model/json_ld/from_json_ld.rs
+++ b/crates/common/src/prov/model/json_ld/from_json_ld.rs
@@ -3,770 +3,770 @@ use futures::{future::BoxFuture, FutureExt};
 use iref::IriBuf;
 use iri_string::types::IriString;
 use json_ld::{
-    Indexed, Loader, Node, Profile, RemoteDocument, syntax::IntoJsonWithContextMeta, Term,
+	syntax::IntoJsonWithContextMeta, Indexed, Loader, Node, Profile, RemoteDocument, Term,
 };
 use locspan::Meta;
 use mime::Mime;
 #[cfg(not(feature = "std"))]
 use parity_scale_codec::{alloc::string::String, alloc::vec::Vec, prelude::*};
-use rdf_types::{BlankIdBuf, IriVocabularyMut, vocabulary::no_vocabulary_mut};
+use rdf_types::{vocabulary::no_vocabulary_mut, BlankIdBuf, IriVocabularyMut};
 use serde_json::{json, Value};
 use tracing::{error, instrument, trace};

 use crate::{
-    attributes::{Attribute, Attributes},
-    prov::{
-        ActivityId,
-        AgentId,
-        DomaintypeId, EntityId, NamespaceId,
-        operations::{
-            ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation,
-            CreateNamespace, DerivationType, EntityDerive, EntityExists, SetAttributes,
-            WasAssociatedWith, WasAttributedTo, WasGeneratedBy, WasInformedBy,
-        },
-        Role,
-        vocab::{self, Chronicle, Prov},
-    },
+	attributes::{Attribute, Attributes},
+	prov::{
+		operations::{
+			ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation,
+			CreateNamespace, DerivationType, EntityDerive, EntityExists, SetAttributes,
+			WasAssociatedWith, WasAttributedTo, WasGeneratedBy, WasInformedBy,
+		},
+		vocab::{self, Chronicle, Prov},
+		Activity, ActivityId, Agent, AgentId, DomaintypeId, Entity, EntityId, NamespaceId,
+		ProcessorError, ProvModel, Role,
+	},
 };
-use crate::prov::{Activity, Agent, Entity, ProcessorError, ProvModel};

pub struct ContextLoader;

impl Loader<IriBuf, ()> for ContextLoader {
	type Error = ();
	type Output = json_ld::syntax::Value<()>;

	// This is only used to load the context, so we can just return it directly
	fn load_with<'b>(
		&'b mut self,
		vocabulary: &'b mut (impl Sync + Send + IriVocabularyMut),
		url: IriBuf,
	) -> BoxFuture<'b, Result<RemoteDocument<IriBuf, ()>, Self::Error>>
	where
		IriBuf: 'b,
	{
		use hashbrown::HashSet;
		use std::str::FromStr;
		let mut profiles = HashSet::new();
		profiles.insert(Profile::new(url.as_iri(), vocabulary));
		trace!("Loading context from {}", url);
		async move {
			let json = json!({
				"@context": crate::context::PROV.clone()
			});
			let value = json_ld::syntax::Value::from_serde_json(json, |_| ());
			Ok(json_ld::RemoteDocument::new_full(
				Some(url),
				Some(Mime::from_str("application/json").unwrap()),
				None,
				profiles,
				value,
			))
		}
		.boxed()
	}
}

fn as_json(node: &Node<IriBuf, BlankIdBuf, ()>) -> serde_json::Value {
	node.clone().into_json_meta_with((), no_vocabulary_mut()).into_value().into()
}

// Convert with coercion from our vocab iris, this is safe as sourced from constants
fn id_from_iri_string<I: Into<IriString>>(iri: I) -> json_ld::Id {
	json_ld::Id::Valid(json_ld::ValidId::Iri(IriBuf::try_from(iri.into().to_string()).unwrap()))
}

fn extract_reference_ids<I: Into<IriString>>(
	iri: I,
	node: &Node<IriBuf, BlankIdBuf, ()>,
) -> Result<Vec<IriBuf>, ProcessorError> {
	let ids: Result<Vec<_>, _> = node
		.get(&id_from_iri_string(iri))
		.map(|o| o.id().ok_or_else(|| ProcessorError::MissingId { object: as_json(node) }))
		.map(|id| {
			id.and_then(|id| {
				id.as_iri().ok_or_else(|| ProcessorError::MissingId { object: as_json(node) })
			})
		})
		.map(|id| id.map(|id| id.to_owned()))
		.collect();

	ids
}

fn extract_scalar_prop<I: Into<IriString> + Clone>(
	iri: I,
	node: &Node<IriBuf, BlankIdBuf, ()>,
) -> Result<&Indexed<json_ld::object::Object<IriBuf, BlankIdBuf, ()>, ()>, ProcessorError> {
	if let Some(object) = node.get_any(&id_from_iri_string(iri.clone())) {
		Ok(object)
	} else {
		Err(ProcessorError::MissingProperty { iri: iri.into().to_string(), object: as_json(node) })
	}
}

fn extract_namespace(agent: &Node<IriBuf, BlankIdBuf, ()>) -> Result<NamespaceId, ProcessorError> {
	Ok(NamespaceId::try_from(
		extract_scalar_prop(Chronicle::HasNamespace, agent)?
			.id()
			.ok_or(ProcessorError::MissingId { object: as_json(agent) })?
			.to_string(),
	)?)
}
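The `unwrap` in `id_from_iri_string` leans on the comment above it: inputs are the crate's own vocab constants, which are known-valid IRIs, so the fallible parse cannot fail in practice. The same pattern in isolation (the PROV IRI is just an illustrative valid input):

use iref::IriBuf;

fn vocab_to_node_id() -> json_ld::Id {
	// A static, known-valid vocab IRI makes the unwrap safe
	let iri = IriBuf::try_from("http://www.w3.org/ns/prov#Agent".to_string()).unwrap();
	json_ld::Id::Valid(json_ld::ValidId::Iri(iri))
}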

impl ProvModel {
	pub async fn apply_json_ld_str(&mut self, buf: &str) -> Result<(), ProcessorError> {
		self.apply_json_ld(serde_json::from_str(buf)?).await?;

		Ok(())
	}

	pub async fn apply_json_ld_bytes(&mut self, buf: &[u8]) -> Result<(), ProcessorError> {
		self.apply_json_ld(serde_json::from_slice(buf)?).await?;

		Ok(())
	}

	/// Take a JSON-LD input document, assumed to be in compact form; expand it and apply the
	/// state to the prov model, replacing @context with our resource context.
	/// We rely on reified @types, so subclassing must also include supertypes.
	#[instrument(level = "trace", skip(self, json))]
	pub async fn apply_json_ld(&mut self, json: serde_json::Value) -> Result<(), ProcessorError> {
		if let serde_json::Value::Object(mut map) = json {
			map.insert(
				"@context".to_string(),
				serde_json::Value::String("http://chronicle.works/chr/1.0/c.jsonld".to_string()),
			);
			let json = serde_json::Value::Object(map);

			trace!(to_apply_compact=%serde_json::to_string_pretty(&json)?);

			use json_ld::Expand;
			let output = json_ld::syntax::Value::from_serde_json(json.clone(), |_| ())
				.expand(&mut ContextLoader)
				.await
				.map_err(|e| ProcessorError::Expansion { inner: format!("{e:?}") })?;

			for o in output.into_value().into_objects() {
				let o =
					o.value().inner().as_node().ok_or(ProcessorError::NotANode(json.clone()))?;

				if o.has_type(&id_from_iri_string(Chronicle::Namespace)) {
					self.apply_node_as_namespace(o)?;
				}
				if o.has_type(&id_from_iri_string(Prov::Agent)) {
					self.apply_node_as_agent(o)?;
				} else if o.has_type(&id_from_iri_string(Prov::Activity)) {
					self.apply_node_as_activity(o)?;
				} else if o.has_type(&id_from_iri_string(Prov::Entity)) {
					self.apply_node_as_entity(o)?;
				} else if o.has_type(&id_from_iri_string(Prov::Delegation)) {
					self.apply_node_as_delegation(o)?;
				} else if o.has_type(&id_from_iri_string(Prov::Association)) {
					self.apply_node_as_association(o)?;
				} else if o.has_type(&id_from_iri_string(Prov::Attribution)) {
					self.apply_node_as_attribution(o)?;
				}
			}
			Ok(())
		} else {
			Err(ProcessorError::NotAnObject)
		}
	}

	/// Extract the types and find the first that is not prov::; as we currently allow only
	/// zero or one domain types, this is sufficient.
	fn extract_attributes(
		node: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<Attributes, ProcessorError> {
		let typ = node
			.types()
			.iter()
			.filter_map(|x| x.as_iri())
			.find(|x| x.as_str().contains("domaintype"))
			.map(|iri| Ok::<_, ProcessorError>(DomaintypeId::try_from(iri.to_string())?))
			.transpose();

		if let serde_json::Value::Object(map) = as_json(node) {
			if let Some(serde_json::Value::Array(array)) = map.get(Chronicle::Value.as_str()) {
				if array.len() == 1 {
					let o = array.first().unwrap();
					let serde_object = &o["@value"];

					if let serde_json::Value::Object(object) = serde_object {
						let attributes = object
							.into_iter()
							.map(|(typ, value)| Attribute::new(typ, value.clone()))
							.collect();

						return Ok(Attributes::new(typ?, attributes));
					}
				}
			}
		}

		Err(ProcessorError::NotAnObject)
	}

	fn apply_node_as_namespace(
		&mut self,
		ns: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let ns = ns.id().ok_or_else(|| ProcessorError::MissingId { object: as_json(ns) })?;

		self.namespace_context(&NamespaceId::try_from(ns.to_string())?);

		Ok(())
	}

	fn apply_node_as_delegation(
		&mut self,
		delegation: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let namespace_id = extract_namespace(delegation)?;

		let role = extract_scalar_prop(Prov::HadRole, delegation)
			.ok()
			.and_then(|x| x.as_str().map(Role::from));

		let responsible_id = extract_reference_ids(Prov::ActedOnBehalfOf, delegation)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(delegation),
				iri: Prov::ActedOnBehalfOf.to_string(),
			})
			.and_then(|x| Ok(AgentId::try_from(x.to_string())?))?;

		let delegate_id = extract_reference_ids(Prov::Delegate, delegation)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(delegation),
				iri: Prov::Delegate.to_string(),
			})
			.and_then(|x| Ok(AgentId::try_from(x.to_string())?))?;

		let activity_id = extract_reference_ids(Prov::HadActivity, delegation)?
			.into_iter()
			.next()
			.map(|x| ActivityId::try_from(x.to_string()))
			.transpose()?;

		self.qualified_delegation(&namespace_id, &responsible_id, &delegate_id, activity_id, role);
		Ok(())
	}

	fn apply_node_as_association(
		&mut self,
		association: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let namespace_id = extract_namespace(association)?;

		let role = extract_scalar_prop(Prov::HadRole, association)
			.ok()
			.and_then(|x| x.as_str().map(Role::from));

		let agent_id = extract_reference_ids(Prov::Responsible, association)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(association),
				iri: Prov::Responsible.to_string(),
			})
			.and_then(|x| Ok(AgentId::try_from(x.to_string())?))?;

		let activity_id = extract_reference_ids(Prov::HadActivity, association)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(association),
				iri: Prov::HadActivity.to_string(),
			})
			.and_then(|x| Ok(ActivityId::try_from(x.to_string())?))?;

		self.qualified_association(&namespace_id, &activity_id, &agent_id, role);

		Ok(())
	}

	fn apply_node_as_attribution(
		&mut self,
		attribution: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let namespace_id = extract_namespace(attribution)?;

		let role = extract_scalar_prop(Prov::HadRole, attribution)
			.ok()
			.and_then(|x| x.as_str().map(Role::from));

		let agent_id = extract_reference_ids(Prov::Responsible, attribution)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(attribution),
				iri: Prov::Responsible.to_string(),
			})
			.and_then(|x| Ok(AgentId::try_from(x.to_string())?))?;

		let entity_id = extract_reference_ids(Prov::HadEntity, attribution)?
			.into_iter()
			.next()
			.ok_or_else(|| ProcessorError::MissingProperty {
				object: as_json(attribution),
				iri: Prov::HadEntity.to_string(),
			})
			.and_then(|x| Ok(EntityId::try_from(x.to_string())?))?;

		self.qualified_attribution(&namespace_id, &entity_id, &agent_id, role);

		Ok(())
	}

	fn apply_node_as_agent(
		&mut self,
		agent: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let id = AgentId::try_from(
			agent
				.id()
				.ok_or_else(|| ProcessorError::MissingId { object: as_json(agent) })?
				.to_string(),
		)?;

		let namespaceid = extract_namespace(agent)?;

		let attributes = Self::extract_attributes(agent)?;

		let agent = Agent::exists(namespaceid, id).has_attributes(attributes);

		self.add_agent(agent);

		Ok(())
	}

	fn apply_node_as_activity(
		&mut self,
		activity: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let id = ActivityId::try_from(
			activity
				.id()
				.ok_or_else(|| ProcessorError::MissingId { object: as_json(activity) })?
				.to_string(),
		)?;

		let namespaceid = extract_namespace(activity)?;

		let started = extract_scalar_prop(Prov::StartedAtTime, activity)
			.ok()
			.and_then(|x| x.as_str().map(DateTime::parse_from_rfc3339));

		let ended = extract_scalar_prop(Prov::EndedAtTime, activity)
			.ok()
			.and_then(|x| x.as_str().map(DateTime::parse_from_rfc3339));

		let used = extract_reference_ids(Prov::Used, activity)?
			.into_iter()
			.map(|id| EntityId::try_from(id.to_string()))
			.collect::<Result<Vec<_>, _>>()?;

		let was_informed_by = extract_reference_ids(Prov::WasInformedBy, activity)?
			.into_iter()
			.map(|id| ActivityId::try_from(id.to_string()))
			.collect::<Result<Vec<_>, _>>()?;

		let attributes = Self::extract_attributes(activity)?;

		let mut activity = Activity::exists(namespaceid.clone(), id).has_attributes(attributes);

		if let Some(started) = started {
			activity.started = Some(DateTime::<Utc>::from(started?).into());
		}

		if let Some(ended) = ended {
			activity.ended = Some(DateTime::<Utc>::from(ended?).into());
		}

		for entity in used {
			self.used(namespaceid.clone(), &activity.id, &entity);
		}

		for informing_activity in was_informed_by {
			self.was_informed_by(namespaceid.clone(), &activity.id, &informing_activity);
		}

		self.add_activity(activity);

		Ok(())
	}

	fn apply_node_as_entity(
		&mut self,
		entity: &Node<IriBuf, BlankIdBuf, ()>,
	) -> Result<(), ProcessorError> {
		let id = EntityId::try_from(
			entity
				.id()
				.ok_or_else(|| ProcessorError::MissingId { object: as_json(entity) })?
				.to_string(),
		)?;

		let namespaceid = extract_namespace(entity)?;

		let generatedby = extract_reference_ids(Prov::WasGeneratedBy, entity)?
			.into_iter()
			.map(|id| ActivityId::try_from(id.to_string()))
			.collect::<Result<Vec<_>, _>>()?;

		for derived in extract_reference_ids(Prov::WasDerivedFrom, entity)?
			.into_iter()
			.map(|id| EntityId::try_from(id.to_string()))
		{
			self.was_derived_from(
				namespaceid.clone(),
				DerivationType::None,
				derived?,
				id.clone(),
				None,
			);
		}

		for derived in extract_reference_ids(Prov::WasQuotedFrom, entity)?
			.into_iter()
			.map(|id| EntityId::try_from(id.to_string()))
		{
			self.was_derived_from(
				namespaceid.clone(),
				DerivationType::quotation(),
				derived?,
				id.clone(),
				None,
			);
		}

		for derived in extract_reference_ids(Prov::WasRevisionOf, entity)?
			.into_iter()
			.map(|id| EntityId::try_from(id.to_string()))
		{
			self.was_derived_from(
				namespaceid.clone(),
				DerivationType::revision(),
				derived?,
				id.clone(),
				None,
			);
		}

		for derived in extract_reference_ids(Prov::HadPrimarySource, entity)?
			.into_iter()
			.map(|id| EntityId::try_from(id.to_string()))
		{
			self.was_derived_from(
				namespaceid.clone(),
				DerivationType::primary_source(),
				derived?,
				id.clone(),
				None,
			);
		}

		for activity in generatedby {
			self.was_generated_by(namespaceid.clone(), &id, &activity);
		}

		let attributes = Self::extract_attributes(entity)?;
		self.add_entity(Entity::exists(namespaceid, id).has_attributes(attributes));

		Ok(())
	}
}
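Taken together, `apply_json_ld` injects the canonical @context itself, so callers can hand over a compact document without one. A usage sketch; the document shape and property names below are illustrative, not confirmed by this diff:

// Sketch only: assumes the ProvModel API exactly as shown above; the JSON
// property names depend on the Chronicle context and are illustrative.
async fn load_agent(model: &mut ProvModel) -> Result<(), ProcessorError> {
	let doc = r#"{
		"@id": "chronicle:agent:bob",
		"@type": "prov:Agent",
		"externalId": "bob"
	}"#;
	// apply_json_ld_str parses, injects @context, expands, then applies state
	model.apply_json_ld_str(doc).await
}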

trait Operation {
	fn namespace(&self) -> Option<NamespaceId>;
	fn agent(&self) -> Option<AgentId>;
	fn delegate(&self) -> Option<AgentId>;
	fn responsible(&self) -> Option<AgentId>;

	fn activity(&self) -> Option<ActivityId>;
	fn optional_role(&self) -> Option<Role>;
	fn start_time(&self) -> Option<String>;
	fn end_time(&self) -> Option<String>;
	fn entity(&self) -> Option<EntityId>;
	fn used_entity(&self) -> Option<EntityId>;
	fn derivation(&self) -> DerivationType;
	fn domain(&self) -> Option<DomaintypeId>;
	fn attributes(&self) -> Vec<Attribute>;
	fn informing_activity(&self) -> Option<ActivityId>;
}

impl Operation for Node<IriBuf, BlankIdBuf, ()> {
	fn namespace(&self) -> Option<NamespaceId> {
		let mut uuid_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::NamespaceUuid));
		let uuid = uuid_objects.next()?.as_str()?;
		let mut name_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::NamespaceName));
		let external_id = name_objects.next()?.as_str()?;
		let uuid = uuid::Uuid::parse_str(uuid).ok()?;
		Some(NamespaceId::from_external_id(external_id, uuid))
	}

	fn agent(&self) -> Option<AgentId> {
		let mut name_objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::AgentName));
		let external_id = name_objects.next()?.as_str()?;
		Some(AgentId::from_external_id(external_id))
	}

	fn delegate(&self) -> Option<AgentId> {
		let mut name_objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::DelegateId));
		let external_id = name_objects.next()?.as_str()?;
		Some(AgentId::from_external_id(external_id))
	}

	fn start_time(&self) -> Option<String> {
		let mut objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::StartActivityTime));
		let time = objects.next()?.as_str()?;
		Some(time.to_owned())
	}

	fn end_time(&self) -> Option<String> {
		let mut objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::EndActivityTime));
		let time = objects.next()?.as_str()?;
		Some(time.to_owned())
	}

	fn entity(&self) -> Option<EntityId> {
		let mut name_objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::EntityName));
		let external_id = name_objects.next()?.as_str()?;
		Some(EntityId::from_external_id(external_id))
	}

	fn used_entity(&self) -> Option<EntityId> {
		let mut name_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::UsedEntityName));
		let external_id = name_objects.next()?.as_str()?;
		Some(EntityId::from_external_id(external_id))
	}

	fn derivation(&self) -> DerivationType {
		let mut objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::DerivationType));
		if let Some(object) = objects.next() {
			if let Some(derivation) = object.as_str() {
				return match derivation {
					"Revision" => DerivationType::Revision,
					"Quotation" => DerivationType::Quotation,
					"PrimarySource" => DerivationType::PrimarySource,
					_ => DerivationType::None,
				};
			}
		}
		DerivationType::None
	}

	fn domain(&self) -> Option<DomaintypeId> {
		let mut objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::DomaintypeId));
		let d = objects.next()?.as_str()?;
		Some(DomaintypeId::from_external_id(d))
	}

	fn attributes(&self) -> Vec<Attribute> {
		self.get(&id_from_iri_string(vocab::ChronicleOperation::Attributes))
			.filter_map(|o| {
				let serde_object =
					if let Some(json_ld::object::Value::Json(Meta(json, _))) = o.as_value() {
						Some(json.clone().into())
					} else {
						serde_json::from_str(&as_json(o.as_node()?)["@value"].to_string()).ok()
					};

				serde_object.and_then(|obj: serde_json::Value| {
					if let serde_json::Value::Object(object) = obj {
						Some(
							object
								.into_iter()
								.map(|(typ, value)| Attribute { typ, value: value.into() })
								.collect::<Vec<_>>(),
						)
					} else {
						None
					}
				})
			})
			.flatten()
			.collect()
	}

	fn responsible(&self) -> Option<AgentId> {
		let mut name_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::ResponsibleId));
		let external_id = name_objects.next()?.as_str()?;
		Some(AgentId::from_external_id(external_id))
	}

	fn optional_role(&self) -> Option<Role> {
		let mut name_objects = self.get(&id_from_iri_string(vocab::ChronicleOperation::Role));
		let object = name_objects.next()?;
		Some(Role::from(object.as_str()?))
	}

	fn activity(&self) -> Option<ActivityId> {
		let mut name_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::ActivityName));
		let external_id = name_objects.next()?.as_str()?;
		Some(ActivityId::from_external_id(external_id))
	}

	fn informing_activity(&self) -> Option<ActivityId> {
		let mut name_objects =
			self.get(&id_from_iri_string(vocab::ChronicleOperation::InformingActivityName));
		let external_id = name_objects.next()?.as_str()?;
		Some(ActivityId::from_external_id(external_id))
	}
}
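`derivation()` is deliberately total: any missing or unrecognized marker degrades to `DerivationType::None` instead of erroring. The same mapping distilled into a standalone function (a restatement for clarity, not new behavior):

fn parse_derivation_marker(marker: Option<&str>) -> DerivationType {
	match marker {
		Some("Revision") => DerivationType::Revision,
		Some("Quotation") => DerivationType::Quotation,
		Some("PrimarySource") => DerivationType::PrimarySource,
		// Absent or unrecognized markers are treated as a plain derivation
		_ => DerivationType::None,
	}
}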
namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let delegate_id = o.delegate().ok_or(ProcessorError::MissingAgent)?; - let responsible_id = o.responsible().ok_or(ProcessorError::MissingAgent)?; - let activity_id = o.activity(); - - Ok(ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf::new( - namespace, - responsible_id, - delegate_id, - activity_id, - o.optional_role(), - ))) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::ActivityExists)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let activity_id = o.activity().ok_or(ProcessorError::MissingActivity)?; - Ok(ChronicleOperation::ActivityExists(ActivityExists { - namespace, - id: activity_id, - })) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::StartActivity)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let id = o.activity().ok_or(ProcessorError::MissingActivity)?; - let time_str = o.start_time().ok_or(ProcessorError::MissingTime)?; - match time_str.parse::>() { - Ok(time) => Ok(ChronicleOperation::start_activity(namespace, id, time)), - Err(e) => Err(ProcessorError::Time(e)), - } - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EndActivity)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let id = o.activity().ok_or(ProcessorError::MissingActivity)?; - let time_str = o.end_time().ok_or(ProcessorError::MissingTime)?; - match time_str.parse::>() { - Ok(time) => Ok(ChronicleOperation::end_activity(namespace, id, time)), - Err(e) => Err(ProcessorError::Time(e)), - } - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::ActivityUses)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let id = o.entity().ok_or(ProcessorError::MissingEntity)?; - let activity = o.activity().ok_or(ProcessorError::MissingActivity)?; - Ok(ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity })) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EntityExists)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let entity_id = o.entity().ok_or(ProcessorError::MissingEntity)?; - Ok(ChronicleOperation::EntityExists(EntityExists { namespace, id: entity_id })) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasGeneratedBy)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let id = o.entity().ok_or(ProcessorError::MissingEntity)?; - let activity = o.activity().ok_or(ProcessorError::MissingActivity)?; - Ok(ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity })) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EntityDerive)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let id = o.entity().ok_or(ProcessorError::MissingEntity)?; - let used_id = o.used_entity().ok_or(ProcessorError::MissingEntity)?; - let activity_id = o.activity(); - let typ = o.derivation(); - Ok(ChronicleOperation::EntityDerive(EntityDerive { - namespace, - id, - used_id, - activity_id, - typ, - })) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::SetAttributes)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let domain = o.domain(); - let attrs = o.attributes(); - - let attributes = Attributes::new(domain, attrs); - let actor: SetAttributes = { - if o.has_key(&Term::Id(id_from_iri_string( - vocab::ChronicleOperation::EntityName, - ))) { 
- let id = o.entity().ok_or(ProcessorError::MissingEntity)?; - SetAttributes::Entity { namespace, id, attributes } - } else if o.has_key(&Term::Id(id_from_iri_string( - vocab::ChronicleOperation::AgentName, - ))) { - let id = o.agent().ok_or(ProcessorError::MissingAgent)?; - SetAttributes::Agent { namespace, id, attributes } - } else { - let id = o.activity().ok_or(ProcessorError::MissingActivity)?; - SetAttributes::Activity { namespace, id, attributes } - } - }; - - Ok(ChronicleOperation::SetAttributes(actor)) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasAssociatedWith)) - { - Ok(ChronicleOperation::WasAssociatedWith(WasAssociatedWith::new( - o.namespace().ok_or(ProcessorError::MissingNamespace)?, - o.activity().ok_or(ProcessorError::MissingActivity)?, - o.agent().ok_or(ProcessorError::MissingAgent)?, - o.optional_role(), - ))) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasAttributedTo)) { - Ok(ChronicleOperation::WasAttributedTo(WasAttributedTo::new( - o.namespace().ok_or(ProcessorError::MissingNamespace)?, - o.entity().ok_or(ProcessorError::MissingEntity)?, - o.agent().ok_or(ProcessorError::MissingAgent)?, - o.optional_role(), - ))) - } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasInformedBy)) { - let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; - let activity = o.activity().ok_or(ProcessorError::MissingActivity)?; - let informing_activity = - o.informing_activity().ok_or(ProcessorError::MissingActivity)?; - Ok(ChronicleOperation::WasInformedBy(WasInformedBy { - namespace, - activity, - informing_activity, - })) - } else { - error!( + pub async fn from_json(json: &Value) -> Result { + use json_ld::Expand; + + let mut output = json_ld::syntax::Value::from_serde_json(json.clone(), |_| ()) + .expand(&mut ContextLoader) + .await + .map_err(|e| ProcessorError::Expansion { inner: format!("{e:?}") })?; + + output.canonicalize(); + if let Some(object) = output.into_value().into_objects().into_iter().next() { + let o = + object.value().inner().as_node().ok_or(ProcessorError::NotANode(json.clone()))?; + if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::CreateNamespace)) { + let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; + Ok(ChronicleOperation::CreateNamespace(CreateNamespace { id: namespace })) + } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::AgentExists)) { + let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; + let agent_id = o.agent().ok_or(ProcessorError::MissingAgent)?; + Ok(ChronicleOperation::AgentExists(AgentExists { namespace, id: agent_id })) + } else if o + .has_type(&id_from_iri_string(vocab::ChronicleOperation::AgentActsOnBehalfOf)) + { + let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; + let delegate_id = o.delegate().ok_or(ProcessorError::MissingAgent)?; + let responsible_id = o.responsible().ok_or(ProcessorError::MissingAgent)?; + let activity_id = o.activity(); + + Ok(ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf::new( + namespace, + responsible_id, + delegate_id, + activity_id, + o.optional_role(), + ))) + } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::ActivityExists)) { + let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; + let activity_id = o.activity().ok_or(ProcessorError::MissingActivity)?; + Ok(ChronicleOperation::ActivityExists(ActivityExists { + namespace, + id: activity_id, + })) + } else if 
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let id = o.activity().ok_or(ProcessorError::MissingActivity)?;
+                let time_str = o.start_time().ok_or(ProcessorError::MissingTime)?;
+                match time_str.parse::<DateTime<Utc>>() {
+                    Ok(time) => Ok(ChronicleOperation::start_activity(namespace, id, time)),
+                    Err(e) => Err(ProcessorError::Time(e)),
+                }
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EndActivity)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let id = o.activity().ok_or(ProcessorError::MissingActivity)?;
+                let time_str = o.end_time().ok_or(ProcessorError::MissingTime)?;
+                match time_str.parse::<DateTime<Utc>>() {
+                    Ok(time) => Ok(ChronicleOperation::end_activity(namespace, id, time)),
+                    Err(e) => Err(ProcessorError::Time(e)),
+                }
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::ActivityUses)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let id = o.entity().ok_or(ProcessorError::MissingEntity)?;
+                let activity = o.activity().ok_or(ProcessorError::MissingActivity)?;
+                Ok(ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }))
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EntityExists)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let entity_id = o.entity().ok_or(ProcessorError::MissingEntity)?;
+                Ok(ChronicleOperation::EntityExists(EntityExists { namespace, id: entity_id }))
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasGeneratedBy)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let id = o.entity().ok_or(ProcessorError::MissingEntity)?;
+                let activity = o.activity().ok_or(ProcessorError::MissingActivity)?;
+                Ok(ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }))
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::EntityDerive)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let id = o.entity().ok_or(ProcessorError::MissingEntity)?;
+                let used_id = o.used_entity().ok_or(ProcessorError::MissingEntity)?;
+                let activity_id = o.activity();
+                let typ = o.derivation();
+                Ok(ChronicleOperation::EntityDerive(EntityDerive {
+                    namespace,
+                    id,
+                    used_id,
+                    activity_id,
+                    typ,
+                }))
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::SetAttributes)) {
+                let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?;
+                let domain = o.domain();
+                let attrs = o.attributes();
+
+                let attributes = Attributes::new(domain, attrs);
+                let actor: SetAttributes = {
+                    if o.has_key(&Term::Id(id_from_iri_string(
+                        vocab::ChronicleOperation::EntityName,
+                    ))) {
+                        let id = o.entity().ok_or(ProcessorError::MissingEntity)?;
+                        SetAttributes::Entity { namespace, id, attributes }
+                    } else if o.has_key(&Term::Id(id_from_iri_string(
+                        vocab::ChronicleOperation::AgentName,
+                    ))) {
+                        let id = o.agent().ok_or(ProcessorError::MissingAgent)?;
+                        SetAttributes::Agent { namespace, id, attributes }
+                    } else {
+                        let id = o.activity().ok_or(ProcessorError::MissingActivity)?;
+                        SetAttributes::Activity { namespace, id, attributes }
+                    }
+                };
+
+                Ok(ChronicleOperation::SetAttributes(actor))
+            } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasAssociatedWith))
+            {
+                Ok(ChronicleOperation::WasAssociatedWith(WasAssociatedWith::new(
+                    o.namespace().ok_or(ProcessorError::MissingNamespace)?,
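// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the patch: the StartActivity and
// EndActivity arms above require RFC 3339 timestamps. A minimal standalone
// version of that parse, assuming only the chrono crate (helper name is
// hypothetical):
//
//     use chrono::{DateTime, Utc};
//
//     fn parse_activity_time(s: &str) -> Result<DateTime<Utc>, chrono::ParseError> {
//         // The same parse the decoder performs before constructing the
//         // operation; a failure is wrapped as ProcessorError::Time.
//         s.parse::<DateTime<Utc>>()
//     }
//
//     // parse_activity_time("2024-01-01T00:00:00Z") => Ok(..)
//     // parse_activity_time("not-a-time")           => Err(ParseError)
// ---------------------------------------------------------------------------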
o.activity().ok_or(ProcessorError::MissingActivity)?, + o.agent().ok_or(ProcessorError::MissingAgent)?, + o.optional_role(), + ))) + } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasAttributedTo)) { + Ok(ChronicleOperation::WasAttributedTo(WasAttributedTo::new( + o.namespace().ok_or(ProcessorError::MissingNamespace)?, + o.entity().ok_or(ProcessorError::MissingEntity)?, + o.agent().ok_or(ProcessorError::MissingAgent)?, + o.optional_role(), + ))) + } else if o.has_type(&id_from_iri_string(vocab::ChronicleOperation::WasInformedBy)) { + let namespace = o.namespace().ok_or(ProcessorError::MissingNamespace)?; + let activity = o.activity().ok_or(ProcessorError::MissingActivity)?; + let informing_activity = + o.informing_activity().ok_or(ProcessorError::MissingActivity)?; + Ok(ChronicleOperation::WasInformedBy(WasInformedBy { + namespace, + activity, + informing_activity, + })) + } else { + error!( "Unknown operation: {:?} {:?}", o.type_entry(), id_from_iri_string(vocab::ChronicleOperation::SetAttributes) ); - unreachable!() - } - } else { - Err(ProcessorError::NotANode(json.clone())) - } - } + unreachable!() + } + } else { + Err(ProcessorError::NotANode(json.clone())) + } + } } diff --git a/crates/common/src/prov/model/json_ld/mod.rs b/crates/common/src/prov/model/json_ld/mod.rs index 393829cc6..abb91f6bf 100644 --- a/crates/common/src/prov/model/json_ld/mod.rs +++ b/crates/common/src/prov/model/json_ld/mod.rs @@ -18,77 +18,77 @@ use thiserror_no_std::Error; #[derive(Error, Debug)] pub enum CompactionError { - #[error("JSON-LD: {inner}")] - JsonLd { inner: String }, - #[error("Tokio")] - Join, - #[error("Serde conversion: {source}")] - Serde { - #[from] - #[source] - source: serde_json::Error, - }, - #[error("Expanded document invalid: {message}")] - InvalidExpanded { message: String }, - #[error("Compacted document not a JSON object: {document}")] - NoObject { document: Value }, + #[error("JSON-LD: {inner}")] + JsonLd { inner: String }, + #[error("Tokio")] + Join, + #[error("Serde conversion: {source}")] + Serde { + #[from] + #[source] + source: serde_json::Error, + }, + #[error("Expanded document invalid: {message}")] + InvalidExpanded { message: String }, + #[error("Compacted document not a JSON object: {document}")] + NoObject { document: Value }, } #[derive(Debug)] pub struct ExpandedJson(pub serde_json::Value); fn construct_context_definition( - json: &serde_json::Value, - metadata: M, + json: &serde_json::Value, + metadata: M, ) -> json_ld::syntax::context::Definition - where - M: Clone + core::fmt::Debug, +where + M: Clone + core::fmt::Debug, { - use json_ld::syntax::{ - context::{ - definition::{Bindings, Version}, - Definition, TermDefinition, - }, - Entry, Nullable, TryFromJson, - }; - if let Value::Object(map) = json { - match map.get("@version") { - None => {} - Some(Value::Number(version)) if version.as_f64() == Some(1.1) => {} - Some(json_version) => panic!("unexpected JSON-LD context @version: {json_version}"), - }; - let mut bindings = Bindings::new(); - for (key, value) in map { - if key == "@version" { - // already handled above - } else if let Some('@') = key.chars().next() { - panic!("unexpected JSON-LD context key: {key}"); - } else { - let value = - json_ld::syntax::Value::from_serde_json(value.clone(), |_| metadata.clone()); - let term: Meta, M> = TryFromJson::try_from_json(value) - .expect("failed to convert {value} to term binding"); - bindings.insert( - Meta(key.clone().into(), metadata.clone()), - Meta(Nullable::Some(term.value().clone()), 
metadata.clone()), - ); - } - } - Definition { - base: None, - import: None, - language: None, - direction: None, - propagate: None, - protected: None, - type_: None, - version: Some(Entry::new(metadata.clone(), Meta::new(Version::V1_1, metadata))), - vocab: None, - bindings, - } - } else { - panic!("failed to convert JSON to LD context: {json:?}"); - } + use json_ld::syntax::{ + context::{ + definition::{Bindings, Version}, + Definition, TermDefinition, + }, + Entry, Nullable, TryFromJson, + }; + if let Value::Object(map) = json { + match map.get("@version") { + None => {}, + Some(Value::Number(version)) if version.as_f64() == Some(1.1) => {}, + Some(json_version) => panic!("unexpected JSON-LD context @version: {json_version}"), + }; + let mut bindings = Bindings::new(); + for (key, value) in map { + if key == "@version" { + // already handled above + } else if let Some('@') = key.chars().next() { + panic!("unexpected JSON-LD context key: {key}"); + } else { + let value = + json_ld::syntax::Value::from_serde_json(value.clone(), |_| metadata.clone()); + let term: Meta, M> = TryFromJson::try_from_json(value) + .expect("failed to convert {value} to term binding"); + bindings.insert( + Meta(key.clone().into(), metadata.clone()), + Meta(Nullable::Some(term.value().clone()), metadata.clone()), + ); + } + } + Definition { + base: None, + import: None, + language: None, + direction: None, + propagate: None, + protected: None, + type_: None, + version: Some(Entry::new(metadata.clone(), Meta::new(Version::V1_1, metadata))), + vocab: None, + bindings, + } + } else { + panic!("failed to convert JSON to LD context: {json:?}"); + } } lazy_static! { @@ -97,95 +97,95 @@ lazy_static! { } impl ExpandedJson { - async fn compact_unordered(self) -> Result { - use json_ld::{ - syntax::context, Compact, ExpandedDocument, Process, ProcessingMode, TryFromJson, - }; - - let vocabulary = no_vocabulary_mut(); - let mut loader: NoLoader = NoLoader::new(); - - // process context - let value = context::Value::One(Meta::new( - context::Context::Definition(JSON_LD_CONTEXT_DEFS.clone()), - (), - )); - let context_meta = Meta::new(value, ()); - let processed_context = context_meta - .process(vocabulary, &mut loader, None) - .await - .map_err(|e| CompactionError::JsonLd { inner: format!("{:?}", e) })?; - - // compact document - - let expanded_meta = json_ld::syntax::Value::from_serde_json(self.0, |_| ()); - - let expanded_doc: Meta, ()> = - TryFromJson::try_from_json_in(vocabulary, expanded_meta).map_err(|e| { - CompactionError::InvalidExpanded { message: format!("{:?}", e.into_value()) } - })?; - - let output = expanded_doc - .compact_full( - vocabulary, - processed_context.as_ref(), - &mut loader, - json_ld::compaction::Options { - processing_mode: ProcessingMode::JsonLd1_1, - compact_to_relative: true, - compact_arrays: true, - ordered: true, - }, - ) - .await - .map_err(|e| CompactionError::JsonLd { inner: e.to_string() })?; - - // Sort @graph - - // reference context - let json: Value = output.into_value().into(); - - if let Value::Object(mut map) = json { - map.insert( - "@context".to_string(), - Value::String("http://chronicle.works/chr/1.0/c.jsonld".to_string()), - ); - Ok(CompactedJson(Value::Object(map))) - } else { - Err(CompactionError::NoObject { document: json }) - } - } - - // Sort @graph by json value, as they are unstable and we need deterministic output - #[tracing::instrument(level = "trace", skip(self), ret)] - pub async fn compact(self) -> Result { - let mut v: serde_json::Value = - 
serde_json::from_str(&self.compact_unordered().await?.0.to_string())?;
-
-        if let Some(v) = v.pointer_mut("/@graph").and_then(|p| p.as_array_mut()) {
-            v.sort_by_cached_key(|v| v.to_string());
-        }
-
-        Ok(v)
-    }
-
-    pub async fn compact_stable_order(self) -> Result<serde_json::Value, CompactionError> {
-        self.compact().await
-    }
+    async fn compact_unordered(self) -> Result<CompactedJson, CompactionError> {
+        use json_ld::{
+            syntax::context, Compact, ExpandedDocument, Process, ProcessingMode, TryFromJson,
+        };
+
+        let vocabulary = no_vocabulary_mut();
+        let mut loader: NoLoader = NoLoader::new();
+
+        // process context
+        let value = context::Value::One(Meta::new(
+            context::Context::Definition(JSON_LD_CONTEXT_DEFS.clone()),
+            (),
+        ));
+        let context_meta = Meta::new(value, ());
+        let processed_context = context_meta
+            .process(vocabulary, &mut loader, None)
+            .await
+            .map_err(|e| CompactionError::JsonLd { inner: format!("{:?}", e) })?;
+
+        // compact document
+
+        let expanded_meta = json_ld::syntax::Value::from_serde_json(self.0, |_| ());
+
+        let expanded_doc: Meta<ExpandedDocument, ()> =
+            TryFromJson::try_from_json_in(vocabulary, expanded_meta).map_err(|e| {
+                CompactionError::InvalidExpanded { message: format!("{:?}", e.into_value()) }
+            })?;
+
+        let output = expanded_doc
+            .compact_full(
+                vocabulary,
+                processed_context.as_ref(),
+                &mut loader,
+                json_ld::compaction::Options {
+                    processing_mode: ProcessingMode::JsonLd1_1,
+                    compact_to_relative: true,
+                    compact_arrays: true,
+                    ordered: true,
+                },
+            )
+            .await
+            .map_err(|e| CompactionError::JsonLd { inner: e.to_string() })?;
+
+        // Sort @graph
+
+        // reference context
+        let json: Value = output.into_value().into();
+
+        if let Value::Object(mut map) = json {
+            map.insert(
+                "@context".to_string(),
+                Value::String("http://chronicle.works/chr/1.0/c.jsonld".to_string()),
+            );
+            Ok(CompactedJson(Value::Object(map)))
+        } else {
+            Err(CompactionError::NoObject { document: json })
+        }
+    }
+
+    // Sort @graph by json value, as they are unstable and we need deterministic output
+    #[tracing::instrument(level = "trace", skip(self), ret)]
+    pub async fn compact(self) -> Result<serde_json::Value, CompactionError> {
+        let mut v: serde_json::Value =
+            serde_json::from_str(&self.compact_unordered().await?.0.to_string())?;
+
+        if let Some(v) = v.pointer_mut("/@graph").and_then(|p| p.as_array_mut()) {
+            v.sort_by_cached_key(|v| v.to_string());
+        }
+
+        Ok(v)
+    }
+
+    pub async fn compact_stable_order(self) -> Result<serde_json::Value, CompactionError> {
+        self.compact().await
+    }
}

pub struct CompactedJson(pub serde_json::Value);

impl core::ops::Deref for CompactedJson {
-    type Target = serde_json::Value;
+    type Target = serde_json::Value;

-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
}

impl CompactedJson {
-    pub fn pretty(&self) -> String {
-        serde_json::to_string_pretty(&self.0).unwrap()
-    }
+    pub fn pretty(&self) -> String {
+        serde_json::to_string_pretty(&self.0).unwrap()
+    }
}
diff --git a/crates/common/src/prov/model/json_ld/to_json_ld.rs b/crates/common/src/prov/model/json_ld/to_json_ld.rs
index 6644d8373..f89709322 100644
--- a/crates/common/src/prov/model/json_ld/to_json_ld.rs
+++ b/crates/common/src/prov/model/json_ld/to_json_ld.rs
@@ -2,977 +2,977 @@ use serde_json::{json, Value};

#[cfg(not(feature = "std"))]
use parity_scale_codec::{
-    alloc::string::{String, ToString},
-    alloc::vec::Vec,
+    alloc::string::{String, ToString},
+    alloc::vec::Vec,
};

use super::ExpandedJson;
use crate::{
-    attributes::{Attribute, Attributes},
-    prov::{
-        operations::{ChronicleOperation, CreateNamespace, DerivationType, *},
-        vocab::{self, Chronicle, Prov},
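// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the patch: compact() above makes
// output deterministic by sorting @graph members on their serialized form,
// since json-ld expansion order is unstable. Standalone equivalent (function
// name is hypothetical), assuming serde_json:
//
//     fn sort_graph(doc: &mut serde_json::Value) {
//         if let Some(graph) = doc.pointer_mut("/@graph").and_then(|g| g.as_array_mut()) {
//             // Cache each node's string form and sort lexicographically;
//             // identical input always yields identical ordering.
//             graph.sort_by_cached_key(|node| node.to_string());
//         }
//     }
// ---------------------------------------------------------------------------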
ChronicleIri, ExternalIdPart, FromCompact, ProvModel, UuidPart, - }, + attributes::{Attribute, Attributes}, + prov::{ + operations::{ChronicleOperation, CreateNamespace, DerivationType, *}, + vocab::{self, Chronicle, Prov}, + ChronicleIri, ExternalIdPart, FromCompact, ProvModel, UuidPart, + }, }; pub trait ToJson { - fn to_json(&self) -> ExpandedJson; + fn to_json(&self) -> ExpandedJson; } impl ToJson for ProvModel { - fn to_json(&self) -> ExpandedJson { - let mut doc = Vec::new(); + fn to_json(&self) -> ExpandedJson { + let mut doc = Vec::new(); - for (id, ns) in self.namespaces.iter() { - doc.push(json!({ + for (id, ns) in self.namespaces.iter() { + doc.push(json!({ "@id": (*id.de_compact()), "@type": [Chronicle::Namespace.as_str()], "http://chronicle.works/chronicle/ns#externalId": [{ "@value": ns.external_id.as_str(), }] })) - } + } - for ((_, id), agent) in self.agents.iter() { - let mut typ = vec![Prov::Agent.to_string()]; - if let Some(x) = agent.domaintypeid.as_ref() { - typ.push(x.de_compact()) - } + for ((_, id), agent) in self.agents.iter() { + let mut typ = vec![Prov::Agent.to_string()]; + if let Some(x) = agent.domaintypeid.as_ref() { + typ.push(x.de_compact()) + } - if let Value::Object(mut agentdoc) = json!({ + if let Value::Object(mut agentdoc) = json!({ "@id": (*id.de_compact()), "@type": typ, "http://chronicle.works/chronicle/ns#externalId": [{ "@value": agent.external_id.as_str(), }] }) { - if let Some(delegation) = - self.acted_on_behalf_of.get(&(agent.namespaceid.to_owned(), id.to_owned())) - { - let mut ids = Vec::new(); - let mut qualified_ids = Vec::new(); + if let Some(delegation) = + self.acted_on_behalf_of.get(&(agent.namespaceid.to_owned(), id.to_owned())) + { + let mut ids = Vec::new(); + let mut qualified_ids = Vec::new(); - for delegation in delegation.iter() { - ids.push(json!({"@id": delegation.responsible_id.de_compact()})); - qualified_ids.push(json!({"@id": delegation.id.de_compact()})); - } + for delegation in delegation.iter() { + ids.push(json!({"@id": delegation.responsible_id.de_compact()})); + qualified_ids.push(json!({"@id": delegation.id.de_compact()})); + } - agentdoc.insert(Prov::ActedOnBehalfOf.to_string(), Value::Array(ids)); + agentdoc.insert(Prov::ActedOnBehalfOf.to_string(), Value::Array(ids)); - agentdoc - .insert(Prov::QualifiedDelegation.to_string(), Value::Array(qualified_ids)); - } + agentdoc + .insert(Prov::QualifiedDelegation.to_string(), Value::Array(qualified_ids)); + } - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(agent.namespaceid.de_compact()), })); - agentdoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); + agentdoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); - Self::write_attributes(&mut agentdoc, agent.attributes.iter()); + Self::write_attributes(&mut agentdoc, agent.attributes.iter()); - doc.push(Value::Object(agentdoc)); - } - } + doc.push(Value::Object(agentdoc)); + } + } - for (_, associations) in self.association.iter() { - for association in (*associations).iter() { - if let Value::Object(mut associationdoc) = json!({ + for (_, associations) in self.association.iter() { + for association in (*associations).iter() { + if let Value::Object(mut associationdoc) = json!({ "@id": association.id.de_compact(), "@type": [Prov::Association.as_str()], }) { - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": 
Value::String(association.agent_id.de_compact()), })); - associationdoc.insert(Prov::Responsible.to_string(), Value::Array(values)); + associationdoc.insert(Prov::Responsible.to_string(), Value::Array(values)); - associationdoc.insert( - Prov::HadActivity.to_string(), - Value::Array(vec![json!({ + associationdoc.insert( + Prov::HadActivity.to_string(), + Value::Array(vec![json!({ "@id": Value::String(association.activity_id.de_compact()), })]), - ); + ); - if let Some(role) = &association.role { - associationdoc.insert( - Prov::HadRole.to_string(), - json!([{ "@value": role.to_string()}]), - ); - } + if let Some(role) = &association.role { + associationdoc.insert( + Prov::HadRole.to_string(), + json!([{ "@value": role.to_string()}]), + ); + } - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(association.namespace_id.de_compact()), })); - associationdoc - .insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); + associationdoc + .insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); - doc.push(Value::Object(associationdoc)); - } - } - } + doc.push(Value::Object(associationdoc)); + } + } + } - for (_, attributions) in self.attribution.iter() { - for attribution in (*attributions).iter() { - if let Value::Object(mut attribution_doc) = json!({ + for (_, attributions) in self.attribution.iter() { + for attribution in (*attributions).iter() { + if let Value::Object(mut attribution_doc) = json!({ "@id": attribution.id.de_compact(), "@type": [Prov::Attribution.as_str()], }) { - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(attribution.agent_id.de_compact()), })); - attribution_doc.insert(Prov::Responsible.to_string(), Value::Array(values)); + attribution_doc.insert(Prov::Responsible.to_string(), Value::Array(values)); - attribution_doc.insert( - Prov::HadEntity.to_string(), - Value::Array(vec![json!({ + attribution_doc.insert( + Prov::HadEntity.to_string(), + Value::Array(vec![json!({ "@id": Value::String(attribution.entity_id.de_compact()), })]), - ); + ); - if let Some(role) = &attribution.role { - attribution_doc.insert( - Prov::HadRole.to_string(), - json!([{ "@value": role.to_string()}]), - ); - } + if let Some(role) = &attribution.role { + attribution_doc.insert( + Prov::HadRole.to_string(), + json!([{ "@value": role.to_string()}]), + ); + } - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(attribution.namespace_id.de_compact()), })); - attribution_doc - .insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); + attribution_doc + .insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); - doc.push(Value::Object(attribution_doc)); - } - } - } + doc.push(Value::Object(attribution_doc)); + } + } + } - for (_, delegations) in self.delegation.iter() { - for delegation in (*delegations).iter() { - if let Value::Object(mut delegationdoc) = json!({ + for (_, delegations) in self.delegation.iter() { + for delegation in (*delegations).iter() { + if let Value::Object(mut delegationdoc) = json!({ "@id": delegation.id.de_compact(), "@type": [Prov::Delegation.as_str()], }) { - if let Some(activity_id) = &delegation.activity_id { - delegationdoc.insert( - Prov::HadActivity.to_string(), - Value::Array(vec![json!({ + if let Some(activity_id) = &delegation.activity_id { + delegationdoc.insert( + Prov::HadActivity.to_string(), + 
Value::Array(vec![json!({ "@id": Value::String(activity_id.de_compact()), })]), - ); - } + ); + } - if let Some(role) = &delegation.role { - delegationdoc.insert( - Prov::HadRole.to_string(), - json!([{ "@value": role.to_string()}]), - ); - } + if let Some(role) = &delegation.role { + delegationdoc.insert( + Prov::HadRole.to_string(), + json!([{ "@value": role.to_string()}]), + ); + } - let mut responsible_ids = Vec::new(); - responsible_ids.push( - json!({ "@id": Value::String(delegation.responsible_id.de_compact())}), - ); + let mut responsible_ids = Vec::new(); + responsible_ids.push( + json!({ "@id": Value::String(delegation.responsible_id.de_compact())}), + ); - delegationdoc - .insert(Prov::ActedOnBehalfOf.to_string(), Value::Array(responsible_ids)); + delegationdoc + .insert(Prov::ActedOnBehalfOf.to_string(), Value::Array(responsible_ids)); - let mut delegate_ids = Vec::new(); - delegate_ids - .push(json!({ "@id": Value::String(delegation.delegate_id.de_compact())})); + let mut delegate_ids = Vec::new(); + delegate_ids + .push(json!({ "@id": Value::String(delegation.delegate_id.de_compact())})); - delegationdoc.insert(Prov::Delegate.to_string(), Value::Array(delegate_ids)); + delegationdoc.insert(Prov::Delegate.to_string(), Value::Array(delegate_ids)); - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(delegation.namespace_id.de_compact()), })); - delegationdoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); + delegationdoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); - doc.push(Value::Object(delegationdoc)); - } - } - } + doc.push(Value::Object(delegationdoc)); + } + } + } - for ((namespace, id), activity) in self.activities.iter() { - let mut typ = vec![Prov::Activity.de_compact()]; - if let Some(x) = activity.domaintype_id.as_ref() { - typ.push(x.de_compact()) - } + for ((namespace, id), activity) in self.activities.iter() { + let mut typ = vec![Prov::Activity.de_compact()]; + if let Some(x) = activity.domaintype_id.as_ref() { + typ.push(x.de_compact()) + } - if let Value::Object(mut activitydoc) = json!({ + if let Value::Object(mut activitydoc) = json!({ "@id": (*id.de_compact()), "@type": typ, "http://chronicle.works/chronicle/ns#externalId": [{ "@value": activity.external_id.as_str(), }] }) { - if let Some(time) = activity.started { - let mut values = Vec::new(); - values.push(json!({"@value": time.to_rfc3339()})); + if let Some(time) = activity.started { + let mut values = Vec::new(); + values.push(json!({"@value": time.to_rfc3339()})); - activitydoc.insert( - "http://www.w3.org/ns/prov#startedAtTime".to_string(), - Value::Array(values), - ); - } + activitydoc.insert( + "http://www.w3.org/ns/prov#startedAtTime".to_string(), + Value::Array(values), + ); + } - if let Some(time) = activity.ended { - let mut values = Vec::new(); - values.push(json!({"@value": time.to_rfc3339()})); + if let Some(time) = activity.ended { + let mut values = Vec::new(); + values.push(json!({"@value": time.to_rfc3339()})); - activitydoc.insert( - "http://www.w3.org/ns/prov#endedAtTime".to_string(), - Value::Array(values), - ); - } + activitydoc.insert( + "http://www.w3.org/ns/prov#endedAtTime".to_string(), + Value::Array(values), + ); + } - if let Some(asoc) = self.association.get(&(namespace.to_owned(), id.to_owned())) { - let mut ids = Vec::new(); + if let Some(asoc) = self.association.get(&(namespace.to_owned(), id.to_owned())) { + let mut ids = Vec::new(); - let mut qualified_ids = 
Vec::new(); - for asoc in asoc.iter() { - ids.push(json!({"@id": asoc.agent_id.de_compact()})); - qualified_ids.push(json!({"@id": asoc.id.de_compact()})); - } + let mut qualified_ids = Vec::new(); + for asoc in asoc.iter() { + ids.push(json!({"@id": asoc.agent_id.de_compact()})); + qualified_ids.push(json!({"@id": asoc.id.de_compact()})); + } - activitydoc.insert(Prov::WasAssociatedWith.de_compact(), Value::Array(ids)); + activitydoc.insert(Prov::WasAssociatedWith.de_compact(), Value::Array(ids)); - activitydoc.insert( - Prov::QualifiedAssociation.de_compact(), - Value::Array(qualified_ids), - ); - } + activitydoc.insert( + Prov::QualifiedAssociation.de_compact(), + Value::Array(qualified_ids), + ); + } - if let Some(usage) = self.usage.get(&(namespace.to_owned(), id.to_owned())) { - let mut ids = Vec::new(); + if let Some(usage) = self.usage.get(&(namespace.to_owned(), id.to_owned())) { + let mut ids = Vec::new(); - for usage in usage.iter() { - ids.push(json!({"@id": usage.entity_id.de_compact()})); - } + for usage in usage.iter() { + ids.push(json!({"@id": usage.entity_id.de_compact()})); + } - activitydoc.insert(Prov::Used.de_compact(), Value::Array(ids)); - } + activitydoc.insert(Prov::Used.de_compact(), Value::Array(ids)); + } - let mut values = Vec::new(); + let mut values = Vec::new(); - values.push(json!({ + values.push(json!({ "@id": Value::String(activity.namespace_id.de_compact()), })); - activitydoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); + activitydoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values)); - if let Some(activities) = - self.was_informed_by.get(&(namespace.to_owned(), id.to_owned())) - { - let mut values = Vec::new(); + if let Some(activities) = + self.was_informed_by.get(&(namespace.to_owned(), id.to_owned())) + { + let mut values = Vec::new(); - for (_, activity) in (*activities).iter() { - values.push(json!({ + for (_, activity) in (*activities).iter() { + values.push(json!({ "@id": Value::String(activity.de_compact()), })); - } - activitydoc.insert(Prov::WasInformedBy.to_string(), Value::Array(values)); - } + } + activitydoc.insert(Prov::WasInformedBy.to_string(), Value::Array(values)); + } - Self::write_attributes(&mut activitydoc, activity.attributes.iter()); + Self::write_attributes(&mut activitydoc, activity.attributes.iter()); - doc.push(Value::Object(activitydoc)); - } - } + doc.push(Value::Object(activitydoc)); + } + } - for ((namespace, id), entity) in self.entities.iter() { - let mut typ = vec![Prov::Entity.de_compact()]; - if let Some(x) = entity.domaintypeid.as_ref() { - typ.push(x.de_compact()) - } + for ((namespace, id), entity) in self.entities.iter() { + let mut typ = vec![Prov::Entity.de_compact()]; + if let Some(x) = entity.domaintypeid.as_ref() { + typ.push(x.de_compact()) + } - if let Value::Object(mut entitydoc) = json!({ + if let Value::Object(mut entitydoc) = json!({ "@id": (*id.de_compact()), "@type": typ, "http://chronicle.works/chronicle/ns#externalId": [{ "@value": entity.external_id.as_str() }] }) { - if let Some(derivation) = - self.derivation.get(&(namespace.to_owned(), id.to_owned())) - { - let mut derived_ids = Vec::new(); - let mut primary_ids = Vec::new(); - let mut quotation_ids = Vec::new(); - let mut revision_ids = Vec::new(); - - for derivation in derivation.iter() { - let id = json!({"@id": derivation.used_id.de_compact()}); - match derivation.typ { - DerivationType::PrimarySource => primary_ids.push(id), - DerivationType::Quotation => quotation_ids.push(id), - 
DerivationType::Revision => revision_ids.push(id), - DerivationType::None => derived_ids.push(id), - } - } - if !derived_ids.is_empty() { - entitydoc - .insert(Prov::WasDerivedFrom.to_string(), Value::Array(derived_ids)); - } - if !primary_ids.is_empty() { - entitydoc - .insert(Prov::HadPrimarySource.to_string(), Value::Array(primary_ids)); - } - if !quotation_ids.is_empty() { - entitydoc - .insert(Prov::WasQuotedFrom.to_string(), Value::Array(quotation_ids)); - } - if !revision_ids.is_empty() { - entitydoc - .insert(Prov::WasRevisionOf.to_string(), Value::Array(revision_ids)); - } - } - - if let Some(generation) = - self.generation.get(&(namespace.to_owned(), id.to_owned())) - { - let mut ids = Vec::new(); - - for generation in generation.iter() { - ids.push(json!({"@id": generation.activity_id.de_compact()})); - } - - entitydoc.insert(Prov::WasGeneratedBy.to_string(), Value::Array(ids)); - } - - let entity_key = (entity.namespace_id.clone(), entity.id.clone()); - - if let Some(attributions) = self.attribution.get(&entity_key) { - let mut ids = Vec::new(); - - let mut qualified_ids = Vec::new(); - for attribution in attributions.iter() { - ids.push(json!({"@id": attribution.agent_id.de_compact()})); - qualified_ids.push(json!({"@id": attribution.id.de_compact()})); - } - - entitydoc.insert(Prov::WasAttributedTo.de_compact(), Value::Array(ids)); - - entitydoc.insert( - Prov::QualifiedAttribution.de_compact(), - Value::Array(qualified_ids), - ); - } - - let mut values = Vec::new(); - - values.push(json!({ + if let Some(derivation) = + self.derivation.get(&(namespace.to_owned(), id.to_owned())) + { + let mut derived_ids = Vec::new(); + let mut primary_ids = Vec::new(); + let mut quotation_ids = Vec::new(); + let mut revision_ids = Vec::new(); + + for derivation in derivation.iter() { + let id = json!({"@id": derivation.used_id.de_compact()}); + match derivation.typ { + DerivationType::PrimarySource => primary_ids.push(id), + DerivationType::Quotation => quotation_ids.push(id), + DerivationType::Revision => revision_ids.push(id), + DerivationType::None => derived_ids.push(id), + } + } + if !derived_ids.is_empty() { + entitydoc + .insert(Prov::WasDerivedFrom.to_string(), Value::Array(derived_ids)); + } + if !primary_ids.is_empty() { + entitydoc + .insert(Prov::HadPrimarySource.to_string(), Value::Array(primary_ids)); + } + if !quotation_ids.is_empty() { + entitydoc + .insert(Prov::WasQuotedFrom.to_string(), Value::Array(quotation_ids)); + } + if !revision_ids.is_empty() { + entitydoc + .insert(Prov::WasRevisionOf.to_string(), Value::Array(revision_ids)); + } + } + + if let Some(generation) = + self.generation.get(&(namespace.to_owned(), id.to_owned())) + { + let mut ids = Vec::new(); + + for generation in generation.iter() { + ids.push(json!({"@id": generation.activity_id.de_compact()})); + } + + entitydoc.insert(Prov::WasGeneratedBy.to_string(), Value::Array(ids)); + } + + let entity_key = (entity.namespace_id.clone(), entity.id.clone()); + + if let Some(attributions) = self.attribution.get(&entity_key) { + let mut ids = Vec::new(); + + let mut qualified_ids = Vec::new(); + for attribution in attributions.iter() { + ids.push(json!({"@id": attribution.agent_id.de_compact()})); + qualified_ids.push(json!({"@id": attribution.id.de_compact()})); + } + + entitydoc.insert(Prov::WasAttributedTo.de_compact(), Value::Array(ids)); + + entitydoc.insert( + Prov::QualifiedAttribution.de_compact(), + Value::Array(qualified_ids), + ); + } + + let mut values = Vec::new(); + + values.push(json!({ "@id": 
Value::String(entity.namespace_id.de_compact()),
            }));
-            entitydoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values));
+            entitydoc.insert(Chronicle::HasNamespace.to_string(), Value::Array(values));
-            Self::write_attributes(&mut entitydoc, entity.attributes.iter());
+            Self::write_attributes(&mut entitydoc, entity.attributes.iter());
-            doc.push(Value::Object(entitydoc));
-        }
-    }
+            doc.push(Value::Object(entitydoc));
+        }
+    }
-        ExpandedJson(Value::Array(doc))
-    }
+        ExpandedJson(Value::Array(doc))
+    }
}

impl ProvModel {
-    fn write_attributes<'a, I: Iterator<Item = &'a Attribute>>(
-        doc: &mut serde_json::Map<String, Value>,
-        attributes: I,
-    ) {
-        let mut attribute_node = serde_json::Map::new();
-
-        for attribute in attributes {
-            attribute_node.insert(attribute.typ.clone(), attribute.value.0.clone());
-        }
-
-        doc.insert(
-            Chronicle::Value.to_string(),
-            json!([{"@value" : Value::Object(attribute_node), "@type": "@json"}]),
-        );
-    }
+    fn write_attributes<'a, I: Iterator<Item = &'a Attribute>>(
+        doc: &mut serde_json::Map<String, Value>,
+        attributes: I,
+    ) {
+        let mut attribute_node = serde_json::Map::new();
+
+        for attribute in attributes {
+            attribute_node.insert(attribute.typ.clone(), attribute.value.0.clone());
+        }
+
+        doc.insert(
+            Chronicle::Value.to_string(),
+            json!([{"@value" : Value::Object(attribute_node), "@type": "@json"}]),
+        );
+    }
}

impl ToJson for ChronicleOperation {
-    fn to_json(&self) -> ExpandedJson {
-        let mut operation: Vec<Value> = Vec::new();
-
-        let o = match self {
-            ChronicleOperation::CreateNamespace(CreateNamespace { id, .. }) => {
-                let mut o = Value::new_operation(vocab::ChronicleOperation::CreateNamespace);
-
-                o.has_value(
-                    OperationValue::string(id.external_id_part()),
-                    vocab::ChronicleOperation::NamespaceName,
-                );
-
-                o.has_value(
-                    OperationValue::string(id.uuid_part()),
-                    vocab::ChronicleOperation::NamespaceUuid,
-                );
-
-                o
-            }
-            ChronicleOperation::AgentExists(AgentExists { namespace, id }) => {
-                let mut o = Value::new_operation(vocab::ChronicleOperation::AgentExists);
-
-                o.has_value(
-                    OperationValue::string(namespace.external_id_part()),
-                    vocab::ChronicleOperation::NamespaceName,
-                );
-
-                o.has_value(
-                    OperationValue::string(namespace.uuid_part()),
-                    vocab::ChronicleOperation::NamespaceUuid,
-                );
-
-                o.has_value(
-                    OperationValue::string(id.external_id_part()),
-                    vocab::ChronicleOperation::AgentName,
-                );
-
-                o
-            }
-            ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf {
-                namespace,
-                id: _, // This is derivable from components
-                delegate_id,
-                activity_id,
-                role,
-                responsible_id,
-            }) => {
-                let mut o = Value::new_operation(vocab::ChronicleOperation::AgentActsOnBehalfOf);
-
-                o.has_value(
-                    OperationValue::string(namespace.external_id_part()),
-                    vocab::ChronicleOperation::NamespaceName,
-                );
-
-                o.has_value(
-                    OperationValue::string(namespace.uuid_part()),
-                    vocab::ChronicleOperation::NamespaceUuid,
-                );
-
-                o.has_value(
-                    OperationValue::string(delegate_id.external_id_part()),
-                    vocab::ChronicleOperation::DelegateId,
-                );
-
-                o.has_value(
-                    OperationValue::string(responsible_id.external_id_part()),
-                    vocab::ChronicleOperation::ResponsibleId,
-                );
-
-                if let Some(role) = role {
-                    o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role);
-                }
-
-                if let Some(activity_id) = activity_id {
-                    o.has_value(
-                        OperationValue::string(activity_id.external_id_part()),
-                        vocab::ChronicleOperation::ActivityName,
-                    );
-                }
-
-                o
-            }
-            ChronicleOperation::ActivityExists(ActivityExists { namespace, id }) => {
-                let mut o = Value::new_operation(vocab::ChronicleOperation::ActivityExists);
-
-                o.has_value(
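// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the patch: write_attributes above
// collapses all attributes into a single @json-typed literal. For one
// attribute "size" = 3 (example names and values are hypothetical), the
// inserted value would be:
//
//     use serde_json::json;
//
//     let attribute_value = json!([{
//         "@value": { "size": 3 },
//         "@type": "@json"
//     }]);
//     // This array becomes the value of the Chronicle::Value key on the node.
// ---------------------------------------------------------------------------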
- OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o - } - ChronicleOperation::StartActivity(StartActivity { namespace, id, time }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::StartActivity); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o.has_value( - OperationValue::string(time.to_rfc3339()), - vocab::ChronicleOperation::StartActivityTime, - ); - - o - } - ChronicleOperation::EndActivity(EndActivity { namespace, id, time }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::EndActivity); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o.has_value( - OperationValue::string(time.to_rfc3339()), - vocab::ChronicleOperation::EndActivityTime, - ); - - o - } - ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::ActivityUses); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - o.has_value( - OperationValue::string(activity.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o - } - ChronicleOperation::EntityExists(EntityExists { namespace, id }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::EntityExists); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - o - } - ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::WasGeneratedBy); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - o.has_value( - OperationValue::string(activity.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o - } - ChronicleOperation::WasInformedBy(WasInformedBy { - namespace, - activity, - informing_activity, - }) => { - let mut o = 
Value::new_operation(vocab::ChronicleOperation::WasInformedBy); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(activity.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o.has_value( - OperationValue::string(informing_activity.external_id_part()), - vocab::ChronicleOperation::InformingActivityName, - ); - - o - } - ChronicleOperation::EntityDerive(EntityDerive { - namespace, - id, - used_id, - activity_id, - typ, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::EntityDerive); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - o.has_value( - OperationValue::string(used_id.external_id_part()), - vocab::ChronicleOperation::UsedEntityName, - ); - - if let Some(activity) = activity_id { - o.has_value( - OperationValue::string(activity.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - } - - if *typ != DerivationType::None { - o.derivation(typ); - } - - o - } - ChronicleOperation::SetAttributes(SetAttributes::Entity { - namespace, - id, - attributes, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - if let Some(domaintypeid) = attributes.get_typ() { - let id = OperationValue::string(domaintypeid.external_id_part()); - o.has_value(id, vocab::ChronicleOperation::DomaintypeId); - } - - o.attributes_object(attributes); - - o - } - ChronicleOperation::SetAttributes(SetAttributes::Activity { - namespace, - id, - attributes, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - if let Some(domaintypeid) = attributes.get_typ() { - let id = OperationValue::string(domaintypeid.external_id_part()); - o.has_value(id, vocab::ChronicleOperation::DomaintypeId); - } - - o.attributes_object(attributes); - - o - } - ChronicleOperation::SetAttributes(SetAttributes::Agent { - namespace, - id, - attributes, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(id.external_id_part()), - vocab::ChronicleOperation::AgentName, - ); - - if let 
Some(domaintypeid) = attributes.get_typ() { - let id = OperationValue::string(domaintypeid.external_id_part()); - o.has_value(id, vocab::ChronicleOperation::DomaintypeId); - } - - o.attributes_object(attributes); - - o - } - ChronicleOperation::WasAssociatedWith(WasAssociatedWith { - id: _, - role, - namespace, - activity_id, - agent_id, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::WasAssociatedWith); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(activity_id.external_id_part()), - vocab::ChronicleOperation::ActivityName, - ); - - o.has_value( - OperationValue::string(agent_id.external_id_part()), - vocab::ChronicleOperation::AgentName, - ); - - if let Some(role) = role { - o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role); - } - - o - } - ChronicleOperation::WasAttributedTo(WasAttributedTo { - id: _, - role, - namespace, - entity_id, - agent_id, - }) => { - let mut o = Value::new_operation(vocab::ChronicleOperation::WasAttributedTo); - - o.has_value( - OperationValue::string(namespace.external_id_part()), - vocab::ChronicleOperation::NamespaceName, - ); - - o.has_value( - OperationValue::string(namespace.uuid_part()), - vocab::ChronicleOperation::NamespaceUuid, - ); - - o.has_value( - OperationValue::string(entity_id.external_id_part()), - vocab::ChronicleOperation::EntityName, - ); - - o.has_value( - OperationValue::string(agent_id.external_id_part()), - vocab::ChronicleOperation::AgentName, - ); - - if let Some(role) = role { - o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role); - } - - o - } - }; - operation.push(o); - super::ExpandedJson(operation.into()) - } + fn to_json(&self) -> ExpandedJson { + let mut operation: Vec = Vec::new(); + + let o = match self { + ChronicleOperation::CreateNamespace(CreateNamespace { id, .. 
}) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::CreateNamespace); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(id.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o + }, + ChronicleOperation::AgentExists(AgentExists { namespace, id }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::AgentExists); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::AgentName, + ); + + o + }, + ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { + namespace, + id: _, // This is derivable from components + delegate_id, + activity_id, + role, + responsible_id, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::AgentActsOnBehalfOf); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(delegate_id.external_id_part()), + vocab::ChronicleOperation::DelegateId, + ); + + o.has_value( + OperationValue::string(responsible_id.external_id_part()), + vocab::ChronicleOperation::ResponsibleId, + ); + + if let Some(role) = role { + o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role); + } + + if let Some(activity_id) = activity_id { + o.has_value( + OperationValue::string(activity_id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + } + + o + }, + ChronicleOperation::ActivityExists(ActivityExists { namespace, id }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::ActivityExists); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o + }, + ChronicleOperation::StartActivity(StartActivity { namespace, id, time }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::StartActivity); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o.has_value( + OperationValue::string(time.to_rfc3339()), + vocab::ChronicleOperation::StartActivityTime, + ); + + o + }, + ChronicleOperation::EndActivity(EndActivity { namespace, id, time }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::EndActivity); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o.has_value( + 
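// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the patch: each arm above builds
// one expanded JSON-LD node rooted at the blank node "_:n1". For AgentExists
// the rough shape is as follows (property IRIs abbreviated; the real ones
// come from the vocab module, and the example values are hypothetical):
//
//     use serde_json::json;
//
//     let example = json!({
//         "@id": "_:n1",
//         "@type": ["...#AgentExists"],
//         "...#namespaceName": [{ "@value": "default" }],
//         "...#namespaceUuid": [{ "@value": "a-uuid" }],
//         "...#agentName":     [{ "@value": "example_agent" }]
//     });
// ---------------------------------------------------------------------------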
OperationValue::string(time.to_rfc3339()), + vocab::ChronicleOperation::EndActivityTime, + ); + + o + }, + ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::ActivityUses); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + o.has_value( + OperationValue::string(activity.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o + }, + ChronicleOperation::EntityExists(EntityExists { namespace, id }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::EntityExists); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + o + }, + ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::WasGeneratedBy); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + o.has_value( + OperationValue::string(activity.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o + }, + ChronicleOperation::WasInformedBy(WasInformedBy { + namespace, + activity, + informing_activity, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::WasInformedBy); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(activity.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o.has_value( + OperationValue::string(informing_activity.external_id_part()), + vocab::ChronicleOperation::InformingActivityName, + ); + + o + }, + ChronicleOperation::EntityDerive(EntityDerive { + namespace, + id, + used_id, + activity_id, + typ, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::EntityDerive); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + o.has_value( + OperationValue::string(used_id.external_id_part()), + vocab::ChronicleOperation::UsedEntityName, + ); + + if let Some(activity) = activity_id { + o.has_value( + OperationValue::string(activity.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + } + + if *typ != DerivationType::None { + o.derivation(typ); + } + + o + }, + ChronicleOperation::SetAttributes(SetAttributes::Entity { + 
namespace, + id, + attributes, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + if let Some(domaintypeid) = attributes.get_typ() { + let id = OperationValue::string(domaintypeid.external_id_part()); + o.has_value(id, vocab::ChronicleOperation::DomaintypeId); + } + + o.attributes_object(attributes); + + o + }, + ChronicleOperation::SetAttributes(SetAttributes::Activity { + namespace, + id, + attributes, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + if let Some(domaintypeid) = attributes.get_typ() { + let id = OperationValue::string(domaintypeid.external_id_part()); + o.has_value(id, vocab::ChronicleOperation::DomaintypeId); + } + + o.attributes_object(attributes); + + o + }, + ChronicleOperation::SetAttributes(SetAttributes::Agent { + namespace, + id, + attributes, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::SetAttributes); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(id.external_id_part()), + vocab::ChronicleOperation::AgentName, + ); + + if let Some(domaintypeid) = attributes.get_typ() { + let id = OperationValue::string(domaintypeid.external_id_part()); + o.has_value(id, vocab::ChronicleOperation::DomaintypeId); + } + + o.attributes_object(attributes); + + o + }, + ChronicleOperation::WasAssociatedWith(WasAssociatedWith { + id: _, + role, + namespace, + activity_id, + agent_id, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::WasAssociatedWith); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + OperationValue::string(activity_id.external_id_part()), + vocab::ChronicleOperation::ActivityName, + ); + + o.has_value( + OperationValue::string(agent_id.external_id_part()), + vocab::ChronicleOperation::AgentName, + ); + + if let Some(role) = role { + o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role); + } + + o + }, + ChronicleOperation::WasAttributedTo(WasAttributedTo { + id: _, + role, + namespace, + entity_id, + agent_id, + }) => { + let mut o = Value::new_operation(vocab::ChronicleOperation::WasAttributedTo); + + o.has_value( + OperationValue::string(namespace.external_id_part()), + vocab::ChronicleOperation::NamespaceName, + ); + + o.has_value( + OperationValue::string(namespace.uuid_part()), + vocab::ChronicleOperation::NamespaceUuid, + ); + + o.has_value( + 
OperationValue::string(entity_id.external_id_part()), + vocab::ChronicleOperation::EntityName, + ); + + o.has_value( + OperationValue::string(agent_id.external_id_part()), + vocab::ChronicleOperation::AgentName, + ); + + if let Some(role) = role { + o.has_value(OperationValue::string(role), vocab::ChronicleOperation::Role); + } + + o + }, + }; + operation.push(o); + super::ExpandedJson(operation.into()) + } } struct OperationValue(String); impl OperationValue { - fn string(value: impl ToString) -> Self { - OperationValue(value.to_string()) - } - - #[allow(dead_code)] - fn identity(id: ChronicleIri) -> Self { - OperationValue(id.to_string()) - } + fn string(value: impl ToString) -> Self { + OperationValue(value.to_string()) + } + + #[allow(dead_code)] + fn identity(id: ChronicleIri) -> Self { + OperationValue(id.to_string()) + } } trait Operate { - fn new_operation(op: vocab::ChronicleOperation) -> Self; - fn new_type(id: OperationValue, op: vocab::ChronicleOperation) -> Self; - fn new_value(id: OperationValue) -> Self; - fn new_id(id: OperationValue) -> Self; - fn has_value(&mut self, value: OperationValue, op: vocab::ChronicleOperation); - fn has_id(&mut self, id: OperationValue, op: vocab::ChronicleOperation); - fn attributes_object(&mut self, attributes: &Attributes); - fn derivation(&mut self, typ: &DerivationType); + fn new_operation(op: vocab::ChronicleOperation) -> Self; + fn new_type(id: OperationValue, op: vocab::ChronicleOperation) -> Self; + fn new_value(id: OperationValue) -> Self; + fn new_id(id: OperationValue) -> Self; + fn has_value(&mut self, value: OperationValue, op: vocab::ChronicleOperation); + fn has_id(&mut self, id: OperationValue, op: vocab::ChronicleOperation); + fn attributes_object(&mut self, attributes: &Attributes); + fn derivation(&mut self, typ: &DerivationType); } impl Operate for Value { - fn new_type(id: OperationValue, op: vocab::ChronicleOperation) -> Self { - json!({ + fn new_type(id: OperationValue, op: vocab::ChronicleOperation) -> Self { + json!({ "@id": id.0, "@type": [op.as_str()], }) - } + } - fn new_value(id: OperationValue) -> Self { - json!({ + fn new_value(id: OperationValue) -> Self { + json!({ "@value": id.0 }) - } + } - fn new_id(id: OperationValue) -> Self { - json!({ + fn new_id(id: OperationValue) -> Self { + json!({ "@id": id.0 }) - } - - fn has_value(&mut self, value: OperationValue, op: vocab::ChronicleOperation) { - if let Value::Object(map) = self { - let key = op.to_string(); - let mut values: Vec = Vec::new(); - let object = Self::new_value(value); - values.push(object); - map.insert(key, Value::Array(values)); - } else { - panic!("use on JSON objects only"); - } - } - - fn has_id(&mut self, id: OperationValue, op: vocab::ChronicleOperation) { - if let Value::Object(map) = self { - let key = op.to_string(); - let mut value: Vec = Vec::new(); - let object = Self::new_id(id); - value.push(object); - map.insert(key, Value::Array(value)); - } else { - panic!("use on JSON objects only"); - } - } - - fn new_operation(op: vocab::ChronicleOperation) -> Self { - let id = OperationValue::string("_:n1"); - Self::new_type(id, op) - } - - fn attributes_object(&mut self, attributes: &Attributes) { - if let Value::Object(map) = self { - let mut attribute_node = serde_json::Map::new(); - for attribute in attributes.get_values() { - attribute_node.insert(attribute.typ.clone(), attribute.value.0.clone()); - } - map.insert( - vocab::ChronicleOperation::Attributes.to_string(), - json!([{"@value" : attribute_node, "@type": "@json"}]), - ); - } else { 
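// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the patch: the Operate helpers
// below wrap every property in a one-element array, distinguishing literals
// from references (example values are hypothetical):
//
//     use serde_json::json;
//
//     let literal   = json!([{ "@value": "an-external-id" }]); // has_value()
//     let reference = json!([{ "@id": "some:iri" }]);          // has_id()
//     // Both insert map[op.to_string()] = array, and panic if the receiver
//     // is not a JSON object.
// ---------------------------------------------------------------------------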
- panic!("use on JSON objects only"); - } - } - - fn derivation(&mut self, typ: &DerivationType) { - let typ = match typ { - DerivationType::None => panic!("should never handle a None derivation type"), - DerivationType::Revision => "Revision", - DerivationType::Quotation => "Quotation", - DerivationType::PrimarySource => "PrimarySource", - }; - let id = OperationValue::string(typ); - - self.has_value(id, vocab::ChronicleOperation::DerivationType); - } + } + + fn has_value(&mut self, value: OperationValue, op: vocab::ChronicleOperation) { + if let Value::Object(map) = self { + let key = op.to_string(); + let mut values: Vec = Vec::new(); + let object = Self::new_value(value); + values.push(object); + map.insert(key, Value::Array(values)); + } else { + panic!("use on JSON objects only"); + } + } + + fn has_id(&mut self, id: OperationValue, op: vocab::ChronicleOperation) { + if let Value::Object(map) = self { + let key = op.to_string(); + let mut value: Vec = Vec::new(); + let object = Self::new_id(id); + value.push(object); + map.insert(key, Value::Array(value)); + } else { + panic!("use on JSON objects only"); + } + } + + fn new_operation(op: vocab::ChronicleOperation) -> Self { + let id = OperationValue::string("_:n1"); + Self::new_type(id, op) + } + + fn attributes_object(&mut self, attributes: &Attributes) { + if let Value::Object(map) = self { + let mut attribute_node = serde_json::Map::new(); + for attribute in attributes.get_values() { + attribute_node.insert(attribute.typ.clone(), attribute.value.0.clone()); + } + map.insert( + vocab::ChronicleOperation::Attributes.to_string(), + json!([{"@value" : attribute_node, "@type": "@json"}]), + ); + } else { + panic!("use on JSON objects only"); + } + } + + fn derivation(&mut self, typ: &DerivationType) { + let typ = match typ { + DerivationType::None => panic!("should never handle a None derivation type"), + DerivationType::Revision => "Revision", + DerivationType::Quotation => "Quotation", + DerivationType::PrimarySource => "PrimarySource", + }; + let id = OperationValue::string(typ); + + self.has_value(id, vocab::ChronicleOperation::DerivationType); + } } diff --git a/crates/common/src/prov/model/mod.rs b/crates/common/src/prov/model/mod.rs index 6fc956d53..df24fa5f9 100644 --- a/crates/common/src/prov/model/mod.rs +++ b/crates/common/src/prov/model/mod.rs @@ -12,19 +12,17 @@ mod proptest; use core::{convert::Infallible, fmt::Debug}; #[cfg(not(feature = "std"))] use parity_scale_codec::{ - alloc::collections::{BTreeMap, BTreeSet}, - alloc::string::String, - alloc::vec::Vec, + alloc::collections::{BTreeMap, BTreeSet}, + alloc::string::String, + alloc::vec::Vec, }; #[cfg(not(feature = "std"))] -use scale_info::{ - prelude::borrow::ToOwned, prelude::sync::Arc, -}; +use scale_info::{prelude::borrow::ToOwned, prelude::sync::Arc}; use serde::Serialize; #[cfg(feature = "std")] use std::{ - collections::{BTreeMap, BTreeSet}, - sync::Arc, + collections::{BTreeMap, BTreeSet}, + sync::Arc, }; #[cfg(feature = "std")] @@ -36,524 +34,524 @@ use tracing::{instrument, trace}; use uuid::Uuid; use crate::{ - attributes::{Attribute, Attributes}, - identity::IdentityError, - prov::operations::WasAttributedTo, + attributes::{Attribute, Attributes}, + identity::IdentityError, + prov::operations::WasAttributedTo, }; use super::{ - id, - operations::{ - ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation, - CreateNamespace, DerivationType, EndActivity, EntityDerive, EntityExists, SetAttributes, - StartActivity, TimeWrapper, 
WasAssociatedWith, WasGeneratedBy, WasInformedBy, - }, - ActivityId, AgentId, AssociationId, AttributionId, ChronicleIri, DelegationId, DomaintypeId, - EntityId, ExternalId, ExternalIdPart, NamespaceId, Role, UuidPart, + id, + operations::{ + ActivityExists, ActivityUses, ActsOnBehalfOf, AgentExists, ChronicleOperation, + CreateNamespace, DerivationType, EndActivity, EntityDerive, EntityExists, SetAttributes, + StartActivity, TimeWrapper, WasAssociatedWith, WasGeneratedBy, WasInformedBy, + }, + ActivityId, AgentId, AssociationId, AttributionId, ChronicleIri, DelegationId, DomaintypeId, + EntityId, ExternalId, ExternalIdPart, NamespaceId, Role, UuidPart, }; #[cfg(feature = "json-ld")] #[derive(Error, Debug)] pub enum ProcessorError { - #[error("Invalid address")] - Address, - #[error("Json Ld Error {0}")] - Compaction( - #[from] - #[source] - json_ld::CompactionError, - ), - #[error("Contradiction {0}")] - Contradiction(Contradiction), - #[error("Json Ld Error {inner}")] - Expansion { inner: String }, - #[error("IdentityError {0}")] - Identity( - #[from] - #[source] - IdentityError, - ), - #[error("Invalid IRI {0}")] - IRef( - #[from] - #[source] - iref::Error, - ), - #[error("Not a Chronicle IRI {0}")] - NotAChronicleIri( - #[from] - #[source] - id::ParseIriError, - ), - #[error("Missing @id {object:?}")] - MissingId { object: serde_json::Value }, - #[error("Missing property {iri}:{object:?}")] - MissingProperty { iri: String, object: serde_json::Value }, - #[error("Json LD object is not a node {0}")] - NotANode(serde_json::Value), - #[error("Chronicle value is not a JSON object")] - NotAnObject, - - #[error("Missing activity")] - MissingActivity, - - #[error("Missing namespace")] - MissingNamespace, - #[error("Missing entity")] - MissingEntity, - #[error("Missing agent")] - MissingAgent, - #[error("Missing time")] - MissingTime, - #[error("Missing derivation")] - MissingDerivation, - #[error("Missing domain type")] - MissingDomainType, - - #[error("OpaExecutorError: {0}")] - OpaExecutor( - #[from] - #[source] - anyhow::Error, - ), - #[error("Malformed JSON {0}")] - SerdeJson( - #[from] - #[source] - serde_json::Error, - ), - - #[error("Submission {0}")] - SubmissionFormat( - #[from] - #[source] - PayloadError, - ), - #[error("Submission body format: {0}")] - Time( - #[from] - #[source] - chrono::ParseError, - ), - #[error("Tokio Error")] - Tokio, - #[error("State is not valid utf8 {0}")] - Utf8( - #[from] - #[source] - core::str::Utf8Error, - ), + #[error("Invalid address")] + Address, + #[error("Json Ld Error {0}")] + Compaction( + #[from] + #[source] + json_ld::CompactionError, + ), + #[error("Contradiction {0}")] + Contradiction(Contradiction), + #[error("Json Ld Error {inner}")] + Expansion { inner: String }, + #[error("IdentityError {0}")] + Identity( + #[from] + #[source] + IdentityError, + ), + #[error("Invalid IRI {0}")] + IRef( + #[from] + #[source] + iref::Error, + ), + #[error("Not a Chronicle IRI {0}")] + NotAChronicleIri( + #[from] + #[source] + id::ParseIriError, + ), + #[error("Missing @id {object:?}")] + MissingId { object: serde_json::Value }, + #[error("Missing property {iri}:{object:?}")] + MissingProperty { iri: String, object: serde_json::Value }, + #[error("Json LD object is not a node {0}")] + NotANode(serde_json::Value), + #[error("Chronicle value is not a JSON object")] + NotAnObject, + + #[error("Missing activity")] + MissingActivity, + + #[error("Missing namespace")] + MissingNamespace, + #[error("Missing entity")] + MissingEntity, + #[error("Missing agent")] + 
MissingAgent, + #[error("Missing time")] + MissingTime, + #[error("Missing derivation")] + MissingDerivation, + #[error("Missing domain type")] + MissingDomainType, + + #[error("OpaExecutorError: {0}")] + OpaExecutor( + #[from] + #[source] + anyhow::Error, + ), + #[error("Malformed JSON {0}")] + SerdeJson( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Submission {0}")] + SubmissionFormat( + #[from] + #[source] + PayloadError, + ), + #[error("Submission body format: {0}")] + Time( + #[from] + #[source] + chrono::ParseError, + ), + #[error("Tokio Error")] + Tokio, + #[error("State is not valid utf8 {0}")] + Utf8( + #[from] + #[source] + core::str::Utf8Error, + ), } #[cfg(not(feature = "json-ld"))] #[derive(Error, Debug)] pub enum ProcessorError { - #[error("Invalid address")] - Address, - #[error("Contradiction {0}")] - Contradiction(Contradiction), - #[error("IdentityError {0}")] - Identity( - #[from] - #[source] - IdentityError, - ), - #[error("Not a Chronicle IRI {0}")] - NotAChronicleIri( - #[from] - #[source] - id::ParseIriError, - ), - #[error("Missing @id {object:?}")] - MissingId { object: serde_json::Value }, - #[error("Missing property {iri}:{object:?}")] - MissingProperty { iri: String, object: serde_json::Value }, - #[error("Json LD object is not a node {0}")] - NotANode(serde_json::Value), - #[error("Chronicle value is not a JSON object")] - NotAnObject, - #[error("OpaExecutorError: {0}")] - OpaExecutor( - #[from] - #[source] - anyhow::Error, - ), - #[error("Malformed JSON {0}")] - SerdeJson( - #[from] - #[source] - serde_json::Error, - ), - #[error("Unparsable date/time {0}")] - SubmissionFormat( - #[from] - #[source] - PayloadError, - ), - #[error("Submission body format: {0}")] - Time( - #[from] - #[source] - chrono::ParseError, - ), - #[error("Tokio Error")] - Tokio, - #[error("State is not valid utf8 {0}")] - Utf8( - #[from] - #[source] - core::str::Utf8Error, - ), + #[error("Invalid address")] + Address, + #[error("Contradiction {0}")] + Contradiction(Contradiction), + #[error("IdentityError {0}")] + Identity( + #[from] + #[source] + IdentityError, + ), + #[error("Not a Chronicle IRI {0}")] + NotAChronicleIri( + #[from] + #[source] + id::ParseIriError, + ), + #[error("Missing @id {object:?}")] + MissingId { object: serde_json::Value }, + #[error("Missing property {iri}:{object:?}")] + MissingProperty { iri: String, object: serde_json::Value }, + #[error("Json LD object is not a node {0}")] + NotANode(serde_json::Value), + #[error("Chronicle value is not a JSON object")] + NotAnObject, + #[error("OpaExecutorError: {0}")] + OpaExecutor( + #[from] + #[source] + anyhow::Error, + ), + #[error("Malformed JSON {0}")] + SerdeJson( + #[from] + #[source] + serde_json::Error, + ), + #[error("Unparsable date/time {0}")] + SubmissionFormat( + #[from] + #[source] + PayloadError, + ), + #[error("Submission body format: {0}")] + Time( + #[from] + #[source] + chrono::ParseError, + ), + #[error("Tokio Error")] + Tokio, + #[error("State is not valid utf8 {0}")] + Utf8( + #[from] + #[source] + core::str::Utf8Error, + ), } #[derive(Error, Debug)] pub enum PayloadError { - #[error("No list of Chronicle operations")] - OpsNotAList, - #[error("Not a JSON object")] - NotAnObject, - #[error("No version number")] - VersionMissing, - #[error("Unknown version number")] - VersionUnknown, + #[error("No list of Chronicle operations")] + OpsNotAList, + #[error("Not a JSON object")] + NotAnObject, + #[error("No version number")] + VersionMissing, + #[error("Unknown version number")] + 
VersionUnknown,
 }
 
 impl From<Infallible> for ProcessorError {
-    fn from(_: Infallible) -> Self {
-        unreachable!()
-    }
+	fn from(_: Infallible) -> Self {
+		unreachable!()
+	}
 }
 
 #[derive(Error, Debug)]
 pub enum ChronicleTransactionIdError {
-    #[error("Invalid transaction id {id}")]
-    InvalidTransactionId { id: String },
+	#[error("Invalid transaction id {id}")]
+	InvalidTransactionId { id: String },
 }
 
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone, Copy, Default)]
 pub struct ChronicleTransactionId([u8; 16]);
 
 impl core::ops::Deref for ChronicleTransactionId {
-    type Target = [u8; 16];
+	type Target = [u8; 16];
 
-    fn deref(&self) -> &Self::Target {
-        &self.0
-    }
+	fn deref(&self) -> &Self::Target {
+		&self.0
+	}
 }
 
 impl core::fmt::Display for ChronicleTransactionId {
-    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
-        f.write_str(&hex::encode(self.0))
-    }
+	fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+		f.write_str(&hex::encode(self.0))
+	}
 }
 
 impl From<Uuid> for ChronicleTransactionId {
-    fn from(u: Uuid) -> Self {
-        Self(u.into_bytes())
-    }
+	fn from(u: Uuid) -> Self {
+		Self(u.into_bytes())
+	}
 }
 
 impl From<[u8; 16]> for ChronicleTransactionId {
-    fn from(u: [u8; 16]) -> Self {
-        Self(u)
-    }
+	fn from(u: [u8; 16]) -> Self {
+		Self(u)
+	}
 }
 
 impl core::convert::TryFrom<String> for ChronicleTransactionId {
-    type Error = hex::FromHexError;
+	type Error = hex::FromHexError;
 
-    fn try_from(s: String) -> Result<Self, Self::Error> {
-        Self::try_from(s.as_str())
-    }
+	fn try_from(s: String) -> Result<Self, Self::Error> {
+		Self::try_from(s.as_str())
+	}
 }
 
 impl core::convert::TryFrom<&str> for ChronicleTransactionId {
-    type Error = hex::FromHexError;
-
-    fn try_from(s: &str) -> Result<Self, Self::Error> {
-        let bytes = hex::decode(s)?;
-        let mut array = [0; 16];
-        array.copy_from_slice(&bytes[0..16]);
-        Ok(Self(array))
-    }
+	type Error = hex::FromHexError;
+
+	fn try_from(s: &str) -> Result<Self, Self::Error> {
+		let bytes = hex::decode(s)?;
+		let mut array = [0; 16];
+		array.copy_from_slice(&bytes[0..16]);
+		Ok(Self(array))
+	}
 }
 
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Namespace {
-    pub id: NamespaceId,
-    pub uuid: [u8; 16],
-    pub external_id: ExternalId,
+	pub id: NamespaceId,
+	pub uuid: [u8; 16],
+	pub external_id: ExternalId,
 }
 
 impl Namespace {
-    pub fn new(id: NamespaceId, uuid: Uuid, external_id: &ExternalId) -> Self {
-        Self { id, uuid: uuid.into_bytes(), external_id: external_id.to_owned() }
-    }
+	pub fn new(id: NamespaceId, uuid: Uuid, external_id: &ExternalId) -> Self {
+		Self { id, uuid: uuid.into_bytes(), external_id: external_id.to_owned() }
+	}
 }
 
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Agent {
-    pub id: AgentId,
-    pub namespaceid: NamespaceId,
-    pub external_id: ExternalId,
-    pub domaintypeid: Option<DomaintypeId>,
-    pub attributes: Vec<Attribute>,
+	pub id: AgentId,
+	pub namespaceid: NamespaceId,
+	pub external_id: ExternalId,
+	pub domaintypeid: Option<DomaintypeId>,
+	pub attributes: Vec<Attribute>,
 }
 
 impl Agent {
-    pub fn has_attributes(self, attributes: Attributes) -> Self {
-        let Self { id, namespaceid, external_id, .. } = self;
-
-        Self {
-            id,
-            namespaceid,
-            external_id,
-            domaintypeid: attributes.get_typ().clone(),
-            attributes: attributes.get_items().to_vec(),
-        }
-    }
-
-    // Create a prototypical agent from its IRI, we can only determine external_id
-    pub fn exists(namespaceid: NamespaceId, id: AgentId) -> Self {
-        Self {
-            namespaceid,
-            external_id: id.external_id_part().to_owned(),
-            id,
-            domaintypeid: None,
-            attributes: Vec::new(),
-        }
-    }
+	pub fn has_attributes(self, attributes: Attributes) -> Self {
+		let Self { id, namespaceid, external_id, .. } = self;
+
+		Self {
+			id,
+			namespaceid,
+			external_id,
+			domaintypeid: attributes.get_typ().clone(),
+			attributes: attributes.get_items().to_vec(),
+		}
+	}
+
+	// Create a prototypical agent from its IRI, we can only determine external_id
+	pub fn exists(namespaceid: NamespaceId, id: AgentId) -> Self {
+		Self {
+			namespaceid,
+			external_id: id.external_id_part().to_owned(),
+			id,
+			domaintypeid: None,
+			attributes: Vec::new(),
+		}
+	}
 }
 
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Activity {
-    pub id: ActivityId,
-    pub namespace_id: NamespaceId,
-    pub external_id: ExternalId,
-    pub domaintype_id: Option<DomaintypeId>,
-    pub attributes: Vec<Attribute>,
-    pub started: Option<TimeWrapper>,
-    pub ended: Option<TimeWrapper>,
+	pub id: ActivityId,
+	pub namespace_id: NamespaceId,
+	pub external_id: ExternalId,
+	pub domaintype_id: Option<DomaintypeId>,
+	pub attributes: Vec<Attribute>,
+	pub started: Option<TimeWrapper>,
+	pub ended: Option<TimeWrapper>,
 }
 
 impl Activity {
-    pub fn has_attributes(self, attributes: Attributes) -> Self {
-        let Self { id, namespace_id, external_id, started, ended, .. } = self;
-        Self {
-            id,
-            namespace_id,
-            external_id,
-            started,
-            ended,
-            domaintype_id: attributes.get_typ().clone(),
-            attributes: attributes.get_items().to_vec(),
-        }
-    }
-
-    // Create a prototypical agent from its IRI, we can only determine external_id
-    pub fn exists(namespace_id: NamespaceId, id: ActivityId) -> Self {
-        Self {
-            namespace_id,
-            external_id: id.external_id_part().to_owned(),
-            id,
-            started: None,
-            ended: None,
-            domaintype_id: None,
-            attributes: Vec::new(),
-        }
-    }
+	pub fn has_attributes(self, attributes: Attributes) -> Self {
+		let Self { id, namespace_id, external_id, started, ended, ..
} = self;
+		Self {
+			id,
+			namespace_id,
+			external_id,
+			started,
+			ended,
+			domaintype_id: attributes.get_typ().clone(),
+			attributes: attributes.get_items().to_vec(),
+		}
+	}
+
+	// Create a prototypical agent from its IRI, we can only determine external_id
+	pub fn exists(namespace_id: NamespaceId, id: ActivityId) -> Self {
+		Self {
+			namespace_id,
+			external_id: id.external_id_part().to_owned(),
+			id,
+			started: None,
+			ended: None,
+			domaintype_id: None,
+			attributes: Vec::new(),
+		}
+	}
 }
 
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct Entity {
-    pub id: EntityId,
-    pub namespace_id: NamespaceId,
-    pub external_id: ExternalId,
-    pub domaintypeid: Option<DomaintypeId>,
-    pub attributes: Vec<Attribute>,
+	pub id: EntityId,
+	pub namespace_id: NamespaceId,
+	pub external_id: ExternalId,
+	pub domaintypeid: Option<DomaintypeId>,
+	pub attributes: Vec<Attribute>,
 }
 
 impl Entity {
-    pub fn has_attributes(self, attributes: Attributes) -> Self {
-        let Self { id, namespace_id: namespaceid, external_id, .. } = self;
-        Self {
-            id,
-            namespace_id: namespaceid,
-            external_id,
-            domaintypeid: attributes.get_typ().clone(),
-            attributes: attributes.get_items().to_vec(),
-        }
-    }
-
-    pub fn exists(namespaceid: NamespaceId, id: EntityId) -> Self {
-        Self {
-            external_id: id.external_id_part().to_owned(),
-            id,
-            namespace_id: namespaceid,
-            domaintypeid: None,
-            attributes: Vec::new(),
-        }
-    }
+	pub fn has_attributes(self, attributes: Attributes) -> Self {
+		let Self { id, namespace_id: namespaceid, external_id, .. } = self;
+		Self {
+			id,
+			namespace_id: namespaceid,
+			external_id,
+			domaintypeid: attributes.get_typ().clone(),
+			attributes: attributes.get_items().to_vec(),
+		}
+	}
+
+	pub fn exists(namespaceid: NamespaceId, id: EntityId) -> Self {
+		Self {
+			external_id: id.external_id_part().to_owned(),
+			id,
+			namespace_id: namespaceid,
+			domaintypeid: None,
+			attributes: Vec::new(),
+		}
+	}
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Derivation {
-    pub generated_id: EntityId,
-    pub used_id: EntityId,
-    pub activity_id: Option<ActivityId>,
-    pub typ: DerivationType,
+	pub generated_id: EntityId,
+	pub used_id: EntityId,
+	pub activity_id: Option<ActivityId>,
+	pub typ: DerivationType,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Delegation {
-    pub namespace_id: NamespaceId,
-    pub id: DelegationId,
-    pub delegate_id: AgentId,
-    pub responsible_id: AgentId,
-    pub activity_id: Option<ActivityId>,
-    pub role: Option<Role>,
+	pub namespace_id: NamespaceId,
+	pub id: DelegationId,
+	pub delegate_id: AgentId,
+	pub responsible_id: AgentId,
+	pub activity_id: Option<ActivityId>,
+	pub role: Option<Role>,
 }
 
 impl Delegation {
-    pub fn new(
-        namespace_id: &NamespaceId,
-        delegate_id: &AgentId,
-        responsible_id: &AgentId,
-        activity_id: Option<&ActivityId>,
-        role: Option<Role>,
-    ) -> Self {
-        Self {
-            namespace_id: namespace_id.clone(),
-            id: DelegationId::from_component_ids(
-                delegate_id,
-                responsible_id,
-                activity_id,
-                role.as_ref(),
-            ),
-            delegate_id: delegate_id.clone(),
-            responsible_id: responsible_id.clone(),
-            activity_id: activity_id.cloned(),
-            role,
-        }
-    }
+	pub fn new(
+		namespace_id: &NamespaceId,
+		delegate_id: &AgentId,
+		responsible_id: &AgentId,
+		activity_id: Option<&ActivityId>,
+		role: Option<Role>,
+	) -> Self {
+		Self {
+			namespace_id: namespace_id.clone(),
+			id: DelegationId::from_component_ids(
+				delegate_id,
+				responsible_id,
+				activity_id,
+				role.as_ref(),
+			),
+			delegate_id: delegate_id.clone(),
+			responsible_id: responsible_id.clone(),
+			activity_id: activity_id.cloned(),
+			role,
+		}
+	}
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Association {
-    pub namespace_id: NamespaceId,
-    pub id: AssociationId,
-    pub agent_id: AgentId,
-    pub activity_id: ActivityId,
-    pub role: Option<Role>,
+	pub namespace_id: NamespaceId,
+	pub id: AssociationId,
+	pub agent_id: AgentId,
+	pub activity_id: ActivityId,
+	pub role: Option<Role>,
 }
 
 impl Association {
-    pub fn new(
-        namespace_id: &NamespaceId,
-        agent_id: &AgentId,
-        activity_id: &ActivityId,
-        role: Option<Role>,
-    ) -> Self {
-        Self {
-            namespace_id: namespace_id.clone(),
-            id: AssociationId::from_component_ids(agent_id, activity_id, role.as_ref()),
-            agent_id: agent_id.clone(),
-            activity_id: activity_id.clone(),
-            role,
-        }
-    }
+	pub fn new(
+		namespace_id: &NamespaceId,
+		agent_id: &AgentId,
+		activity_id: &ActivityId,
+		role: Option<Role>,
+	) -> Self {
+		Self {
+			namespace_id: namespace_id.clone(),
+			id: AssociationId::from_component_ids(agent_id, activity_id, role.as_ref()),
+			agent_id: agent_id.clone(),
+			activity_id: activity_id.clone(),
+			role,
+		}
+	}
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Usage {
-    pub activity_id: ActivityId,
-    pub entity_id: EntityId,
+	pub activity_id: ActivityId,
+	pub entity_id: EntityId,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Generation {
-    pub activity_id: ActivityId,
-    pub generated_id: EntityId,
+	pub activity_id: ActivityId,
+	pub generated_id: EntityId,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct GeneratedEntity {
-    pub entity_id: EntityId,
-    pub generated_id: ActivityId,
+	pub entity_id: EntityId,
+	pub generated_id: ActivityId,
 }
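// [Annotation, not part of the patch] The relation structs above share one
// design: their composite ids are derived from the component ids, so
// re-recording the same fact is idempotent once the value lands in a
// BTreeSet. A minimal sketch, assuming the caller already holds the ids and
// an optional role:
fn same_association_twice(
	ns: &NamespaceId,
	agent: &AgentId,
	activity: &ActivityId,
	role: Option<Role>,
) -> bool {
	let first = Association::new(ns, agent, activity, role.clone());
	let second = Association::new(ns, agent, activity, role);
	// Equal because AssociationId::from_component_ids is deterministic.
	first == second
}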
#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 pub struct Attribution {
-    pub namespace_id: NamespaceId,
-    pub id: AttributionId,
-    pub agent_id: AgentId,
-    pub entity_id: EntityId,
-    pub role: Option<Role>,
+	pub namespace_id: NamespaceId,
+	pub id: AttributionId,
+	pub agent_id: AgentId,
+	pub entity_id: EntityId,
+	pub role: Option<Role>,
 }
 
 impl Attribution {
-    pub fn new(
-        namespace_id: &NamespaceId,
-        agent_id: &AgentId,
-        entity_id: &EntityId,
-        role: Option<Role>,
-    ) -> Self {
-        Self {
-            namespace_id: namespace_id.clone(),
-            id: AttributionId::from_component_ids(agent_id, entity_id, role.as_ref()),
-            agent_id: agent_id.clone(),
-            entity_id: entity_id.clone(),
-            role,
-        }
-    }
+	pub fn new(
+		namespace_id: &NamespaceId,
+		agent_id: &AgentId,
+		entity_id: &EntityId,
+		role: Option<Role>,
+	) -> Self {
+		Self {
+			namespace_id: namespace_id.clone(),
+			id: AttributionId::from_component_ids(agent_id, entity_id, role.as_ref()),
+			agent_id: agent_id.clone(),
+			entity_id: entity_id.clone(),
+			role,
+		}
+	}
 }
 
 type NamespacedId<T> = (NamespaceId, T);
@@ -561,25 +559,24 @@ type NamespacedAgent = NamespacedId<AgentId>;
 type NamespacedEntity = NamespacedId<EntityId>;
 type NamespacedActivity = NamespacedId<ActivityId>;
-
 #[derive(Debug, Default, Clone, Copy)]
 pub struct ProvSummary {
-    pub total_agents: usize,
-    pub total_entities: usize,
-    pub total_activities: usize,
-    pub total_attributions: usize,
-    pub total_derivations: usize,
-    pub total_generations: usize,
-    pub total_usages: usize,
-    pub total_associations: usize,
-    pub total_delegations: usize,
+	pub total_agents: usize,
+	pub total_entities: usize,
+	pub total_activities: usize,
+	pub total_attributions: usize,
+	pub total_derivations: usize,
+	pub total_generations: usize,
+	pub total_usages: usize,
+	pub total_associations: usize,
+	pub total_delegations: usize,
 }
 
 use core::fmt;
 
 impl fmt::Display for ProvSummary {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(
+	fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+		write!(
            f,
            "{{ total_agents: {}, total_entities: {}, total_activities: {}, total_attributions: {}, total_derivations: {}, total_generations: {}, total_usages: {}, total_associations: {}, total_delegations: {} }}",
            self.total_agents,
@@ -592,716 +589,715 @@ impl fmt::Display for ProvSummary {
            self.total_associations,
            self.total_delegations
        )
-    }
+	}
 }
-
 #[cfg_attr(
-    feature = "parity-encoding",
-    derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
+	feature = "parity-encoding",
+	derive(scale_info::TypeInfo, parity_scale_codec::Encode, parity_scale_codec::Decode)
 )]
 #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)]
 pub struct ProvModel {
-    pub namespaces: BTreeMap<NamespaceId, Arc<Namespace>>,
-    pub agents: BTreeMap<NamespacedAgent, Arc<Agent>>,
-    pub acted_on_behalf_of: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
-    pub delegation: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
-    pub entities: BTreeMap<NamespacedEntity, Arc<Entity>>,
-    pub derivation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Derivation>>>,
-    pub generation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Generation>>>,
-    pub attribution: BTreeMap<NamespacedEntity, Arc<BTreeSet<Attribution>>>,
-    pub activities: BTreeMap<NamespacedActivity, Arc<Activity>>,
-    pub was_informed_by: BTreeMap<NamespacedActivity, Arc<BTreeSet<NamespacedActivity>>>,
-    pub generated: BTreeMap<NamespacedActivity, Arc<BTreeSet<GeneratedEntity>>>,
-    pub association: BTreeMap<NamespacedActivity, Arc<BTreeSet<Association>>>,
-    pub usage: BTreeMap<NamespacedActivity, Arc<BTreeSet<Usage>>>,
+	pub namespaces: BTreeMap<NamespaceId, Arc<Namespace>>,
+	pub agents: BTreeMap<NamespacedAgent, Arc<Agent>>,
+	pub acted_on_behalf_of: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
+	pub delegation: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
+	pub entities: BTreeMap<NamespacedEntity, Arc<Entity>>,
+	pub derivation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Derivation>>>,
+	pub generation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Generation>>>,
+	pub attribution: BTreeMap<NamespacedEntity, Arc<BTreeSet<Attribution>>>,
+	pub activities: BTreeMap<NamespacedActivity, Arc<Activity>>,
+	pub was_informed_by: BTreeMap<NamespacedActivity, Arc<BTreeSet<NamespacedActivity>>>,
+	pub generated: BTreeMap<NamespacedActivity, Arc<BTreeSet<GeneratedEntity>>>,
+	pub association: BTreeMap<NamespacedActivity, Arc<BTreeSet<Association>>>,
+	pub usage: BTreeMap<NamespacedActivity, Arc<BTreeSet<Usage>>>,
 }
 
 #[cfg(feature = "parity-encoding")]
 pub mod provmodel_protocol {
-    use super::*;
-
-    #[derive(
-        scale_info::TypeInfo,
-        parity_scale_codec::Encode,
-        parity_scale_codec::Decode,
-        Debug,
-        Default,
-        Clone,
-        Serialize,
-        Deserialize,
-        PartialEq,
-        Eq,
-    )]
-    pub struct ProvModelV1 {
-        pub namespaces: BTreeMap<NamespaceId, Arc<Namespace>>,
-        /* We need NamespaceIdV1 /
-         * NamespaceV1 etc, recursively
-         * until there are only primitive
-         * types */
-        pub agents: BTreeMap<NamespacedAgent, Arc<Agent>>,
-        pub acted_on_behalf_of: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
-        pub delegation: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
-        pub entities: BTreeMap<NamespacedEntity, Arc<Entity>>,
-        pub derivation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Derivation>>>,
-        pub generation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Generation>>>,
-        pub attribution: BTreeMap<NamespacedEntity, Arc<BTreeSet<Attribution>>>,
-        pub activities: BTreeMap<NamespacedActivity, Arc<Activity>>,
-        pub was_informed_by: BTreeMap<NamespacedActivity, Arc<BTreeSet<NamespacedActivity>>>,
-        pub generated: BTreeMap<NamespacedActivity, Arc<BTreeSet<GeneratedEntity>>>,
-        pub association: BTreeMap<NamespacedActivity, Arc<BTreeSet<Association>>>,
-        pub usage: BTreeMap<NamespacedActivity, Arc<BTreeSet<Usage>>>,
-    }
-
-    impl From<ProvModelV1> for ProvModel {
-        fn from(value: ProvModelV1) -> Self {
-            ProvModel {
-                namespaces: value.namespaces,
-                agents: value.agents,
-                acted_on_behalf_of: value.acted_on_behalf_of,
-                delegation: value.delegation,
-                entities: value.entities,
-                derivation: value.derivation,
-                generation: value.generation,
-                attribution: value.attribution,
-                activities: value.activities,
-                was_informed_by: value.was_informed_by,
-                generated: value.generated,
-                association: value.association,
-                usage: value.usage,
-            }
-        }
-    }
+	use super::*;
+
+	#[derive(
+		scale_info::TypeInfo,
+		parity_scale_codec::Encode,
+		parity_scale_codec::Decode,
+		Debug,
+		Default,
+		Clone,
+		Serialize,
+		Deserialize,
+		PartialEq,
+		Eq,
+	)]
+	pub struct ProvModelV1 {
+		pub namespaces: BTreeMap<NamespaceId, Arc<Namespace>>,
+		/* We need NamespaceIdV1 /
+		 * NamespaceV1 etc, recursively
+		 * until there are only primitive
+		 * types */
+		pub agents: BTreeMap<NamespacedAgent, Arc<Agent>>,
+		pub acted_on_behalf_of: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
+		pub delegation: BTreeMap<NamespacedAgent, Arc<BTreeSet<Delegation>>>,
+		pub entities: BTreeMap<NamespacedEntity, Arc<Entity>>,
+		pub derivation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Derivation>>>,
+		pub generation: BTreeMap<NamespacedEntity, Arc<BTreeSet<Generation>>>,
+		pub attribution: BTreeMap<NamespacedEntity, Arc<BTreeSet<Attribution>>>,
+		pub activities: BTreeMap<NamespacedActivity, Arc<Activity>>,
+		pub was_informed_by: BTreeMap<NamespacedActivity, Arc<BTreeSet<NamespacedActivity>>>,
+		pub generated: BTreeMap<NamespacedActivity, Arc<BTreeSet<GeneratedEntity>>>,
+		pub association: BTreeMap<NamespacedActivity, Arc<BTreeSet<Association>>>,
+		pub usage: BTreeMap<NamespacedActivity, Arc<BTreeSet<Usage>>>,
+	}
+
+	impl From<ProvModelV1> for ProvModel {
+		fn from(value: ProvModelV1) -> Self {
+			ProvModel {
+				namespaces: value.namespaces,
+				agents: value.agents,
+				acted_on_behalf_of: value.acted_on_behalf_of,
+				delegation: value.delegation,
+				entities: value.entities,
+				derivation: value.derivation,
+				generation: value.generation,
+				attribution: value.attribution,
+				activities: value.activities,
+				was_informed_by: value.was_informed_by,
+				generated: value.generated,
+				association: value.association,
+				usage: value.usage,
+			}
+		}
+	}
 }
 
 #[cfg(feature = "parity-encoding")]
 // TODO: We can make these structures reasonably bounded (and copy ids with interning) - though JSON
 // attributes may need some handwaving
 impl parity_scale_codec::MaxEncodedLen for ProvModel {
-    fn max_encoded_len() -> usize {
-        64 * 1024usize
-    }
+	fn max_encoded_len() -> usize {
+		64 * 1024usize
+	}
 }
 
 impl ProvModel {
-    pub fn summarize(&self) -> ProvSummary {
-        ProvSummary {
-            total_agents: self.agents.len(),
-            total_entities: self.entities.len(),
-            total_activities: self.activities.len(),
-            total_attributions: self
-                .attribution
-                .values()
-                .map(|attributions| attributions.len())
-                .sum(),
-            total_derivations: self.derivation.values().map(|derivations| derivations.len()).sum(),
-            total_generations: self.generation.values().map(|generations|
generations.len()).sum(), - total_usages: self.usage.values().map(|usages| usages.len()).sum(), - total_associations: self - .association - .values() - .map(|associations| associations.len()) - .sum(), - total_delegations: self.delegation.values().map(|delegations| delegations.len()).sum(), - } - } - - /// Merge the supplied ProvModel into this one - pub fn combine(&mut self, other: &ProvModel) { - self.namespaces.extend(other.namespaces.clone()); - self.agents.extend(other.agents.clone()); - self.acted_on_behalf_of.extend(other.acted_on_behalf_of.clone()); - self.delegation.extend(other.delegation.clone()); - self.entities.extend(other.entities.clone()); - self.derivation.extend(other.derivation.clone()); - self.generation.extend(other.generation.clone()); - self.attribution.extend(other.attribution.clone()); - self.activities.extend(other.activities.clone()); - self.was_informed_by.extend(other.was_informed_by.clone()); - self.generated.extend(other.generated.clone()); - self.association.extend(other.association.clone()); - self.usage.extend(other.usage.clone()); - } - - /// Apply a sequence of `ChronicleTransaction` to an empty model, then return it - pub fn from_tx<'a, I>(tx: I) -> Result - where - I: IntoIterator, - { - let mut model = Self::default(); - for tx in tx { - model.apply(tx)?; - } - - Ok(model) - } - - /// Append a derivation to the model - pub fn was_derived_from( - &mut self, - namespace_id: NamespaceId, - typ: DerivationType, - used_id: EntityId, - id: EntityId, - activity_id: Option, - ) { - let derivation_set = - Arc::make_mut(self.derivation.entry((namespace_id, id.clone())).or_default()); - - derivation_set.insert(Derivation { typ, generated_id: id, used_id, activity_id }); - } - - /// Append a delegation to the model - pub fn qualified_delegation( - &mut self, - namespace_id: &NamespaceId, - responsible_id: &AgentId, - delegate_id: &AgentId, - activity_id: Option, - role: Option, - ) { - let delegation = Delegation { - namespace_id: namespace_id.clone(), - id: DelegationId::from_component_ids( - delegate_id, - responsible_id, - activity_id.as_ref(), - role.as_ref(), - ), - responsible_id: responsible_id.clone(), - delegate_id: delegate_id.clone(), - activity_id, - role, - }; - - let delegation_set = Arc::make_mut( - self.delegation - .entry((namespace_id.clone(), responsible_id.clone())) - .or_default(), - ); - delegation_set.insert(delegation.clone()); - - let acted_on_behalf_of_set = Arc::make_mut( - self.acted_on_behalf_of - .entry((namespace_id.clone(), responsible_id.clone())) - .or_default(), - ); - - acted_on_behalf_of_set.insert(delegation); - } - - pub fn qualified_association( - &mut self, - namespace_id: &NamespaceId, - activity_id: &ActivityId, - agent_id: &AgentId, - role: Option, - ) { - let association_set = Arc::make_mut( - self.association.entry((namespace_id.clone(), activity_id.clone())).or_default(), - ); - - association_set.insert(Association { - namespace_id: namespace_id.clone(), - id: AssociationId::from_component_ids(agent_id, activity_id, role.as_ref()), - agent_id: agent_id.clone(), - activity_id: activity_id.clone(), - role, - }); - } - - pub fn was_generated_by( - &mut self, - namespace_id: NamespaceId, - generated_id: &EntityId, - activity_id: &ActivityId, - ) { - let generation_set = Arc::make_mut( - self.generation.entry((namespace_id.clone(), generated_id.clone())).or_default(), - ); - generation_set.insert(Generation { - activity_id: activity_id.clone(), - generated_id: generated_id.clone(), - }); - } - - pub fn generated( - &mut 
self, - namespace_id: NamespaceId, - generated_id: &ActivityId, - entity_id: &EntityId, - ) { - let generated_set = Arc::make_mut( - self.generated.entry((namespace_id.clone(), generated_id.clone())).or_default(), - ); - - generated_set.insert(GeneratedEntity { - entity_id: entity_id.clone(), - generated_id: generated_id.clone(), - }); - } - - pub fn used( - &mut self, - namespace_id: NamespaceId, - activity_id: &ActivityId, - entity_id: &EntityId, - ) { - let usage_set = Arc::make_mut( - self.usage.entry((namespace_id.clone(), activity_id.clone())).or_default(), - ); - - usage_set.insert(Usage { activity_id: activity_id.clone(), entity_id: entity_id.clone() }); - } - - pub fn was_informed_by( - &mut self, - namespace_id: NamespaceId, - activity_id: &ActivityId, - informing_activity_id: &ActivityId, - ) { - let was_informed_by_set = Arc::make_mut( - self.was_informed_by - .entry((namespace_id.clone(), activity_id.clone())) - .or_default(), - ); - - was_informed_by_set.insert((namespace_id, informing_activity_id.clone())); - } - - pub fn qualified_attribution( - &mut self, - namespace_id: &NamespaceId, - entity_id: &EntityId, - agent_id: &AgentId, - role: Option, - ) { - let attribution_set = Arc::make_mut( - self.attribution.entry((namespace_id.clone(), entity_id.clone())).or_default(), - ); - - attribution_set.insert(Attribution { - namespace_id: namespace_id.clone(), - id: AttributionId::from_component_ids(agent_id, entity_id, role.as_ref()), - agent_id: agent_id.clone(), - entity_id: entity_id.clone(), - role, - }); - } - - /// Ensure we have the referenced namespace in our model - pub fn namespace_context(&mut self, ns: &NamespaceId) { - let (namespace_name, uuid) = (ns.external_id_part(), ns.uuid_part()); - - self.namespaces.insert( - ns.clone(), - Namespace { - id: ns.clone(), - uuid: uuid.into_bytes(), - external_id: namespace_name.to_owned(), - } - .into(), - ); - } - - /// Ensure we have the referenced agent in our model, so that open world - /// assumptions can be made - pub fn agent_context(&mut self, ns: &NamespaceId, agent: &AgentId) { - self.agents - .entry((ns.clone(), agent.clone())) - .or_insert_with(|| Agent::exists(ns.clone(), agent.clone()).into()); - } - - pub fn get_agent(&mut self, ns: &NamespaceId, agent: &AgentId) -> Option<&Agent> { - self.agents.get(&(ns.clone(), agent.clone())).map(|arc| arc.as_ref()) - } - - pub fn modify_agent( - &mut self, - ns: &NamespaceId, - agent: &AgentId, - f: F, - ) { - if let Some(arc) = self.agents.get_mut(&(ns.clone(), agent.clone())) { - let agent: &mut Agent = Arc::make_mut(arc); - f(agent); - } - } - - /// Ensure we have the referenced entity in our model, so that open world - /// assumptions can be made - pub fn entity_context(&mut self, ns: &NamespaceId, entity: &EntityId) { - self.entities - .entry((ns.clone(), entity.clone())) - .or_insert_with(|| Entity::exists(ns.clone(), entity.clone()).into()); - } - - pub fn get_entity(&mut self, ns: &NamespaceId, entity: &EntityId) -> Option<&Entity> { - self.entities.get(&(ns.clone(), entity.clone())).map(|arc| arc.as_ref()) - } - - pub fn modify_entity( - &mut self, - ns: &NamespaceId, - entity: &EntityId, - f: F, - ) { - if let Some(arc) = self.entities.get_mut(&(ns.clone(), entity.clone())) { - let entity: &mut Entity = Arc::make_mut(arc); - f(entity); - } - } - - /// Ensure we have the referenced activity in our model, so that open world - /// assumptions can be made - pub fn activity_context(&mut self, ns: &NamespaceId, activity: &ActivityId) { - self.activities - 
.entry((ns.clone(), activity.clone())) - .or_insert_with(|| Activity::exists(ns.clone(), activity.clone()).into()); - } - - pub fn get_activity(&mut self, ns: &NamespaceId, activity: &ActivityId) -> Option<&Activity> { - self.activities.get(&(ns.clone(), activity.clone())).map(|arc| arc.as_ref()) - } - - pub fn modify_activity( - &mut self, - ns: &NamespaceId, - activity: &ActivityId, - f: F, - ) { - if let Some(arc) = self.activities.get_mut(&(ns.clone(), activity.clone())) { - let activity: &mut Activity = Arc::make_mut(arc); - f(activity); - } - } - - /// Transform a sequence of `ChronicleOperation` events into a provenance model, - /// If a statement requires a subject or object that does not currently exist in the model, then - /// we create it If an operation contradicts a previous statement, then we record the - /// contradiction, but attempt to apply as much of the operation as possible - #[instrument(skip(self, tx), level = "trace", name = "apply_chronicle_operation", fields( + pub fn summarize(&self) -> ProvSummary { + ProvSummary { + total_agents: self.agents.len(), + total_entities: self.entities.len(), + total_activities: self.activities.len(), + total_attributions: self + .attribution + .values() + .map(|attributions| attributions.len()) + .sum(), + total_derivations: self.derivation.values().map(|derivations| derivations.len()).sum(), + total_generations: self.generation.values().map(|generations| generations.len()).sum(), + total_usages: self.usage.values().map(|usages| usages.len()).sum(), + total_associations: self + .association + .values() + .map(|associations| associations.len()) + .sum(), + total_delegations: self.delegation.values().map(|delegations| delegations.len()).sum(), + } + } + + /// Merge the supplied ProvModel into this one + pub fn combine(&mut self, other: &ProvModel) { + self.namespaces.extend(other.namespaces.clone()); + self.agents.extend(other.agents.clone()); + self.acted_on_behalf_of.extend(other.acted_on_behalf_of.clone()); + self.delegation.extend(other.delegation.clone()); + self.entities.extend(other.entities.clone()); + self.derivation.extend(other.derivation.clone()); + self.generation.extend(other.generation.clone()); + self.attribution.extend(other.attribution.clone()); + self.activities.extend(other.activities.clone()); + self.was_informed_by.extend(other.was_informed_by.clone()); + self.generated.extend(other.generated.clone()); + self.association.extend(other.association.clone()); + self.usage.extend(other.usage.clone()); + } + + /// Apply a sequence of `ChronicleTransaction` to an empty model, then return it + pub fn from_tx<'a, I>(tx: I) -> Result + where + I: IntoIterator, + { + let mut model = Self::default(); + for tx in tx { + model.apply(tx)?; + } + + Ok(model) + } + + /// Append a derivation to the model + pub fn was_derived_from( + &mut self, + namespace_id: NamespaceId, + typ: DerivationType, + used_id: EntityId, + id: EntityId, + activity_id: Option, + ) { + let derivation_set = + Arc::make_mut(self.derivation.entry((namespace_id, id.clone())).or_default()); + + derivation_set.insert(Derivation { typ, generated_id: id, used_id, activity_id }); + } + + /// Append a delegation to the model + pub fn qualified_delegation( + &mut self, + namespace_id: &NamespaceId, + responsible_id: &AgentId, + delegate_id: &AgentId, + activity_id: Option, + role: Option, + ) { + let delegation = Delegation { + namespace_id: namespace_id.clone(), + id: DelegationId::from_component_ids( + delegate_id, + responsible_id, + activity_id.as_ref(), + 
role.as_ref(), + ), + responsible_id: responsible_id.clone(), + delegate_id: delegate_id.clone(), + activity_id, + role, + }; + + let delegation_set = Arc::make_mut( + self.delegation + .entry((namespace_id.clone(), responsible_id.clone())) + .or_default(), + ); + delegation_set.insert(delegation.clone()); + + let acted_on_behalf_of_set = Arc::make_mut( + self.acted_on_behalf_of + .entry((namespace_id.clone(), responsible_id.clone())) + .or_default(), + ); + + acted_on_behalf_of_set.insert(delegation); + } + + pub fn qualified_association( + &mut self, + namespace_id: &NamespaceId, + activity_id: &ActivityId, + agent_id: &AgentId, + role: Option, + ) { + let association_set = Arc::make_mut( + self.association.entry((namespace_id.clone(), activity_id.clone())).or_default(), + ); + + association_set.insert(Association { + namespace_id: namespace_id.clone(), + id: AssociationId::from_component_ids(agent_id, activity_id, role.as_ref()), + agent_id: agent_id.clone(), + activity_id: activity_id.clone(), + role, + }); + } + + pub fn was_generated_by( + &mut self, + namespace_id: NamespaceId, + generated_id: &EntityId, + activity_id: &ActivityId, + ) { + let generation_set = Arc::make_mut( + self.generation.entry((namespace_id.clone(), generated_id.clone())).or_default(), + ); + generation_set.insert(Generation { + activity_id: activity_id.clone(), + generated_id: generated_id.clone(), + }); + } + + pub fn generated( + &mut self, + namespace_id: NamespaceId, + generated_id: &ActivityId, + entity_id: &EntityId, + ) { + let generated_set = Arc::make_mut( + self.generated.entry((namespace_id.clone(), generated_id.clone())).or_default(), + ); + + generated_set.insert(GeneratedEntity { + entity_id: entity_id.clone(), + generated_id: generated_id.clone(), + }); + } + + pub fn used( + &mut self, + namespace_id: NamespaceId, + activity_id: &ActivityId, + entity_id: &EntityId, + ) { + let usage_set = Arc::make_mut( + self.usage.entry((namespace_id.clone(), activity_id.clone())).or_default(), + ); + + usage_set.insert(Usage { activity_id: activity_id.clone(), entity_id: entity_id.clone() }); + } + + pub fn was_informed_by( + &mut self, + namespace_id: NamespaceId, + activity_id: &ActivityId, + informing_activity_id: &ActivityId, + ) { + let was_informed_by_set = Arc::make_mut( + self.was_informed_by + .entry((namespace_id.clone(), activity_id.clone())) + .or_default(), + ); + + was_informed_by_set.insert((namespace_id, informing_activity_id.clone())); + } + + pub fn qualified_attribution( + &mut self, + namespace_id: &NamespaceId, + entity_id: &EntityId, + agent_id: &AgentId, + role: Option, + ) { + let attribution_set = Arc::make_mut( + self.attribution.entry((namespace_id.clone(), entity_id.clone())).or_default(), + ); + + attribution_set.insert(Attribution { + namespace_id: namespace_id.clone(), + id: AttributionId::from_component_ids(agent_id, entity_id, role.as_ref()), + agent_id: agent_id.clone(), + entity_id: entity_id.clone(), + role, + }); + } + + /// Ensure we have the referenced namespace in our model + pub fn namespace_context(&mut self, ns: &NamespaceId) { + let (namespace_name, uuid) = (ns.external_id_part(), ns.uuid_part()); + + self.namespaces.insert( + ns.clone(), + Namespace { + id: ns.clone(), + uuid: uuid.into_bytes(), + external_id: namespace_name.to_owned(), + } + .into(), + ); + } + + /// Ensure we have the referenced agent in our model, so that open world + /// assumptions can be made + pub fn agent_context(&mut self, ns: &NamespaceId, agent: &AgentId) { + self.agents + 
.entry((ns.clone(), agent.clone())) + .or_insert_with(|| Agent::exists(ns.clone(), agent.clone()).into()); + } + + pub fn get_agent(&mut self, ns: &NamespaceId, agent: &AgentId) -> Option<&Agent> { + self.agents.get(&(ns.clone(), agent.clone())).map(|arc| arc.as_ref()) + } + + pub fn modify_agent( + &mut self, + ns: &NamespaceId, + agent: &AgentId, + f: F, + ) { + if let Some(arc) = self.agents.get_mut(&(ns.clone(), agent.clone())) { + let agent: &mut Agent = Arc::make_mut(arc); + f(agent); + } + } + + /// Ensure we have the referenced entity in our model, so that open world + /// assumptions can be made + pub fn entity_context(&mut self, ns: &NamespaceId, entity: &EntityId) { + self.entities + .entry((ns.clone(), entity.clone())) + .or_insert_with(|| Entity::exists(ns.clone(), entity.clone()).into()); + } + + pub fn get_entity(&mut self, ns: &NamespaceId, entity: &EntityId) -> Option<&Entity> { + self.entities.get(&(ns.clone(), entity.clone())).map(|arc| arc.as_ref()) + } + + pub fn modify_entity( + &mut self, + ns: &NamespaceId, + entity: &EntityId, + f: F, + ) { + if let Some(arc) = self.entities.get_mut(&(ns.clone(), entity.clone())) { + let entity: &mut Entity = Arc::make_mut(arc); + f(entity); + } + } + + /// Ensure we have the referenced activity in our model, so that open world + /// assumptions can be made + pub fn activity_context(&mut self, ns: &NamespaceId, activity: &ActivityId) { + self.activities + .entry((ns.clone(), activity.clone())) + .or_insert_with(|| Activity::exists(ns.clone(), activity.clone()).into()); + } + + pub fn get_activity(&mut self, ns: &NamespaceId, activity: &ActivityId) -> Option<&Activity> { + self.activities.get(&(ns.clone(), activity.clone())).map(|arc| arc.as_ref()) + } + + pub fn modify_activity( + &mut self, + ns: &NamespaceId, + activity: &ActivityId, + f: F, + ) { + if let Some(arc) = self.activities.get_mut(&(ns.clone(), activity.clone())) { + let activity: &mut Activity = Arc::make_mut(arc); + f(activity); + } + } + + /// Transform a sequence of `ChronicleOperation` events into a provenance model, + /// If a statement requires a subject or object that does not currently exist in the model, then + /// we create it If an operation contradicts a previous statement, then we record the + /// contradiction, but attempt to apply as much of the operation as possible + #[instrument(skip(self, tx), level = "trace", name = "apply_chronicle_operation", fields( op = ? tx, model = ? self ), ret(Debug))] - pub fn apply(&mut self, tx: &ChronicleOperation) -> Result<(), Contradiction> { - let tx = tx.to_owned(); - match tx { - ChronicleOperation::CreateNamespace(CreateNamespace { id }) => { - self.namespace_context(&id); - Ok(()) - } - ChronicleOperation::AgentExists(AgentExists { namespace, id, .. }) => { - self.agent_context(&namespace, &id); - - Ok(()) - } - ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { - id: _, - namespace, - delegate_id, - activity_id, - role, - responsible_id, - }) => { - self.agent_context(&namespace, &delegate_id); - self.agent_context(&namespace, &responsible_id); - - if let Some(activity_id) = activity_id.clone() { - self.activity_context(&namespace, &activity_id); - } - - self.qualified_delegation( - &namespace, - &responsible_id, - &delegate_id, - activity_id, - role, - ); - - Ok(()) - } - ChronicleOperation::ActivityExists(ActivityExists { namespace, id, .. 
}) => { - self.activity_context(&namespace, &id); - - Ok(()) - } - ChronicleOperation::StartActivity(StartActivity { namespace, id, time }) => { - self.activity_context(&namespace, &id); - - let activity = self.get_activity(&namespace, &id); - - trace!(check_start_contradiction = ?time, existing_time=?activity.and_then(|activity| activity.started)); - match ( - activity.and_then(|activity| activity.started), - activity.and_then(|activity| activity.ended), - ) { - (Some(TimeWrapper(started)), _) if started != time.0 => - return Err(Contradiction::start_date_alteration( - id.into(), - namespace, - started, - time.0, - )), - (_, Some(TimeWrapper(ended))) if ended < time.0 => - return Err(Contradiction::invalid_range( - id.into(), - namespace, - time.0, - ended, - )), - _ => {} - }; - - self.modify_activity(&namespace, &id, move |activity| { - activity.started = Some(time); - }); - - Ok(()) - } - ChronicleOperation::EndActivity(EndActivity { namespace, id, time }) => { - self.activity_context(&namespace, &id); - - let activity = self.get_activity(&namespace, &id); - - trace!(check_end_contradiction = ?time, existing_time=?activity.and_then(|activity| activity.ended)); - match ( - activity.and_then(|activity| activity.started), - activity.and_then(|activity| activity.ended), - ) { - (_, Some(TimeWrapper(ended))) if ended != time.0 => - return Err(Contradiction::end_date_alteration( - id.into(), - namespace, - ended, - time.0, - )), - (Some(TimeWrapper(started)), _) if started > time.0 => - return Err(Contradiction::invalid_range( - id.into(), - namespace, - started, - time.0, - )), - _ => {} - }; - - self.modify_activity(&namespace, &id, move |activity| { - activity.ended = Some(time); - }); - - Ok(()) - } - ChronicleOperation::WasAssociatedWith(WasAssociatedWith { - id: _, - role, - namespace, - activity_id, - agent_id, - }) => { - self.agent_context(&namespace, &agent_id); - self.activity_context(&namespace, &activity_id); - self.qualified_association(&namespace, &activity_id, &agent_id, role); - - Ok(()) - } - ChronicleOperation::WasAttributedTo(WasAttributedTo { - id: _, - role, - namespace, - entity_id, - agent_id, - }) => { - self.agent_context(&namespace, &agent_id); - self.entity_context(&namespace, &entity_id); - self.qualified_attribution(&namespace, &entity_id, &agent_id, role); - - Ok(()) - } - ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { - self.activity_context(&namespace, &activity); - self.entity_context(&namespace, &id); - - self.used(namespace, &activity, &id); - - Ok(()) - } - ChronicleOperation::EntityExists(EntityExists { namespace, id, .. 
}) => { - self.entity_context(&namespace, &id); - Ok(()) - } - ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => { - self.entity_context(&namespace, &id); - self.activity_context(&namespace, &activity); - - self.was_generated_by(namespace, &id, &activity); - - Ok(()) - } - ChronicleOperation::WasInformedBy(WasInformedBy { - namespace, - activity, - informing_activity, - }) => { - self.activity_context(&namespace, &activity); - self.activity_context(&namespace, &informing_activity); - - self.was_informed_by(namespace, &activity, &informing_activity); - - Ok(()) - } - ChronicleOperation::EntityDerive(EntityDerive { - namespace, - id, - typ, - used_id, - activity_id, - }) => { - self.entity_context(&namespace, &id); - self.entity_context(&namespace, &used_id); - - if let Some(activity_id) = &activity_id { - self.activity_context(&namespace, activity_id); - } - - self.was_derived_from(namespace, typ, used_id, id, activity_id); - - Ok(()) - } - ChronicleOperation::SetAttributes(SetAttributes::Entity { - namespace, - id, - attributes, - }) => { - self.entity_context(&namespace, &id); - - if let Some(current) = self - .entities - .get(&(namespace.clone(), id.clone())) - .map(|entity| &entity.attributes) - { - Self::validate_attribute_changes( - &id.clone().into(), - &namespace, - current, - &attributes, - )?; - }; - - self.modify_entity(&namespace, &id, move |entity| { - entity.domaintypeid = attributes.get_typ().clone(); - entity.attributes = attributes.get_items().to_vec(); - }); - - Ok(()) - } - ChronicleOperation::SetAttributes(SetAttributes::Activity { - namespace, - id, - attributes, - }) => { - self.activity_context(&namespace, &id); - - if let Some(current) = self - .activities - .get(&(namespace.clone(), id.clone())) - .map(|activity| &activity.attributes) - { - Self::validate_attribute_changes( - &id.clone().into(), - &namespace, - current, - &attributes, - )?; - }; - - self.modify_activity(&namespace, &id, move |activity| { - activity.domaintype_id = attributes.get_typ().clone(); - activity.attributes = attributes.get_items().to_vec(); - }); - - Ok(()) - } - ChronicleOperation::SetAttributes(SetAttributes::Agent { - namespace, - id, - attributes, - }) => { - self.agent_context(&namespace, &id); - - if let Some(current) = - self.agents.get(&(namespace.clone(), id.clone())).map(|agent| &agent.attributes) - { - Self::validate_attribute_changes( - &id.clone().into(), - &namespace, - current, - &attributes, - )?; - }; - - self.modify_agent(&namespace, &id, move |agent| { - agent.domaintypeid = attributes.get_typ().clone(); - agent.attributes = attributes.get_items().to_vec(); - }); - - Ok(()) - } - } - } - - /// Allow additional attributes, but changing an existing attribute is not allowed - #[instrument(level = "trace", ret(Debug))] - fn validate_attribute_changes( - id: &ChronicleIri, - namespace: &NamespaceId, - current: &Vec, - attempted: &Attributes, - ) -> Result<(), Contradiction> { - let current_map: BTreeMap = - current.iter().map(|attr| (attr.typ.clone(), attr)).collect(); - let contradictions = attempted - .get_items() - .iter() - .filter_map(|attempted_attr| { - current_map.get(&attempted_attr.typ).and_then(|¤t_attr| { - if attempted_attr != current_attr { - Some(( - attempted_attr.typ.clone(), - attempted_attr.clone(), - (*current_attr).clone(), - )) - } else { - None - } - }) - }) - .collect::>(); - - if contradictions.is_empty() { - Ok(()) - } else { - Err(Contradiction::attribute_value_change( - id.clone(), - namespace.clone(), - 
contradictions, - )) - } - } - - #[cfg(feature = "json-ld")] - pub(crate) fn add_agent(&mut self, agent: Agent) { - self.agents.insert((agent.namespaceid.clone(), agent.id.clone()), agent.into()); - } - - #[cfg(feature = "json-ld")] - pub(crate) fn add_activity(&mut self, activity: Activity) { - self.activities - .insert((activity.namespace_id.clone(), activity.id.clone()), activity.into()); - } - - #[cfg(feature = "json-ld")] - pub(crate) fn add_entity(&mut self, entity: Entity) { - self.entities - .insert((entity.namespace_id.clone(), entity.id.clone()), entity.into()); - } + pub fn apply(&mut self, tx: &ChronicleOperation) -> Result<(), Contradiction> { + let tx = tx.to_owned(); + match tx { + ChronicleOperation::CreateNamespace(CreateNamespace { id }) => { + self.namespace_context(&id); + Ok(()) + }, + ChronicleOperation::AgentExists(AgentExists { namespace, id, .. }) => { + self.agent_context(&namespace, &id); + + Ok(()) + }, + ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf { + id: _, + namespace, + delegate_id, + activity_id, + role, + responsible_id, + }) => { + self.agent_context(&namespace, &delegate_id); + self.agent_context(&namespace, &responsible_id); + + if let Some(activity_id) = activity_id.clone() { + self.activity_context(&namespace, &activity_id); + } + + self.qualified_delegation( + &namespace, + &responsible_id, + &delegate_id, + activity_id, + role, + ); + + Ok(()) + }, + ChronicleOperation::ActivityExists(ActivityExists { namespace, id, .. }) => { + self.activity_context(&namespace, &id); + + Ok(()) + }, + ChronicleOperation::StartActivity(StartActivity { namespace, id, time }) => { + self.activity_context(&namespace, &id); + + let activity = self.get_activity(&namespace, &id); + + trace!(check_start_contradiction = ?time, existing_time=?activity.and_then(|activity| activity.started)); + match ( + activity.and_then(|activity| activity.started), + activity.and_then(|activity| activity.ended), + ) { + (Some(TimeWrapper(started)), _) if started != time.0 => + return Err(Contradiction::start_date_alteration( + id.into(), + namespace, + started, + time.0, + )), + (_, Some(TimeWrapper(ended))) if ended < time.0 => + return Err(Contradiction::invalid_range( + id.into(), + namespace, + time.0, + ended, + )), + _ => {}, + }; + + self.modify_activity(&namespace, &id, move |activity| { + activity.started = Some(time); + }); + + Ok(()) + }, + ChronicleOperation::EndActivity(EndActivity { namespace, id, time }) => { + self.activity_context(&namespace, &id); + + let activity = self.get_activity(&namespace, &id); + + trace!(check_end_contradiction = ?time, existing_time=?activity.and_then(|activity| activity.ended)); + match ( + activity.and_then(|activity| activity.started), + activity.and_then(|activity| activity.ended), + ) { + (_, Some(TimeWrapper(ended))) if ended != time.0 => + return Err(Contradiction::end_date_alteration( + id.into(), + namespace, + ended, + time.0, + )), + (Some(TimeWrapper(started)), _) if started > time.0 => + return Err(Contradiction::invalid_range( + id.into(), + namespace, + started, + time.0, + )), + _ => {}, + }; + + self.modify_activity(&namespace, &id, move |activity| { + activity.ended = Some(time); + }); + + Ok(()) + }, + ChronicleOperation::WasAssociatedWith(WasAssociatedWith { + id: _, + role, + namespace, + activity_id, + agent_id, + }) => { + self.agent_context(&namespace, &agent_id); + self.activity_context(&namespace, &activity_id); + self.qualified_association(&namespace, &activity_id, &agent_id, role); + + Ok(()) + }, + 
ChronicleOperation::WasAttributedTo(WasAttributedTo { + id: _, + role, + namespace, + entity_id, + agent_id, + }) => { + self.agent_context(&namespace, &agent_id); + self.entity_context(&namespace, &entity_id); + self.qualified_attribution(&namespace, &entity_id, &agent_id, role); + + Ok(()) + }, + ChronicleOperation::ActivityUses(ActivityUses { namespace, id, activity }) => { + self.activity_context(&namespace, &activity); + self.entity_context(&namespace, &id); + + self.used(namespace, &activity, &id); + + Ok(()) + }, + ChronicleOperation::EntityExists(EntityExists { namespace, id, .. }) => { + self.entity_context(&namespace, &id); + Ok(()) + }, + ChronicleOperation::WasGeneratedBy(WasGeneratedBy { namespace, id, activity }) => { + self.entity_context(&namespace, &id); + self.activity_context(&namespace, &activity); + + self.was_generated_by(namespace, &id, &activity); + + Ok(()) + }, + ChronicleOperation::WasInformedBy(WasInformedBy { + namespace, + activity, + informing_activity, + }) => { + self.activity_context(&namespace, &activity); + self.activity_context(&namespace, &informing_activity); + + self.was_informed_by(namespace, &activity, &informing_activity); + + Ok(()) + }, + ChronicleOperation::EntityDerive(EntityDerive { + namespace, + id, + typ, + used_id, + activity_id, + }) => { + self.entity_context(&namespace, &id); + self.entity_context(&namespace, &used_id); + + if let Some(activity_id) = &activity_id { + self.activity_context(&namespace, activity_id); + } + + self.was_derived_from(namespace, typ, used_id, id, activity_id); + + Ok(()) + }, + ChronicleOperation::SetAttributes(SetAttributes::Entity { + namespace, + id, + attributes, + }) => { + self.entity_context(&namespace, &id); + + if let Some(current) = self + .entities + .get(&(namespace.clone(), id.clone())) + .map(|entity| &entity.attributes) + { + Self::validate_attribute_changes( + &id.clone().into(), + &namespace, + current, + &attributes, + )?; + }; + + self.modify_entity(&namespace, &id, move |entity| { + entity.domaintypeid = attributes.get_typ().clone(); + entity.attributes = attributes.get_items().to_vec(); + }); + + Ok(()) + }, + ChronicleOperation::SetAttributes(SetAttributes::Activity { + namespace, + id, + attributes, + }) => { + self.activity_context(&namespace, &id); + + if let Some(current) = self + .activities + .get(&(namespace.clone(), id.clone())) + .map(|activity| &activity.attributes) + { + Self::validate_attribute_changes( + &id.clone().into(), + &namespace, + current, + &attributes, + )?; + }; + + self.modify_activity(&namespace, &id, move |activity| { + activity.domaintype_id = attributes.get_typ().clone(); + activity.attributes = attributes.get_items().to_vec(); + }); + + Ok(()) + }, + ChronicleOperation::SetAttributes(SetAttributes::Agent { + namespace, + id, + attributes, + }) => { + self.agent_context(&namespace, &id); + + if let Some(current) = + self.agents.get(&(namespace.clone(), id.clone())).map(|agent| &agent.attributes) + { + Self::validate_attribute_changes( + &id.clone().into(), + &namespace, + current, + &attributes, + )?; + }; + + self.modify_agent(&namespace, &id, move |agent| { + agent.domaintypeid = attributes.get_typ().clone(); + agent.attributes = attributes.get_items().to_vec(); + }); + + Ok(()) + }, + } + }
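The three `SetAttributes` arms above all defer to `validate_attribute_changes`, which enforces an additive-only policy: an operation may introduce new attributes, but may not change the value of one that is already recorded. A minimal, self-contained sketch of that rule, using a simplified stand-in `Attribute` type rather than the crate's own:

```rust
use std::collections::BTreeMap;

// Simplified stand-in for the crate's Attribute type.
#[derive(Clone, Debug, PartialEq)]
struct Attribute {
    typ: String,
    value: String,
}

/// Additive-only check: new attributes are fine, but an attribute that
/// already exists must keep its current value.
fn check_additive(
    current: &[Attribute],
    attempted: &[Attribute],
) -> Result<(), Vec<(Attribute, Attribute)>> {
    // Index the current attributes by type name.
    let current_map: BTreeMap<&str, &Attribute> =
        current.iter().map(|a| (a.typ.as_str(), a)).collect();

    // Collect (attempted, existing) pairs that disagree.
    let mut conflicts = Vec::new();
    for attempted_attr in attempted {
        if let Some(existing) = current_map.get(attempted_attr.typ.as_str()) {
            if *existing != attempted_attr {
                conflicts.push((attempted_attr.clone(), (*existing).clone()));
            }
        }
    }

    if conflicts.is_empty() {
        Ok(())
    } else {
        Err(conflicts)
    }
}

fn main() {
    let current = vec![Attribute { typ: "color".into(), value: "red".into() }];

    // Adding a new attribute alongside an unchanged one is allowed.
    let additive = vec![
        Attribute { typ: "color".into(), value: "red".into() },
        Attribute { typ: "size".into(), value: "large".into() },
    ];
    assert!(check_additive(&current, &additive).is_ok());

    // Changing an existing attribute's value is a contradiction.
    let conflicting = vec![Attribute { typ: "color".into(), value: "blue".into() }];
    assert!(check_additive(&current, &conflicting).is_err());
}
```

The real function below additionally reports the offending attribute type alongside both values, so the resulting `Contradiction` can name exactly what changed.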
+ + /// Allow additional attributes, but changing an existing attribute is not allowed + #[instrument(level = "trace", ret(Debug))] + fn validate_attribute_changes( + id: &ChronicleIri, + namespace: &NamespaceId, + current: &Vec<Attribute>, + attempted: &Attributes, + ) -> Result<(), Contradiction> { + let current_map: BTreeMap<String, &Attribute> = + current.iter().map(|attr| (attr.typ.clone(), attr)).collect(); + let contradictions = attempted + .get_items() + .iter() + .filter_map(|attempted_attr| { + current_map.get(&attempted_attr.typ).and_then(|&current_attr| { + if attempted_attr != current_attr { + Some(( + attempted_attr.typ.clone(), + attempted_attr.clone(), + (*current_attr).clone(), + )) + } else { + None + } + }) + }) + .collect::<Vec<_>>(); + + if contradictions.is_empty() { + Ok(()) + } else { + Err(Contradiction::attribute_value_change( + id.clone(), + namespace.clone(), + contradictions, + )) + } + } + + #[cfg(feature = "json-ld")] + pub(crate) fn add_agent(&mut self, agent: Agent) { + self.agents.insert((agent.namespaceid.clone(), agent.id.clone()), agent.into()); + } + + #[cfg(feature = "json-ld")] + pub(crate) fn add_activity(&mut self, activity: Activity) { + self.activities + .insert((activity.namespace_id.clone(), activity.id.clone()), activity.into()); + } + + #[cfg(feature = "json-ld")] + pub(crate) fn add_entity(&mut self, entity: Entity) { + self.entities + .insert((entity.namespace_id.clone(), entity.id.clone()), entity.into()); + } } diff --git a/crates/common/src/prov/model/proptest.rs b/crates/common/src/prov/model/proptest.rs index 2a6553f99..5e98fc62a 100644 --- a/crates/common/src/prov/model/proptest.rs +++ b/crates/common/src/prov/model/proptest.rs @@ -3,12 +3,12 @@ use proptest::{option, prelude::*}; use uuid::Uuid; use crate::{ - attributes::{Attribute, Attributes}, - prov::{ - ActivityId, AgentId, Association, AssociationId, Attribution, Contradiction, - Delegation, DelegationId, Derivation, DomaintypeId, EntityId, ExternalId, ExternalIdPart, - Generation, json_ld::ToJson, NamespaceId, operations::*, ProvModel, Role, Usage, UuidPart, - }, + attributes::{Attribute, Attributes}, + prov::{ + json_ld::ToJson, operations::*, ActivityId, AgentId, Association, AssociationId, + Attribution, Contradiction, Delegation, DelegationId, Derivation, DomaintypeId, EntityId, + ExternalId, ExternalIdPart, Generation, NamespaceId, ProvModel, Role, Usage, UuidPart, + }, }; use super::{ActivityUses, ActsOnBehalfOf, EntityDerive, StartActivity}; @@ -289,8 +289,8 @@ prop_compose! { } } -fn transaction() -> impl Strategy<Value = ChronicleOperation> { - prop_oneof![ +fn transaction() -> impl Strategy<Value = ChronicleOperation> { + prop_oneof![ 1 => create_agent().prop_map(ChronicleOperation::AgentExists), 1 => create_activity().prop_map(ChronicleOperation::ActivityExists), 1 => start_activity().prop_map(ChronicleOperation::StartActivity), @@ -307,8 +307,8 @@ fn transaction() -> impl Strategy<Value = ChronicleOperation> { ] } -fn operation_seq() -> impl Strategy<Value = Vec<ChronicleOperation>> { - proptest::collection::vec(transaction(), 1..50) +fn operation_seq() -> impl Strategy<Value = Vec<ChronicleOperation>> { + proptest::collection::vec(transaction(), 1..50) } proptest! {
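These strategies pair weighted `prop_oneof!` branches with `proptest::collection::vec` to build arbitrary operation sequences. A self-contained sketch of the same pattern, with a toy `Op` enum standing in for `ChronicleOperation` (only the `proptest` crate is assumed):

```rust
use proptest::prelude::*;

// Toy stand-in for ChronicleOperation.
#[derive(Clone, Debug)]
enum Op {
    Start(u8),
    End(u8),
}

// One weighted choice between operation kinds, mirroring transaction().
fn op() -> impl Strategy<Value = Op> {
    prop_oneof![
        1 => any::<u8>().prop_map(Op::Start),
        1 => any::<u8>().prop_map(Op::End),
    ]
}

// A sequence of 1..50 operations, mirroring operation_seq().
fn op_seq() -> impl Strategy<Value = Vec<Op>> {
    proptest::collection::vec(op(), 1..50)
}

proptest! {
    #[test]
    fn generated_sequences_are_nonempty(seq in op_seq()) {
        prop_assert!(!seq.is_empty());
        prop_assert!(seq.len() < 50);
    }
}
```

Feeding such sequences through `ProvModel::apply` is what exercises the contradiction paths above under randomized orderings.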
diff --git a/crates/common/src/prov/operations.rs b/crates/common/src/prov/operations.rs index e2e90c779..58f5a2443 100644 --- a/crates/common/src/prov/operations.rs +++ b/crates/common/src/prov/operations.rs @@ -2,836 +2,836 @@ use chrono::{DateTime, NaiveDateTime, TimeZone, Utc}; #[cfg(feature = "diesel-bindings")] use diesel::{ - self, - backend::Backend, - deserialize::FromSql, - serialize::{Output, ToSql}, - sql_types::Integer, - AsExpression, QueryId, SqlType, + self, + backend::Backend, + deserialize::FromSql, + serialize::{Output, ToSql}, + sql_types::Integer, + AsExpression, QueryId, SqlType, }; #[cfg(not(feature = "std"))] use parity_scale_codec::alloc::string::String; #[cfg(not(feature = "std"))] -use scale_info::prelude::{vec::Vec, vec}; +use scale_info::prelude::{vec, vec::Vec}; use crate::attributes::Attributes; use super::{ - ActivityId, AgentId, AssociationId, AttributionId, DelegationId, EntityId, NamespaceId, Role, + ActivityId, AgentId, AssociationId, AttributionId, DelegationId, EntityId, NamespaceId, Role, }; #[derive(Debug, Copy, Clone, PartialEq, Ord, PartialOrd, Eq, Hash, Serialize, Deserialize)] #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[cfg_attr(feature = "diesel-bindings", derive(AsExpression, SqlType, QueryId))] #[cfg_attr(feature = "diesel-bindings", diesel(sql_type = Integer))] #[repr(i32)] pub enum DerivationType { - None, - Revision, - Quotation, - PrimarySource, + None, + Revision, + Quotation, + PrimarySource, } #[cfg(feature = "diesel-bindings")] mod bindings { - use super::*; - - impl<DB> ToSql<Integer, DB> for DerivationType - where - DB: Backend, - i32: ToSql<Integer, DB>, - { - fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> diesel::serialize::Result { - match self { - DerivationType::None => (-1).to_sql(out), - DerivationType::Revision => 1.to_sql(out), - DerivationType::Quotation => 2.to_sql(out), - DerivationType::PrimarySource => 3.to_sql(out), - } - } - } - - impl<DB> FromSql<Integer, DB> for DerivationType - where - DB: Backend, - i32: FromSql<Integer, DB>, - { - fn from_sql(bytes: <DB as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> { - match i32::from_sql(bytes)? { - -1 => Ok(DerivationType::None), - 1 => Ok(DerivationType::Revision), - 2 => Ok(DerivationType::Quotation), - 3 => Ok(DerivationType::PrimarySource), - _ => Err("Unrecognized enum variant".into()), - } - } - } + use super::*; + + impl<DB> ToSql<Integer, DB> for DerivationType + where + DB: Backend, + i32: ToSql<Integer, DB>, + { + fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> diesel::serialize::Result { + match self { + DerivationType::None => (-1).to_sql(out), + DerivationType::Revision => 1.to_sql(out), + DerivationType::Quotation => 2.to_sql(out), + DerivationType::PrimarySource => 3.to_sql(out), + } + } + } + + impl<DB> FromSql<Integer, DB> for DerivationType + where + DB: Backend, + i32: FromSql<Integer, DB>, + { + fn from_sql(bytes: <DB as Backend>::RawValue<'_>) -> diesel::deserialize::Result<Self> { + match i32::from_sql(bytes)? { + -1 => Ok(DerivationType::None), + 1 => Ok(DerivationType::Revision), + 2 => Ok(DerivationType::Quotation), + 3 => Ok(DerivationType::PrimarySource), + _ => Err("Unrecognized enum variant".into()), + } + } + } }
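The `ToSql`/`FromSql` pair above maps `DerivationType` to the integers -1, 1, 2, and 3 and rejects everything else. A diesel-free round-trip sketch of that mapping (the enum here is a local copy for illustration, not the crate's):

```rust
// Round-trip check for the DerivationType <-> i32 mapping used by the
// ToSql/FromSql impls above.
#[derive(Debug, Clone, Copy, PartialEq)]
enum DerivationType {
    None,
    Revision,
    Quotation,
    PrimarySource,
}

fn to_i32(d: DerivationType) -> i32 {
    match d {
        DerivationType::None => -1,
        DerivationType::Revision => 1,
        DerivationType::Quotation => 2,
        DerivationType::PrimarySource => 3,
    }
}

fn from_i32(v: i32) -> Result<DerivationType, &'static str> {
    match v {
        -1 => Ok(DerivationType::None),
        1 => Ok(DerivationType::Revision),
        2 => Ok(DerivationType::Quotation),
        3 => Ok(DerivationType::PrimarySource),
        _ => Err("Unrecognized enum variant"),
    }
}

fn main() {
    for d in [
        DerivationType::None,
        DerivationType::Revision,
        DerivationType::Quotation,
        DerivationType::PrimarySource,
    ] {
        assert_eq!(from_i32(to_i32(d)), Ok(d));
    }
    assert!(from_i32(0).is_err()); // 0 is deliberately unmapped
}
```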
impl TryFrom<i32> for DerivationType { - type Error = &'static str; + type Error = &'static str; - fn try_from(value: i32) -> Result<Self, Self::Error> { - match value { - -1 => Ok(DerivationType::None), - 1 => Ok(DerivationType::Revision), - 2 => Ok(DerivationType::Quotation), - 3 => Ok(DerivationType::PrimarySource), - _ => Err("Unrecognized enum variant when converting from 'i32'"), - } - } + fn try_from(value: i32) -> Result<Self, Self::Error> { + match value { + -1 => Ok(DerivationType::None), + 1 => Ok(DerivationType::Revision), + 2 => Ok(DerivationType::Quotation), + 3 => Ok(DerivationType::PrimarySource), + _ => Err("Unrecognized enum variant when converting from 'i32'"), + } + } } impl DerivationType { - pub fn revision() -> Self { - Self::Revision - } + pub fn revision() -> Self { + Self::Revision + } - pub fn quotation() -> Self { - Self::Quotation - } + pub fn quotation() -> Self { + Self::Quotation + } - pub fn primary_source() -> Self { - Self::PrimarySource - } + pub fn primary_source() -> Self { + Self::PrimarySource + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct CreateNamespace { - pub id: NamespaceId, + pub id: NamespaceId, } impl CreateNamespace { - pub fn new(id: NamespaceId) -> Self { - Self { id } - } + pub fn new(id: NamespaceId) -> Self { + Self { id } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct AgentExists { - pub namespace: NamespaceId, - pub id: AgentId, + pub namespace: NamespaceId, + pub id: AgentId, } impl AgentExists { - pub fn new(namespace: NamespaceId, id: AgentId) -> Self { - Self { namespace, id } - } + pub fn new(namespace: NamespaceId, id: AgentId) -> Self { + Self { namespace, id } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct ActsOnBehalfOf { - pub id: DelegationId, - pub role: Option<Role>, - pub activity_id: Option<ActivityId>, - pub responsible_id: AgentId, - pub delegate_id: AgentId, - pub namespace: NamespaceId, + pub id: DelegationId, + pub role: Option<Role>, + pub activity_id: Option<ActivityId>, + pub responsible_id: AgentId, + pub delegate_id: AgentId, + pub namespace: NamespaceId, } impl ActsOnBehalfOf { - pub fn new( - namespace: NamespaceId, - responsible_id: AgentId, - delegate_id: AgentId, - activity_id: Option<ActivityId>, - role: Option<Role>, - ) -> Self { - Self { - namespace, - id: DelegationId::from_component_ids( - &delegate_id,
&responsible_id, - activity_id.as_ref(), - role.as_ref(), - ), - role, - activity_id, - responsible_id, - delegate_id, - } - } + pub fn new( + namespace: NamespaceId, + responsible_id: AgentId, + delegate_id: AgentId, + activity_id: Option<ActivityId>, + role: Option<Role>, + ) -> Self { + Self { + namespace, + id: DelegationId::from_component_ids( + &delegate_id, + &responsible_id, + activity_id.as_ref(), + role.as_ref(), + ), + role, + activity_id, + responsible_id, + delegate_id, + } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct ActivityExists { - pub namespace: NamespaceId, - pub id: ActivityId, + pub namespace: NamespaceId, + pub id: ActivityId, } impl ActivityExists { - pub fn new(namespace: NamespaceId, id: ActivityId) -> Self { - Self { namespace, id } - } + pub fn new(namespace: NamespaceId, id: ActivityId) -> Self { + Self { namespace, id } + } } #[derive(Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Debug)] pub struct TimeWrapper(pub DateTime<Utc>); impl TimeWrapper { - pub fn to_rfc3339(&self) -> String { - self.0.to_rfc3339() - } + pub fn to_rfc3339(&self) -> String { + self.0.to_rfc3339() + } - pub fn naive_utc(&self) -> NaiveDateTime { - self.0.naive_utc() - } + pub fn naive_utc(&self) -> NaiveDateTime { + self.0.naive_utc() + } } impl core::fmt::Display for TimeWrapper { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.0.to_rfc3339()) - } + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.0.to_rfc3339()) + } } impl From<DateTime<Utc>> for TimeWrapper { - fn from(dt: DateTime<Utc>) -> Self { - TimeWrapper(dt) - } + fn from(dt: DateTime<Utc>) -> Self { + TimeWrapper(dt) + } } #[cfg(feature = "parity-encoding")] impl scale_encode::EncodeAsType for TimeWrapper { - fn encode_as_type_to( - &self, - type_id: u32, - types: &scale_info::PortableRegistry, - out: &mut scale_encode::Vec<u8>, - ) -> Result<(), scale_encode::Error> { - let timestamp = self.0.timestamp(); - let subsec_nanos = self.0.timestamp_subsec_nanos(); - (timestamp, subsec_nanos).encode_as_type_to(type_id, types, out) - } + fn encode_as_type_to( + &self, + type_id: u32, + types: &scale_info::PortableRegistry, + out: &mut scale_encode::Vec<u8>, + ) -> Result<(), scale_encode::Error> { + let timestamp = self.0.timestamp(); + let subsec_nanos = self.0.timestamp_subsec_nanos(); + (timestamp, subsec_nanos).encode_as_type_to(type_id, types, out) + } } #[cfg(feature = "parity-encoding")] impl parity_scale_codec::Encode for TimeWrapper { - fn encode_to<T: parity_scale_codec::Output + ?Sized>(&self, dest: &mut T) { - let timestamp = self.0.timestamp(); - let subsec_nanos = self.0.timestamp_subsec_nanos(); - (timestamp, subsec_nanos).encode_to(dest); - } + fn encode_to<T: parity_scale_codec::Output + ?Sized>(&self, dest: &mut T) { + let timestamp = self.0.timestamp(); + let subsec_nanos = self.0.timestamp_subsec_nanos(); + (timestamp, subsec_nanos).encode_to(dest); + } }
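`encode_as_type_to` and `encode_to` above both serialize the wrapped timestamp as an `(i64, u32)` pair of Unix seconds and subsecond nanoseconds, and `decode` (next) rebuilds the `DateTime<Utc>` from the same pair. A round-trip sketch of that scheme using `chrono` and `parity-scale-codec` directly:

```rust
// Sketch of the (seconds, subsec-nanos) round trip that the Encode/Decode
// impls rely on; assumes the `chrono` and `parity-scale-codec` crates.
use chrono::{DateTime, TimeZone, Utc};
use parity_scale_codec::{Decode, Encode};

fn main() {
    let original: DateTime<Utc> =
        Utc.timestamp_opt(1_700_000_000, 123_456_789).single().unwrap();

    // Encode exactly as TimeWrapper does: an (i64, u32) tuple.
    let bytes = (original.timestamp(), original.timestamp_subsec_nanos()).encode();

    // Decode and rebuild the DateTime, as TimeWrapper::decode does.
    let (secs, nanos) = <(i64, u32)>::decode(&mut &bytes[..]).unwrap();
    let roundtrip = Utc.timestamp_opt(secs, nanos).single().expect("Invalid timestamp");

    assert_eq!(original, roundtrip);
}
```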
#[cfg(feature = "parity-encoding")] impl parity_scale_codec::Decode for TimeWrapper { - fn decode<I: parity_scale_codec::Input>( - input: &mut I, - ) -> Result<Self, parity_scale_codec::Error> { - let (timestamp, subsec_nanos) = <(i64, u32)>::decode(input)?; + fn decode<I: parity_scale_codec::Input>( + input: &mut I, + ) -> Result<Self, parity_scale_codec::Error> { + let (timestamp, subsec_nanos) = <(i64, u32)>::decode(input)?; - let datetime = - Utc.timestamp_opt(timestamp, subsec_nanos).single().ok_or("Invalid timestamp")?; + let datetime = + Utc.timestamp_opt(timestamp, subsec_nanos).single().ok_or("Invalid timestamp")?; - Ok(Self(datetime)) - } + Ok(Self(datetime)) + } } #[cfg(feature = "parity-encoding")] impl scale_info::TypeInfo for TimeWrapper { - type Identity = Self; + type Identity = Self; - fn type_info() -> scale_info::Type { - scale_info::Type::builder() - .path(scale_info::Path::new("TimeWrapper", module_path!())) - .composite( - scale_info::build::Fields::unnamed() - .field(|f| f.ty::<i64>().type_name("Timestamp")) - .field(|f| f.ty::<u32>().type_name("SubsecNanos")), - ) - } + fn type_info() -> scale_info::Type { + scale_info::Type::builder() + .path(scale_info::Path::new("TimeWrapper", module_path!())) + .composite( + scale_info::build::Fields::unnamed() + .field(|f| f.ty::<i64>().type_name("Timestamp")) + .field(|f| f.ty::<u32>().type_name("SubsecNanos")), + ) + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct StartActivity { - pub namespace: NamespaceId, - pub id: ActivityId, - pub time: TimeWrapper, + pub namespace: NamespaceId, + pub id: ActivityId, + pub time: TimeWrapper, } impl StartActivity { - pub fn new(namespace: NamespaceId, id: ActivityId, time: DateTime<Utc>) -> Self { - Self { namespace, id, time: TimeWrapper(time) } - } + pub fn new(namespace: NamespaceId, id: ActivityId, time: DateTime<Utc>) -> Self { + Self { namespace, id, time: TimeWrapper(time) } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct EndActivity { - pub namespace: NamespaceId, - pub id: ActivityId, - pub time: TimeWrapper, + pub namespace: NamespaceId, + pub id: ActivityId, + pub time: TimeWrapper, } impl EndActivity { - pub fn new(namespace: NamespaceId, id: ActivityId, time: DateTime<Utc>) -> Self { - Self { namespace, id, time: TimeWrapper(time) } - } + pub fn new(namespace: NamespaceId, id: ActivityId, time: DateTime<Utc>) -> Self { + Self { namespace, id, time: TimeWrapper(time) } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct ActivityUses { - pub namespace: NamespaceId, - pub id: EntityId, - pub activity: ActivityId, + pub namespace: NamespaceId, + pub id: EntityId, + pub activity: ActivityId, } impl ActivityUses { - /// Creates a new `ActivityUses` instance. - /// - /// # Arguments - /// - /// * `namespace` - The namespace identifier for the activity. - /// * `id` - The unique identifier for the entity being used. - /// * `activity` - The unique identifier for the activity using the entity.
- pub fn new(namespace: NamespaceId, id: EntityId, activity: ActivityId) -> Self { - Self { namespace, id, activity } - } + /// Creates a new `ActivityUses` instance. + /// + /// # Arguments + /// + /// * `namespace` - The namespace identifier for the activity. + /// * `id` - The unique identifier for the entity being used. + /// * `activity` - The unique identifier for the activity using the entity. + pub fn new(namespace: NamespaceId, id: EntityId, activity: ActivityId) -> Self { + Self { namespace, id, activity } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct EntityExists { - pub namespace: NamespaceId, - pub id: EntityId, + pub namespace: NamespaceId, + pub id: EntityId, } impl EntityExists { - /// Creates a new `EntityExists` instance. - /// - /// # Arguments - /// - /// * `namespace` - The namespace identifier for the entity. - /// * `id` - The identifier for the entity. - #[tracing::instrument(skip(namespace, id), fields(namespace = % namespace, entity_id = % id))] - pub fn new(namespace: NamespaceId, id: EntityId) -> Self { - Self { namespace, id } - } + /// Creates a new `EntityExists` instance. + /// + /// # Arguments + /// + /// * `namespace` - The namespace identifier for the entity. + /// * `id` - The identifier for the entity. + #[tracing::instrument(skip(namespace, id), fields(namespace = % namespace, entity_id = % id))] + pub fn new(namespace: NamespaceId, id: EntityId) -> Self { + Self { namespace, id } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct WasGeneratedBy { - pub namespace: NamespaceId, - pub id: EntityId, - pub activity: ActivityId, + pub namespace: NamespaceId, + pub id: EntityId, + pub activity: ActivityId, } impl WasGeneratedBy { - /// Creates a new `WasGeneratedBy` instance. - /// - /// # Arguments - /// - /// * `namespace` - The namespace identifier for the entity. - /// * `id` - The unique identifier for the entity. - /// * `activity` - The identifier for the activity that generated the entity. - pub fn new(namespace: NamespaceId, id: EntityId, activity: ActivityId) -> Self { - Self { namespace, id, activity } - } + /// Creates a new `WasGeneratedBy` instance. + /// + /// # Arguments + /// + /// * `namespace` - The namespace identifier for the entity. + /// * `id` - The unique identifier for the entity. + /// * `activity` - The identifier for the activity that generated the entity. 
+ pub fn new(namespace: NamespaceId, id: EntityId, activity: ActivityId) -> Self { + Self { namespace, id, activity } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct EntityDerive { - pub namespace: NamespaceId, - pub id: EntityId, - pub used_id: EntityId, - pub activity_id: Option, - pub typ: DerivationType, + pub namespace: NamespaceId, + pub id: EntityId, + pub used_id: EntityId, + pub activity_id: Option, + pub typ: DerivationType, } impl EntityDerive { - /// Creates a new `EntityDerive` instance. - /// - /// # Arguments - /// - /// * `namespace` - The namespace identifier for the entity. - /// * `id` - The unique identifier for the entity. - /// * `used_id` - The identifier for the entity that was used. - /// * `activity_id` - The identifier for the activity that derived the entity, if any. - /// * `typ` - The type of derivation. - pub fn new( - namespace: NamespaceId, - id: EntityId, - used_id: EntityId, - activity_id: Option, - typ: DerivationType, - ) -> Self { - Self { namespace, id, used_id, activity_id, typ } - } + /// Creates a new `EntityDerive` instance. + /// + /// # Arguments + /// + /// * `namespace` - The namespace identifier for the entity. + /// * `id` - The unique identifier for the entity. + /// * `used_id` - The identifier for the entity that was used. + /// * `activity_id` - The identifier for the activity that derived the entity, if any. + /// * `typ` - The type of derivation. 
+ pub fn new( + namespace: NamespaceId, + id: EntityId, + used_id: EntityId, + activity_id: Option, + typ: DerivationType, + ) -> Self { + Self { namespace, id, used_id, activity_id, typ } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct WasAssociatedWith { - pub id: AssociationId, - pub role: Option, - pub namespace: NamespaceId, - pub activity_id: ActivityId, - pub agent_id: AgentId, + pub id: AssociationId, + pub role: Option, + pub namespace: NamespaceId, + pub activity_id: ActivityId, + pub agent_id: AgentId, } impl WasAssociatedWith { - pub fn new( - namespace: NamespaceId, - activity_id: ActivityId, - agent_id: AgentId, - role: Option, - ) -> Self { - Self { - id: AssociationId::from_component_ids(&agent_id, &activity_id, role.as_ref()), - role, - namespace, - activity_id, - agent_id, - } - } + pub fn new( + namespace: NamespaceId, + activity_id: ActivityId, + agent_id: AgentId, + role: Option, + ) -> Self { + Self { + id: AssociationId::from_component_ids(&agent_id, &activity_id, role.as_ref()), + role, + namespace, + activity_id, + agent_id, + } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct WasAttributedTo { - pub id: AttributionId, - pub role: Option, - pub namespace: NamespaceId, - pub entity_id: EntityId, - pub agent_id: AgentId, + pub id: AttributionId, + pub role: Option, + pub namespace: NamespaceId, + pub entity_id: EntityId, + pub agent_id: AgentId, } impl WasAttributedTo { - #[tracing::instrument(skip(namespace, role))] - pub fn new( - namespace: NamespaceId, - entity_id: EntityId, - agent_id: AgentId, - role: Option, - ) -> Self { - Self { - id: AttributionId::from_component_ids(&agent_id, &entity_id, role.as_ref()), - role, - namespace, - entity_id, - agent_id, - } - } + #[tracing::instrument(skip(namespace, role))] + pub fn new( + namespace: NamespaceId, + entity_id: EntityId, + agent_id: AgentId, + role: Option, + ) -> Self { + Self { + id: AttributionId::from_component_ids(&agent_id, &entity_id, role.as_ref()), + role, + namespace, + entity_id, + agent_id, + } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub struct WasInformedBy { - pub namespace: NamespaceId, - pub activity: ActivityId, - pub informing_activity: ActivityId, + pub namespace: NamespaceId, + pub activity: ActivityId, + pub informing_activity: ActivityId, } impl WasInformedBy { - /// Creates a new `WasInformedBy` instance. - /// - /// # Arguments - /// - /// * `namespace` - The namespace identifier for the activity. 
- /// * `activity` - The ActivityId for the activity that was informed. - /// * `informing_activity` - The ActivityId for the informing activity. - pub fn new( - namespace: NamespaceId, - activity: ActivityId, - informing_activity: ActivityId, - ) -> Self { - Self { namespace, activity, informing_activity } - } + /// Creates a new `WasInformedBy` instance. + /// + /// # Arguments + /// + /// * `namespace` - The namespace identifier for the activity. + /// * `activity` - The ActivityId for the activity that was informed. + /// * `informing_activity` - The ActivityId for the informing activity. + pub fn new( + namespace: NamespaceId, + activity: ActivityId, + informing_activity: ActivityId, + ) -> Self { + Self { namespace, activity, informing_activity } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub enum SetAttributes { - Entity { namespace: NamespaceId, id: EntityId, attributes: Attributes }, - Agent { namespace: NamespaceId, id: AgentId, attributes: Attributes }, - Activity { namespace: NamespaceId, id: ActivityId, attributes: Attributes }, + Entity { namespace: NamespaceId, id: EntityId, attributes: Attributes }, + Agent { namespace: NamespaceId, id: AgentId, attributes: Attributes }, + Activity { namespace: NamespaceId, id: ActivityId, attributes: Attributes }, } impl SetAttributes { - pub fn agent(namespace: NamespaceId, id: AgentId, attributes: Attributes) -> Self { - SetAttributes::Agent { namespace, id, attributes } - } + pub fn agent(namespace: NamespaceId, id: AgentId, attributes: Attributes) -> Self { + SetAttributes::Agent { namespace, id, attributes } + } - pub fn entity(namespace: NamespaceId, id: EntityId, attributes: Attributes) -> Self { - SetAttributes::Entity { namespace, id, attributes } - } + pub fn entity(namespace: NamespaceId, id: EntityId, attributes: Attributes) -> Self { + SetAttributes::Entity { namespace, id, attributes } + } - pub fn activity(namespace: NamespaceId, id: ActivityId, attributes: Attributes) -> Self { - SetAttributes::Activity { namespace, id, attributes } - } + pub fn activity(namespace: NamespaceId, id: ActivityId, attributes: Attributes) -> Self { + SetAttributes::Activity { namespace, id, attributes } + } } #[cfg_attr( - feature = "parity-encoding", - derive( - scale_info::TypeInfo, - parity_scale_codec::Encode, - parity_scale_codec::Decode, - scale_encode::EncodeAsType - ) + feature = "parity-encoding", + derive( + scale_info::TypeInfo, + parity_scale_codec::Encode, + parity_scale_codec::Decode, + scale_encode::EncodeAsType + ) )] #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] pub enum ChronicleOperation { - CreateNamespace(CreateNamespace), - AgentExists(AgentExists), - AgentActsOnBehalfOf(ActsOnBehalfOf), - ActivityExists(ActivityExists), - StartActivity(StartActivity), - EndActivity(EndActivity), - ActivityUses(ActivityUses), - EntityExists(EntityExists), - WasGeneratedBy(WasGeneratedBy), - EntityDerive(EntityDerive), - SetAttributes(SetAttributes), - WasAssociatedWith(WasAssociatedWith), - WasAttributedTo(WasAttributedTo), - WasInformedBy(WasInformedBy), + CreateNamespace(CreateNamespace), + AgentExists(AgentExists), + AgentActsOnBehalfOf(ActsOnBehalfOf), + 
ActivityExists(ActivityExists), + StartActivity(StartActivity), + EndActivity(EndActivity), + ActivityUses(ActivityUses), + EntityExists(EntityExists), + WasGeneratedBy(WasGeneratedBy), + EntityDerive(EntityDerive), + SetAttributes(SetAttributes), + WasAssociatedWith(WasAssociatedWith), + WasAttributedTo(WasAttributedTo), + WasInformedBy(WasInformedBy), } impl ChronicleOperation { - #[tracing::instrument] - #[tracing::instrument] - pub fn create_namespace(id: NamespaceId) -> Self { - ChronicleOperation::CreateNamespace(CreateNamespace::new(id)) - } - - #[tracing::instrument] - pub fn agent_exists(namespace: NamespaceId, id: AgentId) -> Self { - ChronicleOperation::AgentExists(AgentExists::new(namespace, id)) - } - - #[tracing::instrument] - pub fn agent_acts_on_behalf_of( - namespace: NamespaceId, - responsible_id: AgentId, - delegate_id: AgentId, - activity_id: Option, - role: Option, - ) -> Self { - ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf::new( - namespace, - responsible_id, - delegate_id, - activity_id, - role, - )) - } - - #[tracing::instrument] - pub fn activity_exists(namespace: NamespaceId, id: ActivityId) -> Self { - ChronicleOperation::ActivityExists(ActivityExists::new(namespace, id)) - } - - #[tracing::instrument] - pub fn start_activity( - namespace: NamespaceId, - id: ActivityId, - start_time: DateTime, - ) -> Self { - ChronicleOperation::StartActivity(StartActivity::new(namespace, id, start_time)) - } - - #[tracing::instrument] - pub fn end_activity(namespace: NamespaceId, id: ActivityId, end_time: DateTime) -> Self { - ChronicleOperation::EndActivity(EndActivity::new(namespace, id, end_time)) - } - - #[tracing::instrument] - pub fn activity_used( - namespace: NamespaceId, - activity_id: ActivityId, - entity_id: EntityId, - ) -> Self { - ChronicleOperation::ActivityUses(ActivityUses::new(namespace, entity_id, activity_id)) - } - - #[tracing::instrument] - pub fn entity_exists(namespace: NamespaceId, id: EntityId) -> Self { - ChronicleOperation::EntityExists(EntityExists::new(namespace, id)) - } - - #[tracing::instrument] - pub fn was_generated_by( - namespace: NamespaceId, - entity_id: EntityId, - activity_id: ActivityId, - ) -> Self { - ChronicleOperation::WasGeneratedBy(WasGeneratedBy::new(namespace, entity_id, activity_id)) - } - - pub fn entity_derive( - namespace: NamespaceId, - source_id: EntityId, - target_id: EntityId, - activity_id: Option, - derivation_type: DerivationType, - ) -> Self { - ChronicleOperation::EntityDerive(EntityDerive::new( - namespace, - source_id, - target_id, - activity_id, - derivation_type, - )) - } - - pub fn set_attributes(set_attributes: SetAttributes) -> Self { - ChronicleOperation::SetAttributes(set_attributes) - } - - #[tracing::instrument] - pub fn was_associated_with( - namespace: NamespaceId, - activity_id: ActivityId, - agent_id: AgentId, - role: Option, - ) -> Self { - ChronicleOperation::WasAssociatedWith(WasAssociatedWith::new( - namespace, - activity_id, - agent_id, - role, - )) - } - - pub fn was_attributed_to( - namespace: NamespaceId, - entity_id: EntityId, - agent_id: AgentId, - role: Option, - ) -> Self { - ChronicleOperation::WasAttributedTo(WasAttributedTo::new( - namespace, entity_id, agent_id, role, - )) - } - - #[tracing::instrument] - pub fn was_informed_by( - namespace: NamespaceId, - informed: ActivityId, - informant: ActivityId, - ) -> Self { - ChronicleOperation::WasInformedBy(WasInformedBy::new(namespace, informed, informant)) - } - - /// Returns a reference to the `NamespaceId` of the 
`ChronicleOperation` - pub fn namespace(&self) -> &NamespaceId { - match self { - ChronicleOperation::ActivityExists(o) => &o.namespace, - ChronicleOperation::AgentExists(o) => &o.namespace, - ChronicleOperation::AgentActsOnBehalfOf(o) => &o.namespace, - ChronicleOperation::CreateNamespace(o) => &o.id, - ChronicleOperation::StartActivity(o) => &o.namespace, - ChronicleOperation::EndActivity(o) => &o.namespace, - ChronicleOperation::ActivityUses(o) => &o.namespace, - ChronicleOperation::EntityExists(o) => &o.namespace, - ChronicleOperation::WasGeneratedBy(o) => &o.namespace, - ChronicleOperation::EntityDerive(o) => &o.namespace, - ChronicleOperation::SetAttributes(o) => match o { - SetAttributes::Activity { namespace, .. } => namespace, - SetAttributes::Agent { namespace, .. } => namespace, - SetAttributes::Entity { namespace, .. } => namespace, - }, - ChronicleOperation::WasAssociatedWith(o) => &o.namespace, - ChronicleOperation::WasAttributedTo(o) => &o.namespace, - ChronicleOperation::WasInformedBy(o) => &o.namespace, - } - } - - // Chronicle is open world, so the use of an id implies that it exists. Match an operation and - // return the implied existential operations. - pub fn implied_by(&self) -> Vec { - match self { - ChronicleOperation::AgentActsOnBehalfOf(o) => vec![ - ChronicleOperation::agent_exists(o.namespace.clone(), o.delegate_id.clone()), - ChronicleOperation::agent_exists(o.namespace.clone(), o.responsible_id.clone()), - ], - ChronicleOperation::StartActivity(o) => { - vec![ChronicleOperation::activity_exists(o.namespace.clone(), o.id.clone())] - } - ChronicleOperation::EndActivity(o) => { - vec![ChronicleOperation::activity_exists(o.namespace.clone(), o.id.clone())] - } - ChronicleOperation::ActivityUses(o) => vec![ - ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), - ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), - ], - ChronicleOperation::EntityExists(o) => { - vec![ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone())] - } - ChronicleOperation::WasGeneratedBy(o) => vec![ - ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), - ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), - ], - ChronicleOperation::EntityDerive(o) => { - let mut ops = vec![ - ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), - ChronicleOperation::entity_exists(o.namespace.clone(), o.used_id.clone()), - ]; - if let Some(activity_id) = &o.activity_id { - ops.push(ChronicleOperation::activity_exists( - o.namespace.clone(), - activity_id.clone(), - )); - } - ops - } - ChronicleOperation::SetAttributes(o) => match o { - SetAttributes::Activity { namespace, id, .. } => { - vec![ChronicleOperation::activity_exists(namespace.clone(), id.clone())] - } - SetAttributes::Agent { namespace, id, .. } => { - vec![ChronicleOperation::agent_exists(namespace.clone(), id.clone())] - } - SetAttributes::Entity { namespace, id, .. 
} => { - vec![ChronicleOperation::entity_exists(namespace.clone(), id.clone())] - } - }, - ChronicleOperation::WasAssociatedWith(o) => vec![ - ChronicleOperation::activity_exists(o.namespace.clone(), o.activity_id.clone()), - ChronicleOperation::agent_exists(o.namespace.clone(), o.agent_id.clone()), - ], - ChronicleOperation::WasAttributedTo(o) => vec![ - ChronicleOperation::entity_exists(o.namespace.clone(), o.entity_id.clone()), - ChronicleOperation::agent_exists(o.namespace.clone(), o.agent_id.clone()), - ], - ChronicleOperation::WasInformedBy(o) => vec![ - ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), - ChronicleOperation::activity_exists( - o.namespace.clone(), - o.informing_activity.clone(), - ), - ], - _ => vec![], - } - } + #[tracing::instrument] + pub fn create_namespace(id: NamespaceId) -> Self { + ChronicleOperation::CreateNamespace(CreateNamespace::new(id)) + } + + #[tracing::instrument] + pub fn agent_exists(namespace: NamespaceId, id: AgentId) -> Self { + ChronicleOperation::AgentExists(AgentExists::new(namespace, id)) + } + + #[tracing::instrument] + pub fn agent_acts_on_behalf_of( + namespace: NamespaceId, + responsible_id: AgentId, + delegate_id: AgentId, + activity_id: Option<ActivityId>, + role: Option<Role>, + ) -> Self { + ChronicleOperation::AgentActsOnBehalfOf(ActsOnBehalfOf::new( + namespace, + responsible_id, + delegate_id, + activity_id, + role, + )) + } + + #[tracing::instrument] + pub fn activity_exists(namespace: NamespaceId, id: ActivityId) -> Self { + ChronicleOperation::ActivityExists(ActivityExists::new(namespace, id)) + } + + #[tracing::instrument] + pub fn start_activity( + namespace: NamespaceId, + id: ActivityId, + start_time: DateTime<Utc>, + ) -> Self { + ChronicleOperation::StartActivity(StartActivity::new(namespace, id, start_time)) + } + + #[tracing::instrument] + pub fn end_activity(namespace: NamespaceId, id: ActivityId, end_time: DateTime<Utc>) -> Self { + ChronicleOperation::EndActivity(EndActivity::new(namespace, id, end_time)) + } + + #[tracing::instrument] + pub fn activity_used( + namespace: NamespaceId, + activity_id: ActivityId, + entity_id: EntityId, + ) -> Self { + ChronicleOperation::ActivityUses(ActivityUses::new(namespace, entity_id, activity_id)) + } + + #[tracing::instrument] + pub fn entity_exists(namespace: NamespaceId, id: EntityId) -> Self { + ChronicleOperation::EntityExists(EntityExists::new(namespace, id)) + } + + #[tracing::instrument] + pub fn was_generated_by( + namespace: NamespaceId, + entity_id: EntityId, + activity_id: ActivityId, + ) -> Self { + ChronicleOperation::WasGeneratedBy(WasGeneratedBy::new(namespace, entity_id, activity_id)) + } + + pub fn entity_derive( + namespace: NamespaceId, + source_id: EntityId, + target_id: EntityId, + activity_id: Option<ActivityId>, + derivation_type: DerivationType, + ) -> Self { + ChronicleOperation::EntityDerive(EntityDerive::new( + namespace, + source_id, + target_id, + activity_id, + derivation_type, + )) + } + + pub fn set_attributes(set_attributes: SetAttributes) -> Self { + ChronicleOperation::SetAttributes(set_attributes) + } + + #[tracing::instrument] + pub fn was_associated_with( + namespace: NamespaceId, + activity_id: ActivityId, + agent_id: AgentId, + role: Option<Role>, + ) -> Self { + ChronicleOperation::WasAssociatedWith(WasAssociatedWith::new( + namespace, + activity_id, + agent_id, + role, + )) + } + + pub fn was_attributed_to( + namespace: NamespaceId, + entity_id: EntityId, + agent_id: AgentId, + role: Option<Role>, + ) -> Self { +
ChronicleOperation::WasAttributedTo(WasAttributedTo::new( + namespace, entity_id, agent_id, role, + )) + } + + #[tracing::instrument] + pub fn was_informed_by( + namespace: NamespaceId, + informed: ActivityId, + informant: ActivityId, + ) -> Self { + ChronicleOperation::WasInformedBy(WasInformedBy::new(namespace, informed, informant)) + } + + /// Returns a reference to the `NamespaceId` of the `ChronicleOperation` + pub fn namespace(&self) -> &NamespaceId { + match self { + ChronicleOperation::ActivityExists(o) => &o.namespace, + ChronicleOperation::AgentExists(o) => &o.namespace, + ChronicleOperation::AgentActsOnBehalfOf(o) => &o.namespace, + ChronicleOperation::CreateNamespace(o) => &o.id, + ChronicleOperation::StartActivity(o) => &o.namespace, + ChronicleOperation::EndActivity(o) => &o.namespace, + ChronicleOperation::ActivityUses(o) => &o.namespace, + ChronicleOperation::EntityExists(o) => &o.namespace, + ChronicleOperation::WasGeneratedBy(o) => &o.namespace, + ChronicleOperation::EntityDerive(o) => &o.namespace, + ChronicleOperation::SetAttributes(o) => match o { + SetAttributes::Activity { namespace, .. } => namespace, + SetAttributes::Agent { namespace, .. } => namespace, + SetAttributes::Entity { namespace, .. } => namespace, + }, + ChronicleOperation::WasAssociatedWith(o) => &o.namespace, + ChronicleOperation::WasAttributedTo(o) => &o.namespace, + ChronicleOperation::WasInformedBy(o) => &o.namespace, + } + } + + // Chronicle is open world, so the use of an id implies that it exists. Match an operation and + // return the implied existential operations. + pub fn implied_by(&self) -> Vec { + match self { + ChronicleOperation::AgentActsOnBehalfOf(o) => vec![ + ChronicleOperation::agent_exists(o.namespace.clone(), o.delegate_id.clone()), + ChronicleOperation::agent_exists(o.namespace.clone(), o.responsible_id.clone()), + ], + ChronicleOperation::StartActivity(o) => { + vec![ChronicleOperation::activity_exists(o.namespace.clone(), o.id.clone())] + }, + ChronicleOperation::EndActivity(o) => { + vec![ChronicleOperation::activity_exists(o.namespace.clone(), o.id.clone())] + }, + ChronicleOperation::ActivityUses(o) => vec![ + ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), + ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), + ], + ChronicleOperation::EntityExists(o) => { + vec![ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone())] + }, + ChronicleOperation::WasGeneratedBy(o) => vec![ + ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), + ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), + ], + ChronicleOperation::EntityDerive(o) => { + let mut ops = vec![ + ChronicleOperation::entity_exists(o.namespace.clone(), o.id.clone()), + ChronicleOperation::entity_exists(o.namespace.clone(), o.used_id.clone()), + ]; + if let Some(activity_id) = &o.activity_id { + ops.push(ChronicleOperation::activity_exists( + o.namespace.clone(), + activity_id.clone(), + )); + } + ops + }, + ChronicleOperation::SetAttributes(o) => match o { + SetAttributes::Activity { namespace, id, .. } => { + vec![ChronicleOperation::activity_exists(namespace.clone(), id.clone())] + }, + SetAttributes::Agent { namespace, id, .. } => { + vec![ChronicleOperation::agent_exists(namespace.clone(), id.clone())] + }, + SetAttributes::Entity { namespace, id, .. 
} => { + vec![ChronicleOperation::entity_exists(namespace.clone(), id.clone())] + }, + }, + ChronicleOperation::WasAssociatedWith(o) => vec![ + ChronicleOperation::activity_exists(o.namespace.clone(), o.activity_id.clone()), + ChronicleOperation::agent_exists(o.namespace.clone(), o.agent_id.clone()), + ], + ChronicleOperation::WasAttributedTo(o) => vec![ + ChronicleOperation::entity_exists(o.namespace.clone(), o.entity_id.clone()), + ChronicleOperation::agent_exists(o.namespace.clone(), o.agent_id.clone()), + ], + ChronicleOperation::WasInformedBy(o) => vec![ + ChronicleOperation::activity_exists(o.namespace.clone(), o.activity.clone()), + ChronicleOperation::activity_exists( + o.namespace.clone(), + o.informing_activity.clone(), + ), + ], + _ => vec![], + } + } } diff --git a/crates/common/src/prov/vocab.rs b/crates/common/src/prov/vocab.rs index bd0e982f8..b7c4e731b 100644 --- a/crates/common/src/prov/vocab.rs +++ b/crates/common/src/prov/vocab.rs @@ -3,310 +3,310 @@ pub use chronicle_operations::ChronicleOperation; pub use prov::Prov; mod chronicle_operations { - #[derive(Clone, Copy, PartialEq, Eq, Hash)] - pub enum ChronicleOperation { - CreateNamespace, - NamespaceName, - NamespaceUuid, - AgentExists, - AgentName, - AgentUuid, - AgentActsOnBehalfOf, - DelegateId, - ResponsibleId, - ActivityExists, - ActivityName, - StartActivity, - StartActivityTime, - EndActivity, - EndActivityTime, - WasAssociatedWith, - WasAttributedTo, - ActivityUses, - EntityName, - Locator, - Role, - EntityExists, - WasGeneratedBy, - EntityDerive, - DerivationType, - UsedEntityName, - SetAttributes, - Attributes, - Attribute, - DomaintypeId, - WasInformedBy, - InformingActivityName, - Generated, - } - - const ACTIVITY_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#ActivityExists"; - const ACTIVITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#ActivityName"; - const START_ACTIVITY: &str = "http://chronicle.works/chronicleoperations/ns#StartActivity"; - const START_ACTIVITY_TIME: &str = - "http://chronicle.works/chronicleoperations/ns#StartActivityTime"; - const END_ACTIVITY: &str = "http://chronicle.works/chronicleoperations/ns#EndActivity"; - const END_ACTIVITY_TIME: &str = "http://chronicle.works/chronicleoperations/ns#EndActivityTime"; - const WAS_ASSOCIATED_WITH: &str = - "http://chronicle.works/chronicleoperations/ns#WasAssociatedWith"; - const WAS_ATTRIBUTED_TO: &str = "http://chronicle.works/chronicleoperations/ns#WasAttributedTo"; - const ACTIVITY_USES: &str = "http://chronicle.works/chronicleoperations/ns#ActivityUses"; - const ENTITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#EntityName"; - const LOCATOR: &str = "http://chronicle.works/chronicleoperations/ns#Locator"; - const ROLE: &str = "http://chronicle.works/chronicleoperations/ns#Role"; - const ENTITY_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#EntityExists"; - const WAS_GENERATED_BY: &str = "http://chronicle.works/chronicleoperations/ns#WasGeneratedBy"; - const ENTITY_DERIVE: &str = "http://chronicle.works/chronicleoperations/ns#EntityDerive"; - const DERIVATION_TYPE: &str = "http://chronicle.works/chronicleoperations/ns#DerivationType"; - const USED_ENTITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#UsedEntityName"; - const SET_ATTRIBUTES: &str = "http://chronicle.works/chronicleoperations/ns#SetAttributes"; - const ATTRIBUTES: &str = "http://chronicle.works/chronicleoperations/ns#Attributes"; - const ATTRIBUTE: &str = 
"http://chronicle.works/chronicleoperations/ns#Attribute"; - const DOMAINTYPE_ID: &str = "http://chronicle.works/chronicleoperations/ns#DomaintypeId"; - const WAS_INFORMED_BY: &str = "http://chronicle.works/chronicleoperations/ns#WasInformedBy"; - const INFORMING_ACTIVITY_NAME: &str = - "http://chronicle.works/chronicleoperations/ns#InformingActivityName"; - const GENERATED: &str = "http://chronicle.works/chronicleoperations/ns#Generated"; - const CREATE_NAMESPACE: &str = "http://chronicle.works/chronicleoperations/ns#CreateNamespace"; - const NAMESPACE_NAME: &str = "http://chronicle.works/chronicleoperations/ns#namespaceName"; - const NAMESPACE_UUID: &str = "http://chronicle.works/chronicleoperations/ns#namespaceUuid"; - const AGENT_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#AgentExists"; - const AGENT_NAME: &str = "http://chronicle.works/chronicleoperations/ns#agentName"; - const AGENT_UUID: &str = "http://chronicle.works/chronicleoperations/ns#agentUuid"; - const AGENT_ACTS_ON_BEHALF_OF: &str = - "http://chronicle.works/chronicleoperations/ns#AgentActsOnBehalfOf"; - const DELEGATE_ID: &str = "http://chronicle.works/chronicleoperations/ns#delegateId"; - const RESPONSIBLE_ID: &str = "http://chronicle.works/chronicleoperations/ns#responsibleId"; - - impl AsRef for ChronicleOperation { - fn as_ref(&self) -> &'static str { - match self { - ChronicleOperation::ActivityExists => ACTIVITY_EXISTS, - ChronicleOperation::ActivityName => ACTIVITY_NAME, - ChronicleOperation::StartActivity => START_ACTIVITY, - ChronicleOperation::StartActivityTime => START_ACTIVITY_TIME, - ChronicleOperation::EndActivity => END_ACTIVITY, - ChronicleOperation::EndActivityTime => END_ACTIVITY_TIME, - ChronicleOperation::WasAssociatedWith => WAS_ASSOCIATED_WITH, - ChronicleOperation::WasAttributedTo => WAS_ATTRIBUTED_TO, - ChronicleOperation::ActivityUses => ACTIVITY_USES, - ChronicleOperation::EntityName => ENTITY_NAME, - ChronicleOperation::Locator => LOCATOR, - ChronicleOperation::Role => ROLE, - ChronicleOperation::EntityExists => ENTITY_EXISTS, - ChronicleOperation::WasGeneratedBy => WAS_GENERATED_BY, - ChronicleOperation::EntityDerive => ENTITY_DERIVE, - ChronicleOperation::DerivationType => DERIVATION_TYPE, - ChronicleOperation::UsedEntityName => USED_ENTITY_NAME, - ChronicleOperation::SetAttributes => SET_ATTRIBUTES, - ChronicleOperation::Attributes => ATTRIBUTES, - ChronicleOperation::Attribute => ATTRIBUTE, - ChronicleOperation::DomaintypeId => DOMAINTYPE_ID, - ChronicleOperation::WasInformedBy => WAS_INFORMED_BY, - ChronicleOperation::InformingActivityName => INFORMING_ACTIVITY_NAME, - ChronicleOperation::Generated => GENERATED, - ChronicleOperation::CreateNamespace => CREATE_NAMESPACE, - ChronicleOperation::NamespaceName => NAMESPACE_NAME, - ChronicleOperation::NamespaceUuid => NAMESPACE_UUID, - ChronicleOperation::AgentExists => AGENT_EXISTS, - ChronicleOperation::AgentName => AGENT_NAME, - ChronicleOperation::AgentUuid => AGENT_UUID, - ChronicleOperation::AgentActsOnBehalfOf => AGENT_ACTS_ON_BEHALF_OF, - ChronicleOperation::DelegateId => DELEGATE_ID, - ChronicleOperation::ResponsibleId => RESPONSIBLE_ID, - } - } - } - - #[cfg(feature = "json-ld")] - impl From for iri_string::types::IriString { - fn from(val: ChronicleOperation) -> Self { - use iri_string::types::UriString; - UriString::try_from(val.as_str().to_string()).unwrap().into() - } - } - - impl ChronicleOperation { - pub fn as_str(&self) -> &str { - self.as_ref() - } - } - - impl core::fmt::Display for ChronicleOperation { - fn 
fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.as_str()) - } - } + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub enum ChronicleOperation { + CreateNamespace, + NamespaceName, + NamespaceUuid, + AgentExists, + AgentName, + AgentUuid, + AgentActsOnBehalfOf, + DelegateId, + ResponsibleId, + ActivityExists, + ActivityName, + StartActivity, + StartActivityTime, + EndActivity, + EndActivityTime, + WasAssociatedWith, + WasAttributedTo, + ActivityUses, + EntityName, + Locator, + Role, + EntityExists, + WasGeneratedBy, + EntityDerive, + DerivationType, + UsedEntityName, + SetAttributes, + Attributes, + Attribute, + DomaintypeId, + WasInformedBy, + InformingActivityName, + Generated, + } + + const ACTIVITY_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#ActivityExists"; + const ACTIVITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#ActivityName"; + const START_ACTIVITY: &str = "http://chronicle.works/chronicleoperations/ns#StartActivity"; + const START_ACTIVITY_TIME: &str = + "http://chronicle.works/chronicleoperations/ns#StartActivityTime"; + const END_ACTIVITY: &str = "http://chronicle.works/chronicleoperations/ns#EndActivity"; + const END_ACTIVITY_TIME: &str = "http://chronicle.works/chronicleoperations/ns#EndActivityTime"; + const WAS_ASSOCIATED_WITH: &str = + "http://chronicle.works/chronicleoperations/ns#WasAssociatedWith"; + const WAS_ATTRIBUTED_TO: &str = "http://chronicle.works/chronicleoperations/ns#WasAttributedTo"; + const ACTIVITY_USES: &str = "http://chronicle.works/chronicleoperations/ns#ActivityUses"; + const ENTITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#EntityName"; + const LOCATOR: &str = "http://chronicle.works/chronicleoperations/ns#Locator"; + const ROLE: &str = "http://chronicle.works/chronicleoperations/ns#Role"; + const ENTITY_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#EntityExists"; + const WAS_GENERATED_BY: &str = "http://chronicle.works/chronicleoperations/ns#WasGeneratedBy"; + const ENTITY_DERIVE: &str = "http://chronicle.works/chronicleoperations/ns#EntityDerive"; + const DERIVATION_TYPE: &str = "http://chronicle.works/chronicleoperations/ns#DerivationType"; + const USED_ENTITY_NAME: &str = "http://chronicle.works/chronicleoperations/ns#UsedEntityName"; + const SET_ATTRIBUTES: &str = "http://chronicle.works/chronicleoperations/ns#SetAttributes"; + const ATTRIBUTES: &str = "http://chronicle.works/chronicleoperations/ns#Attributes"; + const ATTRIBUTE: &str = "http://chronicle.works/chronicleoperations/ns#Attribute"; + const DOMAINTYPE_ID: &str = "http://chronicle.works/chronicleoperations/ns#DomaintypeId"; + const WAS_INFORMED_BY: &str = "http://chronicle.works/chronicleoperations/ns#WasInformedBy"; + const INFORMING_ACTIVITY_NAME: &str = + "http://chronicle.works/chronicleoperations/ns#InformingActivityName"; + const GENERATED: &str = "http://chronicle.works/chronicleoperations/ns#Generated"; + const CREATE_NAMESPACE: &str = "http://chronicle.works/chronicleoperations/ns#CreateNamespace"; + const NAMESPACE_NAME: &str = "http://chronicle.works/chronicleoperations/ns#namespaceName"; + const NAMESPACE_UUID: &str = "http://chronicle.works/chronicleoperations/ns#namespaceUuid"; + const AGENT_EXISTS: &str = "http://chronicle.works/chronicleoperations/ns#AgentExists"; + const AGENT_NAME: &str = "http://chronicle.works/chronicleoperations/ns#agentName"; + const AGENT_UUID: &str = "http://chronicle.works/chronicleoperations/ns#agentUuid"; + const 
AGENT_ACTS_ON_BEHALF_OF: &str = + "http://chronicle.works/chronicleoperations/ns#AgentActsOnBehalfOf"; + const DELEGATE_ID: &str = "http://chronicle.works/chronicleoperations/ns#delegateId"; + const RESPONSIBLE_ID: &str = "http://chronicle.works/chronicleoperations/ns#responsibleId"; + + impl AsRef for ChronicleOperation { + fn as_ref(&self) -> &'static str { + match self { + ChronicleOperation::ActivityExists => ACTIVITY_EXISTS, + ChronicleOperation::ActivityName => ACTIVITY_NAME, + ChronicleOperation::StartActivity => START_ACTIVITY, + ChronicleOperation::StartActivityTime => START_ACTIVITY_TIME, + ChronicleOperation::EndActivity => END_ACTIVITY, + ChronicleOperation::EndActivityTime => END_ACTIVITY_TIME, + ChronicleOperation::WasAssociatedWith => WAS_ASSOCIATED_WITH, + ChronicleOperation::WasAttributedTo => WAS_ATTRIBUTED_TO, + ChronicleOperation::ActivityUses => ACTIVITY_USES, + ChronicleOperation::EntityName => ENTITY_NAME, + ChronicleOperation::Locator => LOCATOR, + ChronicleOperation::Role => ROLE, + ChronicleOperation::EntityExists => ENTITY_EXISTS, + ChronicleOperation::WasGeneratedBy => WAS_GENERATED_BY, + ChronicleOperation::EntityDerive => ENTITY_DERIVE, + ChronicleOperation::DerivationType => DERIVATION_TYPE, + ChronicleOperation::UsedEntityName => USED_ENTITY_NAME, + ChronicleOperation::SetAttributes => SET_ATTRIBUTES, + ChronicleOperation::Attributes => ATTRIBUTES, + ChronicleOperation::Attribute => ATTRIBUTE, + ChronicleOperation::DomaintypeId => DOMAINTYPE_ID, + ChronicleOperation::WasInformedBy => WAS_INFORMED_BY, + ChronicleOperation::InformingActivityName => INFORMING_ACTIVITY_NAME, + ChronicleOperation::Generated => GENERATED, + ChronicleOperation::CreateNamespace => CREATE_NAMESPACE, + ChronicleOperation::NamespaceName => NAMESPACE_NAME, + ChronicleOperation::NamespaceUuid => NAMESPACE_UUID, + ChronicleOperation::AgentExists => AGENT_EXISTS, + ChronicleOperation::AgentName => AGENT_NAME, + ChronicleOperation::AgentUuid => AGENT_UUID, + ChronicleOperation::AgentActsOnBehalfOf => AGENT_ACTS_ON_BEHALF_OF, + ChronicleOperation::DelegateId => DELEGATE_ID, + ChronicleOperation::ResponsibleId => RESPONSIBLE_ID, + } + } + } + + #[cfg(feature = "json-ld")] + impl From for iri_string::types::IriString { + fn from(val: ChronicleOperation) -> Self { + use iri_string::types::UriString; + UriString::try_from(val.as_str().to_string()).unwrap().into() + } + } + + impl ChronicleOperation { + pub fn as_str(&self) -> &str { + self.as_ref() + } + } + + impl core::fmt::Display for ChronicleOperation { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.as_str()) + } + } } mod prov { - #[derive(Clone, Copy, PartialEq, Eq, Hash)] - pub enum Prov { - Agent, - Entity, - Activity, - WasAssociatedWith, - QualifiedAssociation, - QualifiedAttribution, - Association, - Attribution, - Responsible, - WasGeneratedBy, - Used, - WasAttributedTo, - StartedAtTime, - EndedAtTime, - WasDerivedFrom, - HadPrimarySource, - WasQuotedFrom, - WasRevisionOf, - ActedOnBehalfOf, - QualifiedDelegation, - Delegation, - Delegate, - HadRole, - HadActivity, - HadEntity, - WasInformedBy, - Generated, - } - - const AGENT: &str = "http://www.w3.org/ns/prov#Agent"; - const ENTITY: &str = "http://www.w3.org/ns/prov#Entity"; - const ACTIVITY: &str = "http://www.w3.org/ns/prov#Activity"; - const WAS_ASSOCIATED_WITH: &str = "http://www.w3.org/ns/prov#wasAssociatedWith"; - const QUALIFIED_ASSOCIATION: &str = "http://www.w3.org/ns/prov#qualifiedAssociation"; - const 
QUALIFIED_ATTRIBUTION: &str = "http://www.w3.org/ns/prov#qualifiedAttribution"; - const ASSOCIATION: &str = "http://www.w3.org/ns/prov#Association"; - const ATTRIBUTION: &str = "http://www.w3.org/ns/prov#Attribution"; - const RESPONSIBLE: &str = "http://www.w3.org/ns/prov#agent"; - const WAS_GENERATED_BY: &str = "http://www.w3.org/ns/prov#wasGeneratedBy"; - const USED: &str = "http://www.w3.org/ns/prov#used"; - const WAS_ATTRIBUTED_TO: &str = "http://www.w3.org/ns/prov#wasAttributedTo"; - const STARTED_AT_TIME: &str = "http://www.w3.org/ns/prov#startedAtTime"; - const ENDED_AT_TIME: &str = "http://www.w3.org/ns/prov#endedAtTime"; - const WAS_DERIVED_FROM: &str = "http://www.w3.org/ns/prov#wasDerivedFrom"; - const HAD_PRIMARY_SOURCE: &str = "http://www.w3.org/ns/prov#hadPrimarySource"; - const WAS_QUOTED_FROM: &str = "http://www.w3.org/ns/prov#wasQuotedFrom"; - const WAS_REVISION_OF: &str = "http://www.w3.org/ns/prov#wasRevisionOf"; - const ACTED_ON_BEHALF_OF: &str = "http://www.w3.org/ns/prov#actedOnBehalfOf"; - const QUALIFIED_DELEGATION: &str = "http://www.w3.org/ns/prov#qualifiedDelegation"; - const DELEGATION: &str = "http://www.w3.org/ns/prov#Delegation"; - const DELEGATE: &str = "http://www.w3.org/ns/prov#agent"; - const HAD_ROLE: &str = "http://www.w3.org/ns/prov#hadRole"; - const HAD_ACTIVITY: &str = "http://www.w3.org/ns/prov#hadActivity"; - const HAD_ENTITY: &str = "http://www.w3.org/ns/prov#hadEntity"; - const WAS_INFORMED_BY: &str = "http://www.w3.org/ns/prov#wasInformedBy"; - const GENERATED: &str = "http://www.w3.org/ns/prov#generated"; - - impl AsRef for Prov { - fn as_ref(&self) -> &'static str { - match self { - Prov::Agent => AGENT, - Prov::Entity => ENTITY, - Prov::Activity => ACTIVITY, - Prov::WasAssociatedWith => WAS_ASSOCIATED_WITH, - Prov::QualifiedAssociation => QUALIFIED_ASSOCIATION, - Prov::QualifiedAttribution => QUALIFIED_ATTRIBUTION, - Prov::Association => ASSOCIATION, - Prov::Attribution => ATTRIBUTION, - Prov::Responsible => RESPONSIBLE, - Prov::WasGeneratedBy => WAS_GENERATED_BY, - Prov::Used => USED, - Prov::WasAttributedTo => WAS_ATTRIBUTED_TO, - Prov::StartedAtTime => STARTED_AT_TIME, - Prov::EndedAtTime => ENDED_AT_TIME, - Prov::WasDerivedFrom => WAS_DERIVED_FROM, - Prov::HadPrimarySource => HAD_PRIMARY_SOURCE, - Prov::WasQuotedFrom => WAS_QUOTED_FROM, - Prov::WasRevisionOf => WAS_REVISION_OF, - Prov::ActedOnBehalfOf => ACTED_ON_BEHALF_OF, - Prov::QualifiedDelegation => QUALIFIED_DELEGATION, - Prov::Delegation => DELEGATION, - Prov::Delegate => DELEGATE, - Prov::HadRole => HAD_ROLE, - Prov::HadActivity => HAD_ACTIVITY, - Prov::HadEntity => HAD_ENTITY, - Prov::WasInformedBy => WAS_INFORMED_BY, - Prov::Generated => GENERATED, - } - } - } - - #[cfg(feature = "json-ld")] - impl From for iri_string::types::IriString { - fn from(val: Prov) -> Self { - use iri_string::types::UriString; - UriString::try_from(val.as_str().to_string()).unwrap().into() - } - } - - impl Prov { - pub fn as_str(&self) -> &str { - self.as_ref() - } - } - - impl core::fmt::Display for Prov { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.as_str()) - } - } + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub enum Prov { + Agent, + Entity, + Activity, + WasAssociatedWith, + QualifiedAssociation, + QualifiedAttribution, + Association, + Attribution, + Responsible, + WasGeneratedBy, + Used, + WasAttributedTo, + StartedAtTime, + EndedAtTime, + WasDerivedFrom, + HadPrimarySource, + WasQuotedFrom, + WasRevisionOf, + ActedOnBehalfOf, + 
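
A detail worth noticing in the PROV-O constants above: `RESPONSIBLE` and `DELEGATE` both resolve to `http://www.w3.org/ns/prov#agent`, so the property IRI alone cannot tell the two roles apart; it is the surrounding qualified node (an `Association` versus a `Delegation`) that disambiguates them. A two-line check makes the collision explicit:

```rust
// The two role constants above share one PROV-O property IRI, so equality on
// the string form cannot distinguish a responsible agent from a delegate.
const RESPONSIBLE: &str = "http://www.w3.org/ns/prov#agent";
const DELEGATE: &str = "http://www.w3.org/ns/prov#agent";

fn main() {
    assert_eq!(RESPONSIBLE, DELEGATE);
}
```
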
QualifiedDelegation, + Delegation, + Delegate, + HadRole, + HadActivity, + HadEntity, + WasInformedBy, + Generated, + } + + const AGENT: &str = "http://www.w3.org/ns/prov#Agent"; + const ENTITY: &str = "http://www.w3.org/ns/prov#Entity"; + const ACTIVITY: &str = "http://www.w3.org/ns/prov#Activity"; + const WAS_ASSOCIATED_WITH: &str = "http://www.w3.org/ns/prov#wasAssociatedWith"; + const QUALIFIED_ASSOCIATION: &str = "http://www.w3.org/ns/prov#qualifiedAssociation"; + const QUALIFIED_ATTRIBUTION: &str = "http://www.w3.org/ns/prov#qualifiedAttribution"; + const ASSOCIATION: &str = "http://www.w3.org/ns/prov#Association"; + const ATTRIBUTION: &str = "http://www.w3.org/ns/prov#Attribution"; + const RESPONSIBLE: &str = "http://www.w3.org/ns/prov#agent"; + const WAS_GENERATED_BY: &str = "http://www.w3.org/ns/prov#wasGeneratedBy"; + const USED: &str = "http://www.w3.org/ns/prov#used"; + const WAS_ATTRIBUTED_TO: &str = "http://www.w3.org/ns/prov#wasAttributedTo"; + const STARTED_AT_TIME: &str = "http://www.w3.org/ns/prov#startedAtTime"; + const ENDED_AT_TIME: &str = "http://www.w3.org/ns/prov#endedAtTime"; + const WAS_DERIVED_FROM: &str = "http://www.w3.org/ns/prov#wasDerivedFrom"; + const HAD_PRIMARY_SOURCE: &str = "http://www.w3.org/ns/prov#hadPrimarySource"; + const WAS_QUOTED_FROM: &str = "http://www.w3.org/ns/prov#wasQuotedFrom"; + const WAS_REVISION_OF: &str = "http://www.w3.org/ns/prov#wasRevisionOf"; + const ACTED_ON_BEHALF_OF: &str = "http://www.w3.org/ns/prov#actedOnBehalfOf"; + const QUALIFIED_DELEGATION: &str = "http://www.w3.org/ns/prov#qualifiedDelegation"; + const DELEGATION: &str = "http://www.w3.org/ns/prov#Delegation"; + const DELEGATE: &str = "http://www.w3.org/ns/prov#agent"; + const HAD_ROLE: &str = "http://www.w3.org/ns/prov#hadRole"; + const HAD_ACTIVITY: &str = "http://www.w3.org/ns/prov#hadActivity"; + const HAD_ENTITY: &str = "http://www.w3.org/ns/prov#hadEntity"; + const WAS_INFORMED_BY: &str = "http://www.w3.org/ns/prov#wasInformedBy"; + const GENERATED: &str = "http://www.w3.org/ns/prov#generated"; + + impl AsRef for Prov { + fn as_ref(&self) -> &'static str { + match self { + Prov::Agent => AGENT, + Prov::Entity => ENTITY, + Prov::Activity => ACTIVITY, + Prov::WasAssociatedWith => WAS_ASSOCIATED_WITH, + Prov::QualifiedAssociation => QUALIFIED_ASSOCIATION, + Prov::QualifiedAttribution => QUALIFIED_ATTRIBUTION, + Prov::Association => ASSOCIATION, + Prov::Attribution => ATTRIBUTION, + Prov::Responsible => RESPONSIBLE, + Prov::WasGeneratedBy => WAS_GENERATED_BY, + Prov::Used => USED, + Prov::WasAttributedTo => WAS_ATTRIBUTED_TO, + Prov::StartedAtTime => STARTED_AT_TIME, + Prov::EndedAtTime => ENDED_AT_TIME, + Prov::WasDerivedFrom => WAS_DERIVED_FROM, + Prov::HadPrimarySource => HAD_PRIMARY_SOURCE, + Prov::WasQuotedFrom => WAS_QUOTED_FROM, + Prov::WasRevisionOf => WAS_REVISION_OF, + Prov::ActedOnBehalfOf => ACTED_ON_BEHALF_OF, + Prov::QualifiedDelegation => QUALIFIED_DELEGATION, + Prov::Delegation => DELEGATION, + Prov::Delegate => DELEGATE, + Prov::HadRole => HAD_ROLE, + Prov::HadActivity => HAD_ACTIVITY, + Prov::HadEntity => HAD_ENTITY, + Prov::WasInformedBy => WAS_INFORMED_BY, + Prov::Generated => GENERATED, + } + } + } + + #[cfg(feature = "json-ld")] + impl From for iri_string::types::IriString { + fn from(val: Prov) -> Self { + use iri_string::types::UriString; + UriString::try_from(val.as_str().to_string()).unwrap().into() + } + } + + impl Prov { + pub fn as_str(&self) -> &str { + self.as_ref() + } + } + + impl core::fmt::Display for Prov { + fn fmt(&self, f: &mut 
core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.as_str()) + } + } } mod chronicle { - use core::str::FromStr; - - use iri_string::types::UriString; - #[cfg(not(feature = "std"))] - use parity_scale_codec::alloc::string::String; - #[cfg(not(feature = "std"))] - use scale_info::prelude::{*, borrow::ToOwned, string::ToString}; - use uuid::Uuid; - - use crate::prov::{ActivityId, AgentId, EntityId, ExternalId, ExternalIdPart, Role}; - - #[derive(Clone, Copy, PartialEq, Eq, Hash)] - pub enum Chronicle { - Namespace, - HasNamespace, - Value, - } - - const NAMESPACE: &str = "http://chronicle.works/chronicle/ns#Namespace"; - const HAS_NAMESPACE: &str = "http://chronicle.works/chronicle/ns#hasNamespace"; - const VALUE: &str = "http://chronicle.works/chronicle/ns#Value"; - - impl AsRef for Chronicle { - fn as_ref(&self) -> &'static str { - match self { - Chronicle::Namespace => NAMESPACE, - Chronicle::HasNamespace => HAS_NAMESPACE, - Chronicle::Value => VALUE, - } - } - } - - #[cfg(feature = "json-ld")] - impl From for iri_string::types::IriString { - fn from(val: Chronicle) -> Self { - UriString::try_from(val.as_str().to_string()).unwrap().into() - } - } - - impl Chronicle { - pub fn as_str(&self) -> &str { - self.as_ref() - } - } - - impl core::fmt::Display for Chronicle { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}", self.as_str()) - } - } - - lazy_static::lazy_static! { + use core::str::FromStr; + + use iri_string::types::UriString; + #[cfg(not(feature = "std"))] + use parity_scale_codec::alloc::string::String; + #[cfg(not(feature = "std"))] + use scale_info::prelude::{borrow::ToOwned, string::ToString, *}; + use uuid::Uuid; + + use crate::prov::{ActivityId, AgentId, EntityId, ExternalId, ExternalIdPart, Role}; + + #[derive(Clone, Copy, PartialEq, Eq, Hash)] + pub enum Chronicle { + Namespace, + HasNamespace, + Value, + } + + const NAMESPACE: &str = "http://chronicle.works/chronicle/ns#Namespace"; + const HAS_NAMESPACE: &str = "http://chronicle.works/chronicle/ns#hasNamespace"; + const VALUE: &str = "http://chronicle.works/chronicle/ns#Value"; + + impl AsRef for Chronicle { + fn as_ref(&self) -> &'static str { + match self { + Chronicle::Namespace => NAMESPACE, + Chronicle::HasNamespace => HAS_NAMESPACE, + Chronicle::Value => VALUE, + } + } + } + + #[cfg(feature = "json-ld")] + impl From for iri_string::types::IriString { + fn from(val: Chronicle) -> Self { + UriString::try_from(val.as_str().to_string()).unwrap().into() + } + } + + impl Chronicle { + pub fn as_str(&self) -> &str { + self.as_ref() + } + } + + impl core::fmt::Display for Chronicle { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "{}", self.as_str()) + } + } + + lazy_static::lazy_static! { static ref ENCODE_SET: percent_encoding::AsciiSet = percent_encoding::NON_ALPHANUMERIC .remove(b'_') @@ -314,157 +314,157 @@ mod chronicle { .remove(b'.'); } - /// Operations to format specific Iri kinds, using percentage encoding to ensure they are - /// infallible. This module provides functionality to create various types of IRIs with - /// percent encoding applied to external IDs where necessary. 
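
The `ENCODE_SET` introduced at the end of the hunk above starts from `percent_encoding::NON_ALPHANUMERIC` and carves out characters that are safe inside a Chronicle CURIE; the removals visible in the diff include `_` and `.`. A self-contained sketch of the same construction (a `const` set works just as well as the `lazy_static` form for a fixed set):

```rust
// Sketch of the ENCODE_SET construction above: begin with NON_ALPHANUMERIC,
// then remove bytes that may pass through unescaped. The removals shown in
// the diff include `_` and `.`; any others fall outside the visible hunk.
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};

const ENCODE_SET: &AsciiSet = &NON_ALPHANUMERIC.remove(b'_').remove(b'.');

fn main() {
    let encoded = utf8_percent_encode("my agent.v2", ENCODE_SET).to_string();
    // The space is escaped; `_` and `.` survive unchanged.
    assert_eq!(encoded, "my%20agent.v2");
}
```
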
- impl Chronicle { - pub const LEGACY_PREFIXES: &'static [&'static str] = - &["http://btp.works/chronicle/ns#", "http://blockchaintp.com/chronicle/ns#"]; - pub const LONG_PREFIX: &'static str = "http://chronicle.works/chronicle/ns#"; - pub const PREFIX: &'static str = "chronicle"; - - /// Encodes the given external ID using percent-encoding to ensure it is a valid Chronicle - /// CURIE - fn encode_external_id(external_id: &ExternalId) -> String { - percent_encoding::utf8_percent_encode(external_id.as_str(), &ENCODE_SET).to_string() - } - - /// Constructs a namespace IRI using a given external ID and UUID. - pub fn namespace( - external_id: &ExternalId, - id: &Uuid, - ) -> Result { - let encoded_external_id = Self::encode_external_id(external_id); - UriString::from_str(&format!("{}:ns:{}:{}", Self::PREFIX, encoded_external_id, id)) - } - - /// Constructs an agent IRI using a given external ID. - pub fn agent( - external_id: &ExternalId, - ) -> Result> { - let encoded_external_id = Self::encode_external_id(external_id); - format!("{}:agent:{}", Self::PREFIX, encoded_external_id).try_into() - } - - /// Constructs an activity IRI using a given external ID. - pub fn activity( - external_id: &ExternalId, - ) -> Result> { - let encoded_external_id = Self::encode_external_id(external_id); - format!("{}:activity:{}", Self::PREFIX, encoded_external_id).try_into() - } - - /// Constructs an entity IRI using a given external ID. - pub fn entity( - external_id: &ExternalId, - ) -> Result> { - let encoded_external_id = Self::encode_external_id(external_id); - format!("{}:entity:{}", Self::PREFIX, encoded_external_id).try_into() - } - - /// Constructs a domaintype IRI using a given external ID. - pub fn domaintype( - external_id: &ExternalId, - ) -> Result> { - let encoded_external_id = Self::encode_external_id(external_id); - format!("{}:domaintype:{}", Self::PREFIX, encoded_external_id).try_into() - } - - /// Constructs an association IRI using given agent and activity IDs, and an optional role. - pub fn association( - agent: &AgentId, - activity: &ActivityId, - role: &Option, - ) -> Result> { - let encoded_agent_id = Self::encode_external_id(agent.external_id_part()); - let encoded_activity_id = Self::encode_external_id(activity.external_id_part()); - let encoded_role = role - .as_ref() - .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) - .unwrap_or_else(|| "".to_owned()); - format!( - "{}:association:{}:{}:role={}", - Self::PREFIX, - encoded_agent_id, - encoded_activity_id, - encoded_role, - ) - .try_into() - } - - /// Constructs a delegation IRI using given delegate and responsible agent IDs, and optional - /// activity and role. 
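
Each constructor above follows the same two steps: percent-encode the external ID, then parse the formatted CURIE into a typed `UriString`, as `namespace` does with `UriString::from_str`. A reduced sketch of one such constructor (the `agent` shape, with the encode set abbreviated; the error type assumes the crate's `FromStr` validation error):

```rust
// Reduced sketch of the `agent` constructor above: encode first, so the
// later parse into a typed UriString cannot fail on user-supplied IDs.
use core::str::FromStr;
use iri_string::types::UriString;
use percent_encoding::{utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};

const ENCODE_SET: &AsciiSet = &NON_ALPHANUMERIC.remove(b'_').remove(b'.');

fn agent_uri(external_id: &str) -> Result<UriString, iri_string::validate::Error> {
    let encoded = utf8_percent_encode(external_id, ENCODE_SET).to_string();
    UriString::from_str(&format!("chronicle:agent:{encoded}"))
}

fn main() {
    let uri = agent_uri("my agent").expect("encoding makes this infallible");
    assert_eq!(uri.as_str(), "chronicle:agent:my%20agent");
}
```
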
- #[tracing::instrument( - name = "delegation_iri_creation", - skip(delegate, responsible, activity, role) - )] - pub fn delegation( - delegate: &AgentId, - responsible: &AgentId, - activity: &Option, - role: &Option, - ) -> Result> { - let encoded_delegate_id = Self::encode_external_id(delegate.external_id_part()); - let encoded_responsible_id = Self::encode_external_id(responsible.external_id_part()); - let encoded_activity_id = activity - .as_ref() - .map(|a| Self::encode_external_id(a.external_id_part())) - .unwrap_or_default(); - let encoded_role = role - .as_ref() - .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) - .unwrap_or_else(|| "".to_owned()); - format!( - "{}:delegation:{}:{}:role={}:activity={}", - Self::PREFIX, - encoded_delegate_id, - encoded_responsible_id, - encoded_role, - encoded_activity_id, - ) - .try_into() - } - - /// Constructs an attribution IRI using given agent and entity IDs, and an optional role. - #[tracing::instrument(name = "attribution_iri_creation", skip(agent, entity, role))] - pub fn attribution( - agent: &AgentId, - entity: &EntityId, - role: &Option, - ) -> Result> { - let encoded_agent_id = Self::encode_external_id(agent.external_id_part()); - let encoded_entity_id = Self::encode_external_id(entity.external_id_part()); - let encoded_role = role - .as_ref() - .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) - .unwrap_or_else(|| "".to_owned()); - format!( - "{}:attribution:{}:{}:role={}", - Self::PREFIX, - encoded_agent_id, - encoded_entity_id, - encoded_role, - ) - .try_into() - } - } + /// Operations to format specific Iri kinds, using percentage encoding to ensure they are + /// infallible. This module provides functionality to create various types of IRIs with + /// percent encoding applied to external IDs where necessary. + impl Chronicle { + pub const LEGACY_PREFIXES: &'static [&'static str] = + &["http://btp.works/chronicle/ns#", "http://blockchaintp.com/chronicle/ns#"]; + pub const LONG_PREFIX: &'static str = "http://chronicle.works/chronicle/ns#"; + pub const PREFIX: &'static str = "chronicle"; + + /// Encodes the given external ID using percent-encoding to ensure it is a valid Chronicle + /// CURIE + fn encode_external_id(external_id: &ExternalId) -> String { + percent_encoding::utf8_percent_encode(external_id.as_str(), &ENCODE_SET).to_string() + } + + /// Constructs a namespace IRI using a given external ID and UUID. + pub fn namespace( + external_id: &ExternalId, + id: &Uuid, + ) -> Result { + let encoded_external_id = Self::encode_external_id(external_id); + UriString::from_str(&format!("{}:ns:{}:{}", Self::PREFIX, encoded_external_id, id)) + } + + /// Constructs an agent IRI using a given external ID. + pub fn agent( + external_id: &ExternalId, + ) -> Result> { + let encoded_external_id = Self::encode_external_id(external_id); + format!("{}:agent:{}", Self::PREFIX, encoded_external_id).try_into() + } + + /// Constructs an activity IRI using a given external ID. + pub fn activity( + external_id: &ExternalId, + ) -> Result> { + let encoded_external_id = Self::encode_external_id(external_id); + format!("{}:activity:{}", Self::PREFIX, encoded_external_id).try_into() + } + + /// Constructs an entity IRI using a given external ID. + pub fn entity( + external_id: &ExternalId, + ) -> Result> { + let encoded_external_id = Self::encode_external_id(external_id); + format!("{}:entity:{}", Self::PREFIX, encoded_external_id).try_into() + } + + /// Constructs a domaintype IRI using a given external ID. 
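
For the compound IRIs above, optional parts never abort construction: a missing role or activity simply collapses to an empty segment, which keeps the operation infallible at the cost of `role=`-style empty fields. A simplified sketch of the delegation shape (percent-encoding elided here; the real code encodes every part first):

```rust
// Simplified sketch of the delegation CURIE above: Option fields become
// empty segments instead of errors. Percent-encoding is elided.
fn delegation_curie(
    delegate: &str,
    responsible: &str,
    role: Option<&str>,
    activity: Option<&str>,
) -> String {
    format!(
        "chronicle:delegation:{}:{}:role={}:activity={}",
        delegate,
        responsible,
        role.unwrap_or_default(),
        activity.unwrap_or_default(),
    )
}

fn main() {
    assert_eq!(
        delegation_curie("alice", "bob", None, None),
        "chronicle:delegation:alice:bob:role=:activity="
    );
    assert_eq!(
        delegation_curie("alice", "bob", Some("approver"), None),
        "chronicle:delegation:alice:bob:role=approver:activity="
    );
}
```
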
+ pub fn domaintype( + external_id: &ExternalId, + ) -> Result> { + let encoded_external_id = Self::encode_external_id(external_id); + format!("{}:domaintype:{}", Self::PREFIX, encoded_external_id).try_into() + } + + /// Constructs an association IRI using given agent and activity IDs, and an optional role. + pub fn association( + agent: &AgentId, + activity: &ActivityId, + role: &Option, + ) -> Result> { + let encoded_agent_id = Self::encode_external_id(agent.external_id_part()); + let encoded_activity_id = Self::encode_external_id(activity.external_id_part()); + let encoded_role = role + .as_ref() + .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) + .unwrap_or_else(|| "".to_owned()); + format!( + "{}:association:{}:{}:role={}", + Self::PREFIX, + encoded_agent_id, + encoded_activity_id, + encoded_role, + ) + .try_into() + } + + /// Constructs a delegation IRI using given delegate and responsible agent IDs, and optional + /// activity and role. + #[tracing::instrument( + name = "delegation_iri_creation", + skip(delegate, responsible, activity, role) + )] + pub fn delegation( + delegate: &AgentId, + responsible: &AgentId, + activity: &Option, + role: &Option, + ) -> Result> { + let encoded_delegate_id = Self::encode_external_id(delegate.external_id_part()); + let encoded_responsible_id = Self::encode_external_id(responsible.external_id_part()); + let encoded_activity_id = activity + .as_ref() + .map(|a| Self::encode_external_id(a.external_id_part())) + .unwrap_or_default(); + let encoded_role = role + .as_ref() + .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) + .unwrap_or_else(|| "".to_owned()); + format!( + "{}:delegation:{}:{}:role={}:activity={}", + Self::PREFIX, + encoded_delegate_id, + encoded_responsible_id, + encoded_role, + encoded_activity_id, + ) + .try_into() + } + + /// Constructs an attribution IRI using given agent and entity IDs, and an optional role. + #[tracing::instrument(name = "attribution_iri_creation", skip(agent, entity, role))] + pub fn attribution( + agent: &AgentId, + entity: &EntityId, + role: &Option, + ) -> Result> { + let encoded_agent_id = Self::encode_external_id(agent.external_id_part()); + let encoded_entity_id = Self::encode_external_id(entity.external_id_part()); + let encoded_role = role + .as_ref() + .map(|r| Self::encode_external_id(&ExternalId::from(r.as_str()))) + .unwrap_or_else(|| "".to_owned()); + format!( + "{}:attribution:{}:{}:role={}", + Self::PREFIX, + encoded_agent_id, + encoded_entity_id, + encoded_role, + ) + .try_into() + } + } } /// As these operations are meant to be infallible, prop test them to ensure #[cfg(test)] #[allow(clippy::useless_conversion)] mod test { - use proptest::prelude::*; - use uuid::Uuid; + use proptest::prelude::*; + use uuid::Uuid; - use crate::prov::{ - ActivityId, AgentId, AssociationId, AttributionId, DelegationId, DomaintypeId, EntityId, - ExternalId, ExternalIdPart, NamespaceId, Role, - }; + use crate::prov::{ + ActivityId, AgentId, AssociationId, AttributionId, DelegationId, DomaintypeId, EntityId, + ExternalId, ExternalIdPart, NamespaceId, Role, + }; - use super::Chronicle; + use super::Chronicle; - proptest! { + proptest! { #![proptest_config(ProptestConfig { max_shrink_iters: std::u32::MAX, verbose: 0, .. 
ProptestConfig::default() })] diff --git a/crates/embedded-substrate/src/lib.rs b/crates/embedded-substrate/src/lib.rs index 2ec3f8443..f907049ea 100644 --- a/crates/embedded-substrate/src/lib.rs +++ b/crates/embedded-substrate/src/lib.rs @@ -3,31 +3,31 @@ use protocol_substrate::SubxtClientError; use protocol_substrate_chronicle::ChronicleSubstrateClient; use sc_cli::{print_node_infos, CliConfiguration, Signals, SubstrateCli}; use subxt::{ - config::ExtrinsicParams, - ext::futures::{pin_mut, FutureExt}, - utils::{AccountId32, MultiAddress, MultiSignature}, + config::ExtrinsicParams, + ext::futures::{pin_mut, FutureExt}, + utils::{AccountId32, MultiAddress, MultiSignature}, }; use tempfile::TempDir; use thiserror::Error; use tokio::{ - select, - sync::oneshot::{channel, Sender}, + select, + sync::oneshot::{channel, Sender}, }; use lazy_static::lazy_static; use std::{ - collections::BTreeMap, - sync::{Arc, Mutex}, - time::Duration, + collections::BTreeMap, + sync::{Arc, Mutex}, + time::Duration, }; use tracing::info; #[derive(Debug, Error)] pub enum Error { - #[error("Substrate invocation error: {source}")] - Cli { source: anyhow::Error }, - #[error("No free ports")] - NoFreePorts, + #[error("Substrate invocation error: {source}")] + Cli { source: anyhow::Error }, + #[error("No free ports")] + NoFreePorts, } // Substrate initialization is costly and includes log configuration, so we need to keep and reuse @@ -38,312 +38,312 @@ lazy_static! { } pub struct EmbeddedSubstrate { - shutdown: Option>, - _state: TempDir, - rpc_port: u16, + shutdown: Option>, + _state: TempDir, + rpc_port: u16, } impl EmbeddedSubstrate { - pub async fn connect_chronicle( - &self, - ) -> Result, SubxtClientError> - where - C: subxt::Config< - Hash=subxt::utils::H256, - Address=MultiAddress, - AccountId=AccountId32, - Signature=MultiSignature, - >, - >::OtherParams: Default, - { - ChronicleSubstrateClient::::connect(format!("ws://127.0.0.1:{}", self.rpc_port)).await - } - - pub fn port(&self) -> u16 { - self.rpc_port - } + pub async fn connect_chronicle( + &self, + ) -> Result, SubxtClientError> + where + C: subxt::Config< + Hash = subxt::utils::H256, + Address = MultiAddress, + AccountId = AccountId32, + Signature = MultiSignature, + >, + >::OtherParams: Default, + { + ChronicleSubstrateClient::::connect(format!("ws://127.0.0.1:{}", self.rpc_port)).await + } + + pub fn port(&self) -> u16 { + self.rpc_port + } } impl Drop for EmbeddedSubstrate { - fn drop(&mut self) { - if let Some(shutdown) = self.shutdown.take() { - if let Err(e) = shutdown.send(()) { - tracing::error!("Failed to send shutdown signal: {:?}", e); - } - } else { - tracing::warn!("Shutdown signal was already taken"); - } - } + fn drop(&mut self) { + if let Some(shutdown) = self.shutdown.take() { + if let Err(e) = shutdown.send(()) { + tracing::error!("Failed to send shutdown signal: {:?}", e); + } + } else { + tracing::warn!("Shutdown signal was already taken"); + } + } } pub async fn shared_dev_node_rpc_on_arbitrary_port() -> Result, Error> { - shared_dev_node_rpc_on_port( - portpicker::pick_unused_port().ok_or_else(|| Error::NoFreePorts)?, - false, - ) - .await + shared_dev_node_rpc_on_port( + portpicker::pick_unused_port().ok_or_else(|| Error::NoFreePorts)?, + false, + ) + .await } // Utilize the CLI run command to bring up a substrate-chronicle dev mode node with a new runtime // Utilize the CLI run command to bring up a substrate-chronicle dev mode node with a new runtime // thread. 
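
Before the diff moves into embedded-substrate: the `mod test` that closes the vocab changes above prop-tests these constructors, since they are documented as infallible for arbitrary external IDs. A self-contained property in that spirit (strategy and assertion are illustrative, not the crate's actual tests):

```rust
// Illustrative property in the style of the vocab `mod test` above: any
// printable external ID must survive a percent-encode/decode round trip.
use percent_encoding::{percent_decode_str, utf8_percent_encode, AsciiSet, NON_ALPHANUMERIC};
use proptest::prelude::*;

const ENCODE_SET: &AsciiSet = &NON_ALPHANUMERIC.remove(b'_').remove(b'.');

proptest! {
    #[test]
    fn encoding_round_trips(external_id in "\\PC*") {
        let encoded = utf8_percent_encode(&external_id, ENCODE_SET).to_string();
        let decoded = percent_decode_str(&encoded).decode_utf8().unwrap();
        prop_assert_eq!(decoded, external_id);
    }
}
```
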
Execute node until receipt of a drop channel message or signal pub async fn shared_dev_node_rpc_on_port( - port: u16, - configure_logging: bool, + port: u16, + configure_logging: bool, ) -> Result, Error> { - let rt = tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap(); - let handle = rt.handle().clone(); - - if let Some(substrate_instance) = SUBSTRATE_INSTANCES.lock().unwrap().get(&port) { - return Ok(substrate_instance.clone()); - } - - let (live_tx, live_rx) = channel::<()>(); - let (tx, rx) = channel(); - let tmp_dir = tempfile::tempdir().unwrap(); - let tmp_path = format!("{}", tmp_dir.path().to_string_lossy()); - - std::thread::spawn(move || { - let cli = Cli::from_iter([ - "--chain dev", - "--force-authoring", - "--alice", - &*format!("--rpc-port={}", port), - "--rpc-cors=all", - &*format!("-d{}", tmp_path), - ]); - - let signals = handle - .block_on(async { Signals::capture() }) - .map_err(|e| tracing::error!("{}", e)) - .unwrap(); - - let config = cli - .create_configuration(&cli.run, handle.clone()) - .map_err(|e| tracing::error!("{}", e)) - .unwrap(); - - print_node_infos::(&config); - - if configure_logging { - cli.run - .init( - &"https://chronicle.works".to_owned(), - &"2.0.dev".to_owned(), - |_, _| {}, - &config, - ) - .unwrap(); - } - - let mut task_manager = handle - .block_on(async move { service::new_full(config).map_err(sc_cli::Error::Service) }) - .map_err(|e| tracing::error!("{}", e)) - .unwrap(); - - live_tx.send(()).unwrap(); - - let task_manager = handle.block_on(async move { - let signal_exit = signals.future().fuse(); - let task_exit = task_manager.future().fuse(); - let drop_exit = async move { - let _ = rx.await; - tracing::info!("Shutdown message"); - }; - - pin_mut!(signal_exit, drop_exit); - - select! { + let rt = tokio::runtime::Builder::new_multi_thread().enable_all().build().unwrap(); + let handle = rt.handle().clone(); + + if let Some(substrate_instance) = SUBSTRATE_INSTANCES.lock().unwrap().get(&port) { + return Ok(substrate_instance.clone()); + } + + let (live_tx, live_rx) = channel::<()>(); + let (tx, rx) = channel(); + let tmp_dir = tempfile::tempdir().unwrap(); + let tmp_path = format!("{}", tmp_dir.path().to_string_lossy()); + + std::thread::spawn(move || { + let cli = Cli::from_iter([ + "--chain dev", + "--force-authoring", + "--alice", + &*format!("--rpc-port={}", port), + "--rpc-cors=all", + &*format!("-d{}", tmp_path), + ]); + + let signals = handle + .block_on(async { Signals::capture() }) + .map_err(|e| tracing::error!("{}", e)) + .unwrap(); + + let config = cli + .create_configuration(&cli.run, handle.clone()) + .map_err(|e| tracing::error!("{}", e)) + .unwrap(); + + print_node_infos::(&config); + + if configure_logging { + cli.run + .init( + &"https://chronicle.works".to_owned(), + &"2.0.dev".to_owned(), + |_, _| {}, + &config, + ) + .unwrap(); + } + + let mut task_manager = handle + .block_on(async move { service::new_full(config).map_err(sc_cli::Error::Service) }) + .map_err(|e| tracing::error!("{}", e)) + .unwrap(); + + live_tx.send(()).unwrap(); + + let task_manager = handle.block_on(async move { + let signal_exit = signals.future().fuse(); + let task_exit = task_manager.future().fuse(); + let drop_exit = async move { + let _ = rx.await; + tracing::info!("Shutdown message"); + }; + + pin_mut!(signal_exit, drop_exit); + + select! 
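
The function above is a get-or-spawn cache: one node per RPC port, a `live_tx` oneshot to block until boot completes, and a second oneshot whose receiver ends the node when the handle is dropped. A reduced sketch of that shape, with the node itself stubbed out (types and names here are illustrative):

```rust
// Reduced sketch of the per-port instance cache above: a lazily initialized
// map prevents booting two nodes on one port, and a oneshot lets the spawned
// worker signal readiness. No real node is started here.
use lazy_static::lazy_static;
use std::{
    collections::BTreeMap,
    sync::{Arc, Mutex},
};
use tokio::sync::oneshot;

struct Instance {
    port: u16,
}

lazy_static! {
    static ref INSTANCES: Mutex<BTreeMap<u16, Arc<Instance>>> = Mutex::new(BTreeMap::new());
}

async fn shared_instance(port: u16) -> Arc<Instance> {
    if let Some(existing) = INSTANCES.lock().unwrap().get(&port) {
        return existing.clone();
    }
    // Boot happens on a plain thread; the oneshot reports readiness.
    let (live_tx, live_rx) = oneshot::channel::<()>();
    std::thread::spawn(move || {
        // ... node startup would run here ...
        let _ = live_tx.send(());
    });
    let _ = live_rx.await;
    let instance = Arc::new(Instance { port });
    INSTANCES.lock().unwrap().insert(port, instance.clone());
    instance
}

#[tokio::main]
async fn main() {
    let a = shared_instance(9944).await;
    let b = shared_instance(9944).await;
    assert!(Arc::ptr_eq(&a, &b));
    println!("one instance on port {}", a.port);
}
```
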
{ _ = signal_exit => {}, _ = drop_exit => {}, _ = task_exit => {}, } - task_manager - }); + task_manager + }); - let task_registry = task_manager.into_task_registry(); - let shutdown_timeout = Duration::from_secs(60); - rt.shutdown_timeout(shutdown_timeout); + let task_registry = task_manager.into_task_registry(); + let shutdown_timeout = Duration::from_secs(60); + rt.shutdown_timeout(shutdown_timeout); - let running_tasks = task_registry.running_tasks(); + let running_tasks = task_registry.running_tasks(); - if !running_tasks.is_empty() { - tracing::error!("Detected running(potentially stalled) tasks on shutdown:"); - running_tasks.iter().for_each(|(task, count)| { - let instances_desc = - if *count > 1 { format!("with {} instances ", count) } else { "".to_string() }; + if !running_tasks.is_empty() { + tracing::error!("Detected running(potentially stalled) tasks on shutdown:"); + running_tasks.iter().for_each(|(task, count)| { + let instances_desc = + if *count > 1 { format!("with {} instances ", count) } else { "".to_string() }; - if task.is_default_group() { - tracing::error!( + if task.is_default_group() { + tracing::error!( "Task \"{}\" was still running {}after waiting {} seconds to finish.", task.name, instances_desc, 60 ); - } else { - tracing::error!( + } else { + tracing::error!( "Task \"{}\" (Group: {}) was still running {}after waiting {} seconds to finish.", task.name, task.group, instances_desc, 60 ); - } - }); - } + } + }); + } - info!("Shut down embedded substrate instance on port {}", port); - }); + info!("Shut down embedded substrate instance on port {}", port); + }); - tracing::info!("Await substrate boot"); - let _ = live_rx.await; - tracing::info!("Substrate booted"); + tracing::info!("Await substrate boot"); + let _ = live_rx.await; + tracing::info!("Substrate booted"); - let instance = - Arc::new(EmbeddedSubstrate { shutdown: tx.into(), rpc_port: port, _state: tmp_dir }); + let instance = + Arc::new(EmbeddedSubstrate { shutdown: tx.into(), rpc_port: port, _state: tmp_dir }); - SUBSTRATE_INSTANCES.lock().unwrap().insert(port, instance.clone()); + SUBSTRATE_INSTANCES.lock().unwrap().insert(port, instance.clone()); - Ok(instance) + Ok(instance) } pub fn remove_shared_substrate_by_port(port: u16) { - let mut instances = SUBSTRATE_INSTANCES.lock().unwrap(); - if let Some(_instance) = instances.get(&port) { - instances.remove(&port); - } else { - tracing::warn!("No running substrate instance found on port {}", port); - } + let mut instances = SUBSTRATE_INSTANCES.lock().unwrap(); + if let Some(_instance) = instances.get(&port) { + instances.remove(&port); + } else { + tracing::warn!("No running substrate instance found on port {}", port); + } } pub fn remove_shared_substrate(substrate: &EmbeddedSubstrate) { - remove_shared_substrate_by_port(substrate.port()) + remove_shared_substrate_by_port(substrate.port()) } #[cfg(test)] pub mod test_runtime { - use chronicle_signing::{ - chronicle_secret_names, ChronicleSecretsOptions, ChronicleSigning, BATCHER_NAMESPACE, - CHRONICLE_NAMESPACE, - }; - - use protocol_abstract::{LedgerReader, LedgerWriter}; - use protocol_substrate_chronicle::{ - common::{ - attributes::Attributes, - identity::SignedIdentity, - prov::{ - operations::{AgentExists, ChronicleOperation, CreateNamespace, SetAttributes}, - AgentId, DomaintypeId, ExternalId, NamespaceId, - }, - }, - ChronicleEvent, ChronicleTransaction, - }; - use subxt::{ - ext::{ - futures::StreamExt, - sp_core::{Pair, Public}, - }, - PolkadotConfig, - }; - use uuid::Uuid; - - fn 
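
The select block above races three exits: an OS signal, the drop-channel message, and the task manager finishing on its own. A condensed, runnable sketch of the same shape (the hour-long sleep stands in for the node's task future):

```rust
// Condensed sketch of the three-way shutdown race above: an OS signal, a
// message on the drop channel, or the main task finishing all end the loop.
use futures::{pin_mut, FutureExt};
use tokio::{select, signal, sync::oneshot, time};

#[tokio::main]
async fn main() {
    let (drop_tx, drop_rx) = oneshot::channel::<()>();

    // Stand-in for a Drop impl requesting shutdown after one second.
    tokio::spawn(async move {
        time::sleep(time::Duration::from_secs(1)).await;
        let _ = drop_tx.send(());
    });

    let signal_exit = signal::ctrl_c().fuse();
    let drop_exit = async move {
        let _ = drop_rx.await;
    }
    .fuse();
    let task_exit = time::sleep(time::Duration::from_secs(3600)).fuse();

    pin_mut!(signal_exit, drop_exit, task_exit);

    select! {
        _ = signal_exit => {},
        _ = drop_exit => {},
        _ = task_exit => {},
    }
    println!("shutting down");
}
```
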
get_from_seed(seed: &str) -> [u8; 32] { - let k = TPublic::Pair::from_string(&format!("//{}", seed), None) - .expect("static values are valid; qed"); - let mut buf = [0; 32]; - buf.copy_from_slice(&k.to_raw_vec()); - - buf - } - - #[tokio::test] - pub async fn connect() { - let handle = crate::shared_dev_node_rpc_on_port(2003, true).await.unwrap(); - - let client = handle.connect_chronicle::().await.unwrap(); - - let mut events = - client.state_updates(protocol_abstract::FromBlock::Head, None).await.unwrap(); - - let signing = ChronicleSigning::new( - chronicle_secret_names(), - vec![ - ( - CHRONICLE_NAMESPACE.to_string(), - ChronicleSecretsOptions::seeded( - vec![( - "chronicle-pk".to_string(), - get_from_seed::("Chronicle"), - )] - .into_iter() - .collect(), - ), - ), - ( - BATCHER_NAMESPACE.to_string(), - ChronicleSecretsOptions::seeded( - vec![( - "batcher-pk".to_string(), - get_from_seed::("Chronicle"), - )] - .into_iter() - .collect(), - ), - ), - ], - ) - .await - .unwrap(); - - let (submit, id) = client - .pre_submit( - ChronicleTransaction::new( - &signing, - SignedIdentity::new_no_identity(), - vec![ - ChronicleOperation::CreateNamespace(CreateNamespace::new( - NamespaceId::from_external_id( - &ExternalId::from("test"), - Uuid::default(), - ), - )), - ChronicleOperation::AgentExists(AgentExists::new( - NamespaceId::from_external_id( - &ExternalId::from("test"), - Uuid::default(), - ), - AgentId::from_external_id("test"), - )), - ChronicleOperation::SetAttributes(SetAttributes::agent( - NamespaceId::from_external_id( - &ExternalId::from("test"), - Uuid::default(), - ), - AgentId::from_external_id("test"), - Attributes::type_only(Some(DomaintypeId::from_external_id("test"))), - )), - ], - ) - .await - .unwrap(), - ) - .await - .unwrap(); - - let _res = client - .do_submit(protocol_abstract::WriteConsistency::Strong, submit) - .await - .unwrap(); - - let (ev, _id, _block, _pos, _) = events.next().await.unwrap(); - - match ev { - ChronicleEvent::Committed { diff, .. } => { - tracing::info!("{:?}", diff) - } - ChronicleEvent::Contradicted { .. 
} => panic!("Contradicted"), - } - } + use chronicle_signing::{ + chronicle_secret_names, ChronicleSecretsOptions, ChronicleSigning, BATCHER_NAMESPACE, + CHRONICLE_NAMESPACE, + }; + + use protocol_abstract::{LedgerReader, LedgerWriter}; + use protocol_substrate_chronicle::{ + common::{ + attributes::Attributes, + identity::SignedIdentity, + prov::{ + operations::{AgentExists, ChronicleOperation, CreateNamespace, SetAttributes}, + AgentId, DomaintypeId, ExternalId, NamespaceId, + }, + }, + ChronicleEvent, ChronicleTransaction, + }; + use subxt::{ + ext::{ + futures::StreamExt, + sp_core::{Pair, Public}, + }, + PolkadotConfig, + }; + use uuid::Uuid; + + fn get_from_seed(seed: &str) -> [u8; 32] { + let k = TPublic::Pair::from_string(&format!("//{}", seed), None) + .expect("static values are valid; qed"); + let mut buf = [0; 32]; + buf.copy_from_slice(&k.to_raw_vec()); + + buf + } + + #[tokio::test] + pub async fn connect() { + let handle = crate::shared_dev_node_rpc_on_port(2003, true).await.unwrap(); + + let client = handle.connect_chronicle::().await.unwrap(); + + let mut events = + client.state_updates(protocol_abstract::FromBlock::Head, None).await.unwrap(); + + let signing = ChronicleSigning::new( + chronicle_secret_names(), + vec![ + ( + CHRONICLE_NAMESPACE.to_string(), + ChronicleSecretsOptions::seeded( + vec![( + "chronicle-pk".to_string(), + get_from_seed::("Chronicle"), + )] + .into_iter() + .collect(), + ), + ), + ( + BATCHER_NAMESPACE.to_string(), + ChronicleSecretsOptions::seeded( + vec![( + "batcher-pk".to_string(), + get_from_seed::("Chronicle"), + )] + .into_iter() + .collect(), + ), + ), + ], + ) + .await + .unwrap(); + + let (submit, id) = client + .pre_submit( + ChronicleTransaction::new( + &signing, + SignedIdentity::new_no_identity(), + vec![ + ChronicleOperation::CreateNamespace(CreateNamespace::new( + NamespaceId::from_external_id( + &ExternalId::from("test"), + Uuid::default(), + ), + )), + ChronicleOperation::AgentExists(AgentExists::new( + NamespaceId::from_external_id( + &ExternalId::from("test"), + Uuid::default(), + ), + AgentId::from_external_id("test"), + )), + ChronicleOperation::SetAttributes(SetAttributes::agent( + NamespaceId::from_external_id( + &ExternalId::from("test"), + Uuid::default(), + ), + AgentId::from_external_id("test"), + Attributes::type_only(Some(DomaintypeId::from_external_id("test"))), + )), + ], + ) + .await + .unwrap(), + ) + .await + .unwrap(); + + let _res = client + .do_submit(protocol_abstract::WriteConsistency::Strong, submit) + .await + .unwrap(); + + let (ev, _id, _block, _pos, _) = events.next().await.unwrap(); + + match ev { + ChronicleEvent::Committed { diff, .. } => { + tracing::info!("{:?}", diff) + }, + ChronicleEvent::Contradicted { .. 
} => panic!("Contradicted"), + } + } } diff --git a/crates/gq-subscribe/src/main.rs b/crates/gq-subscribe/src/main.rs index 2577b66c6..854881c7e 100644 --- a/crates/gq-subscribe/src/main.rs +++ b/crates/gq-subscribe/src/main.rs @@ -2,122 +2,122 @@ use std::net::{SocketAddr, ToSocketAddrs}; use clap::{Arg, Command}; use http::{HeaderValue, StatusCode}; -use rand::{distributions::Alphanumeric, Rng, thread_rng}; +use rand::{distributions::Alphanumeric, thread_rng, Rng}; use serde_json::{json, Value}; use tungstenite::{client::IntoClientRequest, connect, Message}; fn main() -> Result<(), anyhow::Error> { - let args = Command::new("gq-ws") - .author("Blockchain Technology Partners") - .about("Perform GraphQL subscription to a websocket") - .arg( - Arg::new("request") - .long("subscription") - .short('s') - .takes_value(true) - .required(true) - .help("the GraphQL subscription request"), - ) - .arg( - Arg::new("count") - .long("notification-count") - .short('c') - .takes_value(true) - .required(true) - .help("how many responses to report"), - ) - .arg( - Arg::new("address") - .long("chronicle-address") - .short('a') - .takes_value(true) - .default_value("localhost:9982") - .help("the network address of the Chronicle API"), - ) - .arg( - Arg::new("token") - .long("bearer-token") - .short('t') - .takes_value(true) - .help("the bearer token to pass for authorization"), - ) - .get_matches(); + let args = Command::new("gq-ws") + .author("Blockchain Technology Partners") + .about("Perform GraphQL subscription to a websocket") + .arg( + Arg::new("request") + .long("subscription") + .short('s') + .takes_value(true) + .required(true) + .help("the GraphQL subscription request"), + ) + .arg( + Arg::new("count") + .long("notification-count") + .short('c') + .takes_value(true) + .required(true) + .help("how many responses to report"), + ) + .arg( + Arg::new("address") + .long("chronicle-address") + .short('a') + .takes_value(true) + .default_value("localhost:9982") + .help("the network address of the Chronicle API"), + ) + .arg( + Arg::new("token") + .long("bearer-token") + .short('t') + .takes_value(true) + .help("the bearer token to pass for authorization"), + ) + .get_matches(); - let subscription_query = args.value_of("request").unwrap(); - let notification_count: u32 = args.value_of("count").unwrap().parse()?; - let chronicle_address: SocketAddr = args - .value_of("address") - .unwrap() - .to_socket_addrs()? - .next() - .expect("network address required for Chronicle API"); - let bearer_token = args.value_of("token"); + let subscription_query = args.value_of("request").unwrap(); + let notification_count: u32 = args.value_of("count").unwrap().parse()?; + let chronicle_address: SocketAddr = args + .value_of("address") + .unwrap() + .to_socket_addrs()? 
+ .next() + .expect("network address required for Chronicle API"); + let bearer_token = args.value_of("token"); - // generate random ID for subscription - let subscription_id: String = - thread_rng().sample_iter(&Alphanumeric).take(12).map(char::from).collect(); + // generate random ID for subscription + let subscription_id: String = + thread_rng().sample_iter(&Alphanumeric).take(12).map(char::from).collect(); - // prepare websocket request - let mut client_request = format!("ws://{chronicle_address}/ws").into_client_request()?; - let headers = client_request.headers_mut(); - if let Some(token) = bearer_token { - headers.insert("Authorization", HeaderValue::from_str(&format!("Bearer {token}"))?); - } - headers.insert("Sec-WebSocket-Protocol", HeaderValue::from_str("graphql-ws")?); + // prepare websocket request + let mut client_request = format!("ws://{chronicle_address}/ws").into_client_request()?; + let headers = client_request.headers_mut(); + if let Some(token) = bearer_token { + headers.insert("Authorization", HeaderValue::from_str(&format!("Bearer {token}"))?); + } + headers.insert("Sec-WebSocket-Protocol", HeaderValue::from_str("graphql-ws")?); - // connect and upgrade websocket - let (mut socket, response) = connect(client_request)?; - if response.status() != StatusCode::SWITCHING_PROTOCOLS { - panic!("failed connect and upgrade: {response:#?}"); - } + // connect and upgrade websocket + let (mut socket, response) = connect(client_request)?; + if response.status() != StatusCode::SWITCHING_PROTOCOLS { + panic!("failed connect and upgrade: {response:#?}"); + } - // initialize gql connection - let conn_init_json = json!({ + // initialize gql connection + let conn_init_json = json!({ "type": "connection_init" }); - let conn_init_msg = Message::Text(serde_json::to_string(&conn_init_json)?); - socket.send(conn_init_msg)?; - let conn_response = socket.read()?; - if let Value::Object(map) = serde_json::from_str::(&conn_response.clone().into_text()?)? - { - if map.get("type") == Some(&Value::String("connection_ack".to_string())) { - // connection initialized, so subscribe - let subscription_json = json!({ + let conn_init_msg = Message::Text(serde_json::to_string(&conn_init_json)?); + socket.send(conn_init_msg)?; + let conn_response = socket.read()?; + if let Value::Object(map) = serde_json::from_str::(&conn_response.clone().into_text()?)? 
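
The client above speaks the legacy graphql-ws subprotocol: send `connection_init`, wait for `connection_ack`, then `start` a subscription under a random id and match incoming `data` frames against that id. A sketch of just the frames (the subscription body here is illustrative, not a real Chronicle query):

```rust
// Sketch of the graphql-ws frames the client above exchanges; transport is
// plain websocket text messages, so building the JSON is the whole trick.
use serde_json::{json, Value};

fn main() -> Result<(), serde_json::Error> {
    // Client -> server: open the connection, then start one subscription.
    let init = json!({ "type": "connection_init" });
    let start = json!({
        "type": "start",
        "id": "sub-1",
        "payload": { "query": "subscription { eventsFeed }" }
    });

    // Server -> client: data frames are matched on `type` and `id`, exactly
    // as the loop above does before printing the payload.
    let reply: Value = serde_json::from_str(
        r#"{ "type": "data", "id": "sub-1", "payload": { "data": {} } }"#,
    )?;
    assert_eq!(reply["type"], "data");
    assert_eq!(reply["id"], "sub-1");

    println!("{init}\n{start}");
    Ok(())
}
```
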
+ { + if map.get("type") == Some(&Value::String("connection_ack".to_string())) { + // connection initialized, so subscribe + let subscription_json = json!({ "type": "start", "id": subscription_id, "payload": { "query": subscription_query } }); - let subscription_msg = Message::Text(serde_json::to_string(&subscription_json)?); - socket.send(subscription_msg)?; + let subscription_msg = Message::Text(serde_json::to_string(&subscription_json)?); + socket.send(subscription_msg)?; - // receive and print notifications - let data_json = Value::String("data".to_string()); - let subscription_id_json = Value::String(subscription_id); - let mut remaining = notification_count; - while remaining > 0 { - remaining -= 1; - let notification_msg = socket.read()?; - let notification_json = - serde_json::from_str::<Value>(&notification_msg.into_text()?)?; + // receive and print notifications + let data_json = Value::String("data".to_string()); + let subscription_id_json = Value::String(subscription_id); + let mut remaining = notification_count; + while remaining > 0 { + remaining -= 1; + let notification_msg = socket.read()?; + let notification_json = + serde_json::from_str::<Value>(&notification_msg.into_text()?)?; - if let Value::Object(map) = notification_json.clone() { - if map.get("type") == Some(&data_json) && - map.get("id") == Some(&subscription_id_json) - { - let notification_pretty = - serde_json::to_string_pretty(map.get("payload").unwrap())?; - println!("{notification_pretty}"); - } else { - panic!("expected a response to subscription, got: {notification_json}"); - } - } else { - panic!("expected a JSON object notification, got: {notification_json}"); - } - } - return Ok(()); - } - } - panic!("expected acknowledgement of connection initialization, got: {conn_response}"); + if let Value::Object(map) = notification_json.clone() { + if map.get("type") == Some(&data_json) && + map.get("id") == Some(&subscription_id_json) + { + let notification_pretty = + serde_json::to_string_pretty(map.get("payload").unwrap())?; + println!("{notification_pretty}"); + } else { + panic!("expected a response to subscription, got: {notification_json}"); + } + } else { + panic!("expected a JSON object notification, got: {notification_json}"); + } + } + return Ok(()); + } + } + panic!("expected acknowledgement of connection initialization, got: {conn_response}"); } diff --git a/crates/id-provider/src/main.rs b/crates/id-provider/src/main.rs index 919d5f3ba..69842fbf4 100644 --- a/crates/id-provider/src/main.rs +++ b/crates/id-provider/src/main.rs @@ -1,69 +1,69 @@ use std::process::Command; use oauth2::{ - AuthorizationCode, AuthUrl, basic::BasicClient, ClientId, ClientSecret, CsrfToken, - PkceCodeChallenge, RedirectUrl, reqwest::http_client, Scope, TokenResponse, TokenUrl, + basic::BasicClient, reqwest::http_client, AuthUrl, AuthorizationCode, ClientId, ClientSecret, + CsrfToken, PkceCodeChallenge, RedirectUrl, Scope, TokenResponse, TokenUrl, }; use url::Url; fn main() -> Result<(), anyhow::Error> { - // construct OAuth query: authorization code flow with PKCE + // construct OAuth query: authorization code flow with PKCE - let oauth_client = BasicClient::new( - ClientId::new("client-id".to_string()), - Some(ClientSecret::new("client-secret".to_string())), - AuthUrl::new("http://localhost:8090/authorize".to_string())?, - Some(TokenUrl::new("http://localhost:8090/token".to_string())?), - ) - .set_redirect_uri(RedirectUrl::new("http://example.com/callback".to_string())?); + let oauth_client = BasicClient::new( + 
ClientId::new("client-id".to_string()), + Some(ClientSecret::new("client-secret".to_string())), + AuthUrl::new("http://localhost:8090/authorize".to_string())?, + Some(TokenUrl::new("http://localhost:8090/token".to_string())?), + ) + .set_redirect_uri(RedirectUrl::new("http://example.com/callback".to_string())?); - let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); + let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256(); - let (auth_url, csrf_token) = oauth_client - .authorize_url(CsrfToken::new_random) - .add_scope(Scope::new("openid".to_string())) - .add_scope(Scope::new("profile".to_string())) - .add_scope(Scope::new("email".to_string())) - .set_pkce_challenge(pkce_challenge) - .url(); + let (auth_url, csrf_token) = oauth_client + .authorize_url(CsrfToken::new_random) + .add_scope(Scope::new("openid".to_string())) + .add_scope(Scope::new("profile".to_string())) + .add_scope(Scope::new("email".to_string())) + .set_pkce_challenge(pkce_challenge) + .url(); - // use curl to handle HTTP basic authentication + // use curl to handle HTTP basic authentication - let args = vec![ - "-w".to_string(), - "%{redirect_url}\n".to_string(), - "-u".to_string(), - "rmalina1:test-password".to_string(), - auth_url.to_string(), - ]; + let args = vec![ + "-w".to_string(), + "%{redirect_url}\n".to_string(), + "-u".to_string(), + "rmalina1:test-password".to_string(), + auth_url.to_string(), + ]; - let curl_output = Command::new("curl").args(args).output()?; + let curl_output = Command::new("curl").args(args).output()?; - // parse URL from redirect to callback with authorization code + // parse URL from redirect to callback with authorization code - let url = Url::parse(std::str::from_utf8(&curl_output.stdout)?.trim())?; + let url = Url::parse(std::str::from_utf8(&curl_output.stdout)?.trim())?; - let mut query_state = None; - let mut query_code = None; + let mut query_state = None; + let mut query_code = None; - for (key, value) in url.query_pairs() { - match key.to_string().as_str() { - "state" => query_state = Some(value), - "code" => query_code = Some(value), - _ => {} - } - } + for (key, value) in url.query_pairs() { + match key.to_string().as_str() { + "state" => query_state = Some(value), + "code" => query_code = Some(value), + _ => {}, + } + } - assert_eq!(*csrf_token.secret(), query_state.unwrap().to_string()); + assert_eq!(*csrf_token.secret(), query_state.unwrap().to_string()); - // exchange authorization code for access token + // exchange authorization code for access token - let auth_code = query_code.unwrap(); - let token_response = oauth_client - .exchange_code(AuthorizationCode::new(auth_code.to_string())) - .set_pkce_verifier(pkce_verifier) - .request(http_client)?; + let auth_code = query_code.unwrap(); + let token_response = oauth_client + .exchange_code(AuthorizationCode::new(auth_code.to_string())) + .set_pkce_verifier(pkce_verifier) + .request(http_client)?; - println!("{}", token_response.access_token().secret()); - Ok(()) + println!("{}", token_response.access_token().secret()); + Ok(()) } diff --git a/crates/opactl/build.rs b/crates/opactl/build.rs index 5a1d86bbe..afb2c9546 100644 --- a/crates/opactl/build.rs +++ b/crates/opactl/build.rs @@ -1,8 +1,8 @@ fn main() { - //Create a .VERSION file containing 'local' if it does not exist + //Create a .VERSION file containing 'local' if it does not exist - let version_file = std::path::Path::new("../../.VERSION"); - if !version_file.exists() { - std::fs::write(version_file, 
"local").expect("Unable to write file"); - } + let version_file = std::path::Path::new("../../.VERSION"); + if !version_file.exists() { + std::fs::write(version_file, "local").expect("Unable to write file"); + } } diff --git a/crates/opactl/src/cli.rs b/crates/opactl/src/cli.rs index 003bd5f5a..7f2ef32d2 100644 --- a/crates/opactl/src/cli.rs +++ b/crates/opactl/src/cli.rs @@ -1,170 +1,170 @@ use std::path::PathBuf; use clap::{ - Arg, - ArgAction, ArgMatches, builder::{NonEmptyStringValueParser, StringValueParser}, Command, ValueHint, + builder::{NonEmptyStringValueParser, StringValueParser}, + Arg, ArgAction, ArgMatches, Command, ValueHint, }; use tracing::info; use url::Url; use chronicle_signing::{ - BATCHER_NAMESPACE, ChronicleSecretsOptions, ChronicleSigning, OPA_NAMESPACE, opa_secret_names, - SecretError, + opa_secret_names, ChronicleSecretsOptions, ChronicleSigning, SecretError, BATCHER_NAMESPACE, + OPA_NAMESPACE, }; // Generate an ephemeral key if no key is provided fn batcher_key() -> Arg { - Arg::new("batcher-key-from-store") + Arg::new("batcher-key-from-store") .long("batcher-key-from-store") .num_args(0) .help("If specified the key 'batcher-pk' will be used to sign sawtooth transactions, otherwise an ephemeral key will be generated") } fn wait_args(command: Command) -> Command { - command.arg( - Arg::new("wait") - .long("wait") - .num_args(0..=1) - .value_parser(clap::value_parser!(u32).range(0..)) - .default_value("5") - .default_missing_value("5") - .help("Wait for the specified number of blocks to be committed before exiting"), - ) + command.arg( + Arg::new("wait") + .long("wait") + .num_args(0..=1) + .value_parser(clap::value_parser!(u32).range(0..)) + .default_value("5") + .default_missing_value("5") + .help("Wait for the specified number of blocks to be committed before exiting"), + ) } fn bootstrap() -> Command { - wait_args( - Command::new("bootstrap") - .about("Initialize the OPA transaction processor with a root key from the keystore") - .arg(batcher_key()), - ) + wait_args( + Command::new("bootstrap") + .about("Initialize the OPA transaction processor with a root key from the keystore") + .arg(batcher_key()), + ) } fn generate() -> Command { - Command::new("generate") - .arg(Arg::new("output").short('o').long("output").num_args(0..=1).help( - "The name to write the key to, if not specified then the key is written to stdout", - )) - .about("Generate a new private key and write it to the keystore") + Command::new("generate") + .arg(Arg::new("output").short('o').long("output").num_args(0..=1).help( + "The name to write the key to, if not specified then the key is written to stdout", + )) + .about("Generate a new private key and write it to the keystore") } fn rotate_root() -> Command { - wait_args( - Command::new("rotate-root") - .about("Rotate the root key for the OPA transaction processor") - .arg( - Arg::new("new-root-key") - .short('n') - .long("new-root-key") - .env("NEW_ROOT_KEY") - .required(true) - .num_args(1) - .value_hint(ValueHint::FilePath) - .help("The name of the new key in the keystore to register as the root key"), - ) - .arg(batcher_key()), - ) + wait_args( + Command::new("rotate-root") + .about("Rotate the root key for the OPA transaction processor") + .arg( + Arg::new("new-root-key") + .short('n') + .long("new-root-key") + .env("NEW_ROOT_KEY") + .required(true) + .num_args(1) + .value_hint(ValueHint::FilePath) + .help("The name of the new key in the keystore to register as the root key"), + ) + .arg(batcher_key()), + ) } fn register_key() -> Command { 
- wait_args( - Command::new("register-key") - .about("Register a new non root key with the OPA transaction processor") - .arg( - Arg::new("new-key") - .long("new-key") - .required(true) - .num_args(1) - .value_hint(ValueHint::FilePath) - .help("The keystore name of a PEM-encoded key to register"), - ) - .arg( - Arg::new("id") - .short('i') - .long("id") - .required(true) - .num_args(1) - .value_hint(ValueHint::Unknown) - .value_parser(NonEmptyStringValueParser::new()) - .help("The id of the key"), - ) - .arg( - Arg::new("overwrite") - .short('o') - .long("overwrite") - .action(ArgAction::SetTrue) - .help("Replace any existing non-root key"), - ) - .arg(batcher_key()), - ) + wait_args( + Command::new("register-key") + .about("Register a new non root key with the OPA transaction processor") + .arg( + Arg::new("new-key") + .long("new-key") + .required(true) + .num_args(1) + .value_hint(ValueHint::FilePath) + .help("The keystore name of a PEM-encoded key to register"), + ) + .arg( + Arg::new("id") + .short('i') + .long("id") + .required(true) + .num_args(1) + .value_hint(ValueHint::Unknown) + .value_parser(NonEmptyStringValueParser::new()) + .help("The id of the key"), + ) + .arg( + Arg::new("overwrite") + .short('o') + .long("overwrite") + .action(ArgAction::SetTrue) + .help("Replace any existing non-root key"), + ) + .arg(batcher_key()), + ) } fn rotate_key() -> Command { - wait_args( - Command::new("rotate-key") - .about("Rotate the key with the specified id for the OPA transaction processor") - .arg( - Arg::new("current-key") - .long("current-key") - .env("CURRENT_KEY") - .required(true) - .num_args(1) - .value_hint(ValueHint::FilePath) - .help("The keystore name of the current registered key"), - ) - .arg( - Arg::new("new-key") - .long("new-key") - .env("NEW_KEY") - .required(true) - .num_args(1) - .value_hint(ValueHint::FilePath) - .help("The keystore name of the new key to register"), - ) - .arg( - Arg::new("id") - .short('i') - .long("id") - .required(true) - .num_args(1) - .value_hint(ValueHint::Unknown) - .value_parser(NonEmptyStringValueParser::new()) - .help("The id of the key"), - ) - .arg(batcher_key()), - ) + wait_args( + Command::new("rotate-key") + .about("Rotate the key with the specified id for the OPA transaction processor") + .arg( + Arg::new("current-key") + .long("current-key") + .env("CURRENT_KEY") + .required(true) + .num_args(1) + .value_hint(ValueHint::FilePath) + .help("The keystore name of the current registered key"), + ) + .arg( + Arg::new("new-key") + .long("new-key") + .env("NEW_KEY") + .required(true) + .num_args(1) + .value_hint(ValueHint::FilePath) + .help("The keystore name of the new key to register"), + ) + .arg( + Arg::new("id") + .short('i') + .long("id") + .required(true) + .num_args(1) + .value_hint(ValueHint::Unknown) + .value_parser(NonEmptyStringValueParser::new()) + .help("The id of the key"), + ) + .arg(batcher_key()), + ) } fn set_policy() -> Command { - wait_args( - Command::new("set-policy") - .about("Set policy with id, requires access to root private key") - .arg( - Arg::new("id") - .short('i') - .long("id") - .num_args(1) - .value_hint(ValueHint::Unknown) - .value_parser(NonEmptyStringValueParser::new()) - .default_value("default") - .help("The id of the new policy"), - ) - .arg( - Arg::new("policy") - .short('p') - .long("policy") - .num_args(1) - .required(true) - .value_hint(ValueHint::Url) - .value_parser(StringValueParser::new()) - .help("A path or url to a policy bundle"), - ) - .arg(batcher_key()), - ) + wait_args( + 
Command::new("set-policy") + .about("Set policy with id, requires access to root private key") + .arg( + Arg::new("id") + .short('i') + .long("id") + .num_args(1) + .value_hint(ValueHint::Unknown) + .value_parser(NonEmptyStringValueParser::new()) + .default_value("default") + .help("The id of the new policy"), + ) + .arg( + Arg::new("policy") + .short('p') + .long("policy") + .num_args(1) + .required(true) + .value_hint(ValueHint::Url) + .value_parser(StringValueParser::new()) + .help("A path or url to a policy bundle"), + ) + .arg(batcher_key()), + ) } fn get_key() -> Command { - Command::new("get-key") + Command::new("get-key") .about("Get the currently registered public key") .arg( Arg::new("id") @@ -188,28 +188,28 @@ fn get_key() -> Command { } fn get_policy() -> Command { - Command::new("get-policy") - .about("Get the currently registered policy") - .arg( - Arg::new("id") - .short('i') - .long("id") - .num_args(1) - .value_hint(ValueHint::Unknown) - .value_parser(NonEmptyStringValueParser::new()) - .default_value("default") - .help("The id of the policy, if not specified then the default policy is returned"), - ) - .arg( - Arg::new("output") - .short('o') - .long("output") - .num_args(1) - .required(true) - .value_hint(ValueHint::FilePath) - .value_parser(NonEmptyStringValueParser::new()) - .help("The path to write the policy to"), - ) + Command::new("get-policy") + .about("Get the currently registered policy") + .arg( + Arg::new("id") + .short('i') + .long("id") + .num_args(1) + .value_hint(ValueHint::Unknown) + .value_parser(NonEmptyStringValueParser::new()) + .default_value("default") + .help("The id of the policy, if not specified then the default policy is returned"), + ) + .arg( + Arg::new("output") + .short('o') + .long("output") + .num_args(1) + .required(true) + .value_hint(ValueHint::FilePath) + .value_parser(NonEmptyStringValueParser::new()) + .help("The path to write the policy to"), + ) } pub const LONG_VERSION: &str = const_format::formatcp!( @@ -219,167 +219,167 @@ pub const LONG_VERSION: &str = const_format::formatcp!( ); pub fn cli() -> Command { - info!(opa_version = LONG_VERSION); - Command::new("opactl") - .version(LONG_VERSION) - .author("Blockchain Technology Partners") - .about("A command line tool for interacting with the OPA transaction processor") - .arg( - Arg::new("keystore-path") - .long("keystore-path") - .help("The path to a directory containing keys") - .value_parser(clap::value_parser!(PathBuf)) - .value_hint(ValueHint::DirPath) - .env("KEYSTORE_PATH") - .default_value("."), - ) - .arg( - Arg::new("batcher-key-from-path") - .long("batcher-key-from-path") - .action(ArgAction::SetTrue) - .help("Load batcher key from keystore path") - .conflicts_with("batcher-key-from-vault") - .conflicts_with("batcher-key-generated"), - ) - .arg( - Arg::new("batcher-key-from-vault") - .long("batcher-key-from-vault") - .action(ArgAction::SetTrue) - .help("Use Hashicorp Vault to store the batcher key") - .conflicts_with("batcher-key-from-path") - .conflicts_with("batcher-key-generated"), - ) - .arg( - Arg::new("batcher-key-generated") - .long("batcher-key-generated") - .action(ArgAction::SetTrue) - .help("Generate the batcher key in memory") - .conflicts_with("batcher-key-from-path") - .conflicts_with("batcher-key-from-vault"), - ) - .arg( - Arg::new("opa-key-from-path") - .long("opa-key-from-path") - .action(ArgAction::SetTrue) - .help("Use keystore path for the opa key located in 'opa-pk'") - .conflicts_with("opa-key-from-vault"), - ) - .arg( - 
Arg::new("opa-key-from-vault") - .long("opa-key-from-vault") - .action(ArgAction::SetTrue) - .help("Use Hashicorp Vault to store the Opa key") - .conflicts_with("opa-key-from-path"), - ) - .arg( - Arg::new("vault-address") - .long("vault-address") - .num_args(0..=1) - .value_parser(clap::value_parser!(Url)) - .value_hint(ValueHint::Url) - .help("URL for connecting to Hashicorp Vault") - .env("VAULT_ADDRESS"), - ) - .arg( - Arg::new("vault-token") - .long("vault-token") - .num_args(0..=1) - .help("Token for connecting to Hashicorp Vault") - .env("VAULT_TOKEN"), - ) - .arg( - Arg::new("vault-mount-path") - .long("vault-mount-path") - .num_args(0..=1) - .value_hint(ValueHint::DirPath) - .help("Mount path for vault secrets") - .default_value("/") - .env("VAULT_MOUNT_PATH"), - ) - .arg( - Arg::new("sawtooth-address") - .short('a') - .long("sawtooth-address") - .num_args(0..=1) - .help("The address of the Sawtooth ZMQ api, as zmq://host:port") - .value_parser(clap::value_parser!(Url)) - .env("SAWTOOTH_ADDRESS") - .default_value("tcp://localhost:4004"), - ) - .subcommand(bootstrap()) - .subcommand(generate()) - .subcommand(rotate_root()) - .subcommand(register_key()) - .subcommand(rotate_key()) - .subcommand(set_policy()) - .subcommand(get_key()) - .subcommand(get_policy()) + info!(opa_version = LONG_VERSION); + Command::new("opactl") + .version(LONG_VERSION) + .author("Blockchain Technology Partners") + .about("A command line tool for interacting with the OPA transaction processor") + .arg( + Arg::new("keystore-path") + .long("keystore-path") + .help("The path to a directory containing keys") + .value_parser(clap::value_parser!(PathBuf)) + .value_hint(ValueHint::DirPath) + .env("KEYSTORE_PATH") + .default_value("."), + ) + .arg( + Arg::new("batcher-key-from-path") + .long("batcher-key-from-path") + .action(ArgAction::SetTrue) + .help("Load batcher key from keystore path") + .conflicts_with("batcher-key-from-vault") + .conflicts_with("batcher-key-generated"), + ) + .arg( + Arg::new("batcher-key-from-vault") + .long("batcher-key-from-vault") + .action(ArgAction::SetTrue) + .help("Use Hashicorp Vault to store the batcher key") + .conflicts_with("batcher-key-from-path") + .conflicts_with("batcher-key-generated"), + ) + .arg( + Arg::new("batcher-key-generated") + .long("batcher-key-generated") + .action(ArgAction::SetTrue) + .help("Generate the batcher key in memory") + .conflicts_with("batcher-key-from-path") + .conflicts_with("batcher-key-from-vault"), + ) + .arg( + Arg::new("opa-key-from-path") + .long("opa-key-from-path") + .action(ArgAction::SetTrue) + .help("Use keystore path for the opa key located in 'opa-pk'") + .conflicts_with("opa-key-from-vault"), + ) + .arg( + Arg::new("opa-key-from-vault") + .long("opa-key-from-vault") + .action(ArgAction::SetTrue) + .help("Use Hashicorp Vault to store the Opa key") + .conflicts_with("opa-key-from-path"), + ) + .arg( + Arg::new("vault-address") + .long("vault-address") + .num_args(0..=1) + .value_parser(clap::value_parser!(Url)) + .value_hint(ValueHint::Url) + .help("URL for connecting to Hashicorp Vault") + .env("VAULT_ADDRESS"), + ) + .arg( + Arg::new("vault-token") + .long("vault-token") + .num_args(0..=1) + .help("Token for connecting to Hashicorp Vault") + .env("VAULT_TOKEN"), + ) + .arg( + Arg::new("vault-mount-path") + .long("vault-mount-path") + .num_args(0..=1) + .value_hint(ValueHint::DirPath) + .help("Mount path for vault secrets") + .default_value("/") + .env("VAULT_MOUNT_PATH"), + ) + .arg( + Arg::new("sawtooth-address") + .short('a') + 
.long("sawtooth-address") + .num_args(0..=1) + .help("The address of the Sawtooth ZMQ api, as zmq://host:port") + .value_parser(clap::value_parser!(Url)) + .env("SAWTOOTH_ADDRESS") + .default_value("tcp://localhost:4004"), + ) + .subcommand(bootstrap()) + .subcommand(generate()) + .subcommand(rotate_root()) + .subcommand(register_key()) + .subcommand(rotate_key()) + .subcommand(set_policy()) + .subcommand(get_key()) + .subcommand(get_policy()) } // Chronicle secret store needs to know what secret names are used in advance, // so extract from potential cli args fn additional_secret_names(expected: Vec<&str>, matches: &ArgMatches) -> Vec { - expected.iter().filter_map(|x| matches.get_one::(x).cloned()).collect() + expected.iter().filter_map(|x| matches.get_one::(x).cloned()).collect() } // Batcher keys may be ephemeral if batcher-key-from-path is not set, also we need to know secret // names in advance, so must inspect the supplied CLI arguments pub(crate) async fn configure_signing( - expected: Vec<&str>, - root_matches: &ArgMatches, - matches: &ArgMatches, + expected: Vec<&str>, + root_matches: &ArgMatches, + matches: &ArgMatches, ) -> Result { - let mut secret_names = opa_secret_names(); - secret_names.append( - &mut additional_secret_names(expected, matches) - .into_iter() - .map(|name| (OPA_NAMESPACE.to_string(), name.to_string())) - .collect(), - ); - let keystore_path = root_matches.get_one::("keystore-path").unwrap(); + let mut secret_names = opa_secret_names(); + secret_names.append( + &mut additional_secret_names(expected, matches) + .into_iter() + .map(|name| (OPA_NAMESPACE.to_string(), name.to_string())) + .collect(), + ); + let keystore_path = root_matches.get_one::("keystore-path").unwrap(); - let opa_key_from_vault = root_matches.get_one("opa-key-from-vault").is_some_and(|x| *x); - let opa_secret_options = if opa_key_from_vault { - ChronicleSecretsOptions::stored_in_vault( - matches.get_one("vault-url").unwrap(), - matches.get_one("vault-token").cloned().unwrap(), - matches.get_one("vault-mount-path").cloned().unwrap(), - ) - } else { - ChronicleSecretsOptions::stored_at_path(keystore_path) - }; - let opa_secret = (OPA_NAMESPACE.to_string(), opa_secret_options); + let opa_key_from_vault = root_matches.get_one("opa-key-from-vault").is_some_and(|x| *x); + let opa_secret_options = if opa_key_from_vault { + ChronicleSecretsOptions::stored_in_vault( + matches.get_one("vault-url").unwrap(), + matches.get_one("vault-token").cloned().unwrap(), + matches.get_one("vault-mount-path").cloned().unwrap(), + ) + } else { + ChronicleSecretsOptions::stored_at_path(keystore_path) + }; + let opa_secret = (OPA_NAMESPACE.to_string(), opa_secret_options); - let batcher_key_from_path = root_matches.get_one("batcher-key-from-path").is_some_and(|x| *x); - let batcher_key_from_vault = root_matches.get_one("batcher-key-from-vault").is_some_and(|x| *x); - let batcher_secret_options = if batcher_key_from_path { - ChronicleSecretsOptions::stored_at_path(keystore_path) - } else if batcher_key_from_vault { - ChronicleSecretsOptions::stored_in_vault( - matches.get_one("vault-url").unwrap(), - matches.get_one("vault-token").cloned().unwrap(), - matches.get_one("vault-mount-path").cloned().unwrap(), - ) - } else { - ChronicleSecretsOptions::generate_in_memory() - }; - let batcher_secret = (BATCHER_NAMESPACE.to_string(), batcher_secret_options); + let batcher_key_from_path = root_matches.get_one("batcher-key-from-path").is_some_and(|x| *x); + let batcher_key_from_vault = 
+		root_matches.get_one("batcher-key-from-vault").is_some_and(|x| *x);
+	let batcher_secret_options = if batcher_key_from_path {
+		ChronicleSecretsOptions::stored_at_path(keystore_path)
+	} else if batcher_key_from_vault {
+		ChronicleSecretsOptions::stored_in_vault(
+			root_matches.get_one("vault-address").unwrap(),
+			root_matches.get_one("vault-token").cloned().unwrap(),
+			root_matches.get_one("vault-mount-path").cloned().unwrap(),
+		)
+	} else {
+		ChronicleSecretsOptions::generate_in_memory()
+	};
+	let batcher_secret = (BATCHER_NAMESPACE.to_string(), batcher_secret_options);
 
-    let secrets = vec![opa_secret, batcher_secret];
-    ChronicleSigning::new(secret_names, secrets).await
+	let secrets = vec![opa_secret, batcher_secret];
+	ChronicleSigning::new(secret_names, secrets).await
 }
 
 #[derive(Debug, Clone, Copy)]
 pub(crate) enum Wait {
-    NoWait,
-    NumberOfBlocks(u32),
+	NoWait,
+	NumberOfBlocks(u32),
 }
 
 impl Wait {
-    pub(crate) fn from_matches(matches: &ArgMatches) -> Self {
-        match matches.get_one::<u32>("wait") {
-            Some(blocks) if *blocks > 0 => Wait::NumberOfBlocks(*blocks),
-            _ => Wait::NoWait,
-        }
-    }
+	pub(crate) fn from_matches(matches: &ArgMatches) -> Self {
+		match matches.get_one::<u32>("wait") {
+			Some(blocks) if *blocks > 0 => Wait::NumberOfBlocks(*blocks),
+			_ => Wait::NoWait,
+		}
+	}
 }
diff --git a/crates/opactl/src/main.rs b/crates/opactl/src/main.rs
index 98e41df07..80e0c1f04 100644
--- a/crates/opactl/src/main.rs
+++ b/crates/opactl/src/main.rs
@@ -11,27 +11,27 @@ use rand_core::SeedableRng;
 use serde::Serialize;
 use thiserror::Error;
 use tokio::runtime::Handle;
-use tracing::{debug, error, info, instrument, Instrument, Level, span};
+use tracing::{debug, error, info, instrument, span, Instrument, Level};
 use url::Url;
 use user_error::UFE;
 use uuid::Uuid;
 
-use chronicle_signing::{OPA_PK, OpaKnownKeyNamesSigner, SecretError};
+use chronicle_signing::{OpaKnownKeyNamesSigner, SecretError, OPA_PK};
 use cli::{configure_signing, Wait};
 use common::{
 	opa::{
 		codec::{KeysV1, PolicyV1},
-		Keys,
-		Policy,
-		std::{FromUrlError, key_address, load_bytes_from_url},
+		std::{key_address, load_bytes_from_url, FromUrlError},
+		Keys, Policy,
 	},
 	prov::ChronicleTransactionId,
 };
 use protocol_abstract::{FromBlock, LedgerReader, LedgerWriter};
 use protocol_substrate::{PolkadotConfig, SubstrateStateReader, SubxtClientError};
 use protocol_substrate_opa::{
-	OpaEvent,
-	OpaSubstrateClient,
-	submission_builder::SubmissionBuilder, transaction::{OpaTransaction, TransactionError},
+	submission_builder::SubmissionBuilder,
+	transaction::{OpaTransaction, TransactionError},
+	OpaEvent, OpaSubstrateClient,
 };
 
 mod cli;
@@ -41,108 +41,108 @@ mod test;
 
 #[derive(Error, Debug)]
 pub enum OpaCtlError {
-    #[error("Operation cancelled {0}")]
-    Cancelled(oneshot::Canceled),
-
-    #[error("Communication error: {0}")]
-    Communication(
-        #[from]
-        #[source]
-        SubxtClientError,
-    ),
-
-    #[error("IO error: {0}")]
-    IO(
-        #[from]
-        #[source]
-        std::io::Error,
-    ),
-
-    #[error("Json error: {0}")]
-    Json(
-        #[from]
-        #[source]
-        serde_json::Error,
-    ),
-
-    #[error("Pkcs8 error")]
-    Pkcs8,
-
-    #[error("Transaction failed: {0}")]
-    TransactionFailed(String),
-
-    #[error("Transaction not found after wait: {0}")]
-    TransactionNotFound(ChronicleTransactionId),
-
-    #[error("Error loading from URL: {0}")]
-    Url(
-        #[from]
-        #[source]
-        FromUrlError,
-    ),
-
-    #[error("Utf8 error: {0}")]
-    Utf8(
-        #[from]
-        #[source]
-        std::str::Utf8Error,
-    ),
-
-    #[error("Signing: {0}")]
-    Signing(
-        #[from]
-        #[source]
-        SecretError,
-    ),
-
-    #[error("Missing Argument")]
-
MissingArgument(String), - - #[error("Not found")] - NotFound, - - #[error("Could not build transaction {0}")] - InvalidTransaction( - #[from] - #[source] - TransactionError, - ), + #[error("Operation cancelled {0}")] + Cancelled(oneshot::Canceled), + + #[error("Communication error: {0}")] + Communication( + #[from] + #[source] + SubxtClientError, + ), + + #[error("IO error: {0}")] + IO( + #[from] + #[source] + std::io::Error, + ), + + #[error("Json error: {0}")] + Json( + #[from] + #[source] + serde_json::Error, + ), + + #[error("Pkcs8 error")] + Pkcs8, + + #[error("Transaction failed: {0}")] + TransactionFailed(String), + + #[error("Transaction not found after wait: {0}")] + TransactionNotFound(ChronicleTransactionId), + + #[error("Error loading from URL: {0}")] + Url( + #[from] + #[source] + FromUrlError, + ), + + #[error("Utf8 error: {0}")] + Utf8( + #[from] + #[source] + std::str::Utf8Error, + ), + + #[error("Signing: {0}")] + Signing( + #[from] + #[source] + SecretError, + ), + + #[error("Missing Argument")] + MissingArgument(String), + + #[error("Not found")] + NotFound, + + #[error("Could not build transaction {0}")] + InvalidTransaction( + #[from] + #[source] + TransactionError, + ), } impl From for OpaCtlError { - fn from(_: Infallible) -> Self { - unreachable!() - } + fn from(_: Infallible) -> Self { + unreachable!() + } } impl UFE for OpaCtlError {} #[derive(Debug, Serialize)] pub enum Waited { - NoWait, - WaitedAndFound(OpaEvent), - WaitedAndDidNotFind, + NoWait, + WaitedAndFound(OpaEvent), + WaitedAndDidNotFind, } // Collect incoming transaction ids before running submission, as there is the /// potential to miss transactions if we do not collect them 'before' submission async fn ambient_transactions< - R: LedgerReader + Send + Sync + Clone + 'static, + R: LedgerReader + Send + Sync + Clone + 'static, >( - client: &R, - goal_tx_id: ChronicleTransactionId, - max_steps: u32, -) -> impl Future> { - let span = span!(Level::DEBUG, "wait_for_opa_transaction"); - let client = client.clone(); - // Set up a oneshot channel to notify the returned task - let (notify_tx, notify_rx) = oneshot::channel::(); - - // And a oneshot channel to ensure we are receiving events from the chain - // before we return - let (receiving_events_tx, receiving_events_rx) = oneshot::channel::<()>(); - - Handle::current().spawn(async move { + client: &R, + goal_tx_id: ChronicleTransactionId, + max_steps: u32, +) -> impl Future> { + let span = span!(Level::DEBUG, "wait_for_opa_transaction"); + let client = client.clone(); + // Set up a oneshot channel to notify the returned task + let (notify_tx, notify_rx) = oneshot::channel::(); + + // And a oneshot channel to ensure we are receiving events from the chain + // before we return + let (receiving_events_tx, receiving_events_rx) = oneshot::channel::<()>(); + + Handle::current().spawn(async move { // We can immediately return if we are not waiting debug!(waiting_for=?goal_tx_id, max_steps=?max_steps); let goal_clone = goal_tx_id; @@ -188,245 +188,245 @@ async fn ambient_transactions< } }.instrument(span)); - // Wait for the task to start receiving events - let _ = receiving_events_rx.await; + // Wait for the task to start receiving events + let _ = receiving_events_rx.await; - notify_rx + notify_rx } #[instrument(skip(client, matches, submission))] async fn handle_wait< - CLIENT: LedgerReader - + LedgerWriter - + Clone - + Send - + Sync - + 'static, + CLIENT: LedgerReader + + LedgerWriter + + Clone + + Send + + Sync + + 'static, >( - matches: &ArgMatches, - 
client: &CLIENT, - submission: OpaTransaction, + matches: &ArgMatches, + client: &CLIENT, + submission: OpaTransaction, ) -> Result { - let wait = Wait::from_matches(matches); - match wait { - Wait::NoWait => { - let (ext, _) = client.pre_submit(submission).await?; - client - .do_submit(protocol_abstract::WriteConsistency::Weak, ext) - .await - .map_err(|(e, _id)| e)?; - - Ok(Waited::NoWait) - } - Wait::NumberOfBlocks(blocks) => { - let (ext, tx_id) = client.pre_submit(submission).await?; - let waiter = ambient_transactions(client, tx_id, blocks).await; - client - .do_submit(protocol_abstract::WriteConsistency::Strong, ext) - .await - .map_err(|(e, _id)| e)?; - debug!(awaiting_tx=%tx_id, waiting_blocks=%blocks); - match waiter.await { - Ok(Waited::WaitedAndDidNotFind) => Err(OpaCtlError::TransactionNotFound(tx_id)), - Ok(x) => Ok(x), - Err(e) => Err(OpaCtlError::Cancelled(e)), - } - } - } + let wait = Wait::from_matches(matches); + match wait { + Wait::NoWait => { + let (ext, _) = client.pre_submit(submission).await?; + client + .do_submit(protocol_abstract::WriteConsistency::Weak, ext) + .await + .map_err(|(e, _id)| e)?; + + Ok(Waited::NoWait) + }, + Wait::NumberOfBlocks(blocks) => { + let (ext, tx_id) = client.pre_submit(submission).await?; + let waiter = ambient_transactions(client, tx_id, blocks).await; + client + .do_submit(protocol_abstract::WriteConsistency::Strong, ext) + .await + .map_err(|(e, _id)| e)?; + debug!(awaiting_tx=%tx_id, waiting_blocks=%blocks); + match waiter.await { + Ok(Waited::WaitedAndDidNotFind) => Err(OpaCtlError::TransactionNotFound(tx_id)), + Ok(x) => Ok(x), + Err(e) => Err(OpaCtlError::Cancelled(e)), + } + }, + } } async fn dispatch_args< - CLIENT: LedgerWriter - + Send - + Sync - + LedgerReader - + SubstrateStateReader - + Clone - + 'static, + CLIENT: LedgerWriter + + Send + + Sync + + LedgerReader + + SubstrateStateReader + + Clone + + 'static, >( - matches: ArgMatches, - client: &CLIENT, + matches: ArgMatches, + client: &CLIENT, ) -> Result { - let span = span!(Level::TRACE, "dispatch_args"); - let _entered = span.enter(); - let span_id = span.id().map(|x| x.into_u64()).unwrap_or(u64::MAX); - match matches.subcommand() { - Some(("bootstrap", command_matches)) => { - let signing = configure_signing(vec![], &matches, command_matches).await?; - let bootstrap = SubmissionBuilder::bootstrap_root(signing.opa_verifying().await?) - .build(span_id, Uuid::new_v4()); - Ok(handle_wait( - command_matches, - client, - OpaTransaction::bootstrap_root(bootstrap, &signing).await?, - ) - .await?) - } - Some(("generate", matches)) => { - let key = SecretKey::random(StdRng::from_entropy()); - let key = key.to_pkcs8_pem(LineEnding::CRLF).map_err(|_| OpaCtlError::Pkcs8)?; - - if let Some(path) = matches.get_one::("output") { - let mut file = File::create(path)?; - file.write_all(key.as_bytes())?; - } else { - print!("{}", *key); - } - - Ok(Waited::NoWait) - } - Some(("rotate-root", command_matches)) => { - let signing = - configure_signing(vec!["new-root-key"], &matches, command_matches).await?; - let rotate_key = SubmissionBuilder::rotate_key( - "root", - &signing, - OPA_PK, - command_matches - .get_one::("new-root-key") - .ok_or_else(|| OpaCtlError::MissingArgument("new-root-key".to_owned()))?, - ) - .await? - .build(span_id, Uuid::new_v4()); - Ok(handle_wait( - command_matches, - client, - OpaTransaction::rotate_root(rotate_key, &signing).await?, - ) - .await?) 
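
The `ambient_transactions` helper above deliberately subscribes to the event stream and signals readiness before `handle_wait` submits, so a matching transaction landing immediately after submission cannot be missed. A minimal, self-contained sketch of that subscribe-before-submit shape, using tokio channels and hypothetical `Event`/`watch_for` names rather than the crate's own types:

use tokio::sync::{broadcast, oneshot};

#[derive(Clone, Debug)]
struct Event {
	tx_id: u64,
}

// Spawn a watcher BEFORE submitting; only return once it is live, so an
// event arriving right after submission cannot slip past unobserved.
async fn watch_for(
	mut events: broadcast::Receiver<Event>,
	goal: u64,
	max_steps: u32,
) -> oneshot::Receiver<bool> {
	let (notify_tx, notify_rx) = oneshot::channel();
	let (ready_tx, ready_rx) = oneshot::channel::<()>();
	tokio::spawn(async move {
		let _ = ready_tx.send(()); // signal: the watcher is now receiving
		let mut steps = 0u32;
		while let Ok(event) = events.recv().await {
			if event.tx_id == goal {
				let _ = notify_tx.send(true);
				return;
			}
			steps += 1; // the real code counts blocks; events stand in here
			if steps >= max_steps {
				let _ = notify_tx.send(false);
				return;
			}
		}
	});
	let _ = ready_rx.await; // don't submit until the watcher is live
	notify_rx
}

#[tokio::main]
async fn main() {
	let (tx, rx) = broadcast::channel(16);
	let waiter = watch_for(rx, 42, 3).await;
	tx.send(Event { tx_id: 42 }).unwrap(); // stands in for do_submit
	assert!(waiter.await.unwrap());
}
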
- } - Some(("register-key", command_matches)) => { - let signing = configure_signing(vec!["new-key"], &matches, command_matches).await?; - let new_key = &command_matches - .get_one::("new-key") - .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; - let id = command_matches.get_one::("id").unwrap(); - let overwrite_existing = command_matches.get_flag("overwrite"); - let register_key = - SubmissionBuilder::register_key(id, new_key, &signing, overwrite_existing) - .await? - .build(span_id, Uuid::new_v4()); - Ok(handle_wait( - command_matches, - client, - OpaTransaction::register_key(id, register_key, &signing, overwrite_existing) - .await?, - ) - .await?) - } - Some(("rotate-key", command_matches)) => { - let signing = - configure_signing(vec!["current-key", "new-key"], &matches, command_matches) - .await?; - - let current_key = &command_matches - .get_one::("current-key") - .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; - let new_key = &command_matches - .get_one::("new-key") - .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; - let id = command_matches.get_one::("id").unwrap(); - let rotate_key = SubmissionBuilder::rotate_key(id, &signing, new_key, current_key) - .await? - .build(span_id, Uuid::new_v4()); - Ok(handle_wait( - command_matches, - client, - OpaTransaction::rotate_key(id, rotate_key, &signing).await?, - ) - .await?) - } - Some(("set-policy", command_matches)) => { - let signing = configure_signing(vec![], &matches, command_matches).await?; - let policy: &String = command_matches.get_one("policy").unwrap(); - - let policy = load_bytes_from_url(policy).await?; - - let id = command_matches.get_one::("id").unwrap(); - - let bootstrap = SubmissionBuilder::set_policy(id, policy, &signing) - .await? - .build(span_id, Uuid::new_v4()); - Ok(handle_wait( - command_matches, - client, - OpaTransaction::set_policy(id, bootstrap, &signing).await?, - ) - .await?) - } - Some(("get-key", matches)) => { - let key: Result, _> = client - .get_state_entry( - "Opa", - "KeyStore", - key_address(matches.get_one::("id").unwrap()), - ) - .await; - - let key: KeysV1 = key.map_err(OpaCtlError::from)?.ok_or(OpaCtlError::NotFound)?; - let key = Keys::try_from(key)?; - - debug!(loaded_key = ?key); - - let key = key.current.key; - - if let Some(path) = matches.get_one::("output") { - let mut file = File::create(path)?; - file.write_all(key.as_bytes())?; - } else { - print!("{}", key.as_str()); - } - - Ok(Waited::NoWait) - } - Some(("get-policy", matches)) => { - let policy: Option = client - .get_state_entry( - "Opa", - "PolicyStore", - key_address(matches.get_one::("id").unwrap()), - ) - .await?; - - let policy = policy.ok_or(OpaCtlError::NotFound)?; - - if let Some(path) = matches.get_one::("output") { - let mut file = File::create(path)?; - file.write_all(Policy::try_from(policy)?.as_bytes())?; - } - - Ok(Waited::NoWait) - } - _ => Ok(Waited::NoWait), - } + let span = span!(Level::TRACE, "dispatch_args"); + let _entered = span.enter(); + let span_id = span.id().map(|x| x.into_u64()).unwrap_or(u64::MAX); + match matches.subcommand() { + Some(("bootstrap", command_matches)) => { + let signing = configure_signing(vec![], &matches, command_matches).await?; + let bootstrap = SubmissionBuilder::bootstrap_root(signing.opa_verifying().await?) + .build(span_id, Uuid::new_v4()); + Ok(handle_wait( + command_matches, + client, + OpaTransaction::bootstrap_root(bootstrap, &signing).await?, + ) + .await?) 
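
Each arm of `dispatch_args` follows the same shape: read arguments, build a submission, wrap it in an `OpaTransaction`, and pass it to `handle_wait`. A stripped-down sketch of just the clap subcommand dispatch (a hypothetical `opactl-mini` command with no signing or ledger access):

use clap::{Arg, ArgMatches, Command};

fn mini_cli() -> Command {
	Command::new("opactl-mini")
		.subcommand(
			Command::new("get-key").arg(Arg::new("id").long("id").default_value("default")),
		)
		.subcommand(Command::new("bootstrap"))
}

fn dispatch(matches: &ArgMatches) -> String {
	match matches.subcommand() {
		// Arguments are stored as `String` by default; `get_one` borrows them back
		Some(("get-key", m)) => format!("get-key id={}", m.get_one::<String>("id").unwrap()),
		Some(("bootstrap", _)) => "bootstrap".to_string(),
		_ => "no-op".to_string(),
	}
}

fn main() {
	let matches = mini_cli().get_matches_from(["opactl-mini", "get-key", "--id", "test"]);
	assert_eq!(dispatch(&matches), "get-key id=test");
}
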
+ }, + Some(("generate", matches)) => { + let key = SecretKey::random(StdRng::from_entropy()); + let key = key.to_pkcs8_pem(LineEnding::CRLF).map_err(|_| OpaCtlError::Pkcs8)?; + + if let Some(path) = matches.get_one::("output") { + let mut file = File::create(path)?; + file.write_all(key.as_bytes())?; + } else { + print!("{}", *key); + } + + Ok(Waited::NoWait) + }, + Some(("rotate-root", command_matches)) => { + let signing = + configure_signing(vec!["new-root-key"], &matches, command_matches).await?; + let rotate_key = SubmissionBuilder::rotate_key( + "root", + &signing, + OPA_PK, + command_matches + .get_one::("new-root-key") + .ok_or_else(|| OpaCtlError::MissingArgument("new-root-key".to_owned()))?, + ) + .await? + .build(span_id, Uuid::new_v4()); + Ok(handle_wait( + command_matches, + client, + OpaTransaction::rotate_root(rotate_key, &signing).await?, + ) + .await?) + }, + Some(("register-key", command_matches)) => { + let signing = configure_signing(vec!["new-key"], &matches, command_matches).await?; + let new_key = &command_matches + .get_one::("new-key") + .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; + let id = command_matches.get_one::("id").unwrap(); + let overwrite_existing = command_matches.get_flag("overwrite"); + let register_key = + SubmissionBuilder::register_key(id, new_key, &signing, overwrite_existing) + .await? + .build(span_id, Uuid::new_v4()); + Ok(handle_wait( + command_matches, + client, + OpaTransaction::register_key(id, register_key, &signing, overwrite_existing) + .await?, + ) + .await?) + }, + Some(("rotate-key", command_matches)) => { + let signing = + configure_signing(vec!["current-key", "new-key"], &matches, command_matches) + .await?; + + let current_key = &command_matches + .get_one::("current-key") + .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; + let new_key = &command_matches + .get_one::("new-key") + .ok_or_else(|| OpaCtlError::MissingArgument("new-key".to_owned()))?; + let id = command_matches.get_one::("id").unwrap(); + let rotate_key = SubmissionBuilder::rotate_key(id, &signing, new_key, current_key) + .await? + .build(span_id, Uuid::new_v4()); + Ok(handle_wait( + command_matches, + client, + OpaTransaction::rotate_key(id, rotate_key, &signing).await?, + ) + .await?) + }, + Some(("set-policy", command_matches)) => { + let signing = configure_signing(vec![], &matches, command_matches).await?; + let policy: &String = command_matches.get_one("policy").unwrap(); + + let policy = load_bytes_from_url(policy).await?; + + let id = command_matches.get_one::("id").unwrap(); + + let bootstrap = SubmissionBuilder::set_policy(id, policy, &signing) + .await? + .build(span_id, Uuid::new_v4()); + Ok(handle_wait( + command_matches, + client, + OpaTransaction::set_policy(id, bootstrap, &signing).await?, + ) + .await?) 
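
The `generate` arm above is the only one that never touches the chain: it produces a fresh PKCS#8 key and writes it to `--output` or stdout. A self-contained sketch of that step, reusing the same `k256`/`rand` calls that appear elsewhere in this diff (`generate_pem` is a hypothetical helper name):

use k256::{
	pkcs8::{EncodePrivateKey, LineEnding},
	SecretKey,
};
use rand::rngs::StdRng;
use rand_core::SeedableRng;

// Generate a fresh signing key and serialize it as PKCS#8 PEM, as the
// `generate` subcommand does before writing it out.
fn generate_pem() -> String {
	let key = SecretKey::random(StdRng::from_entropy());
	key.to_pkcs8_pem(LineEnding::CRLF)
		.expect("PKCS#8 encoding of a fresh key should not fail")
		.to_string()
}

fn main() {
	let pem = generate_pem();
	assert!(pem.starts_with("-----BEGIN PRIVATE KEY-----"));
}
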
+ }, + Some(("get-key", matches)) => { + let key: Result, _> = client + .get_state_entry( + "Opa", + "KeyStore", + key_address(matches.get_one::("id").unwrap()), + ) + .await; + + let key: KeysV1 = key.map_err(OpaCtlError::from)?.ok_or(OpaCtlError::NotFound)?; + let key = Keys::try_from(key)?; + + debug!(loaded_key = ?key); + + let key = key.current.key; + + if let Some(path) = matches.get_one::("output") { + let mut file = File::create(path)?; + file.write_all(key.as_bytes())?; + } else { + print!("{}", key.as_str()); + } + + Ok(Waited::NoWait) + }, + Some(("get-policy", matches)) => { + let policy: Option = client + .get_state_entry( + "Opa", + "PolicyStore", + key_address(matches.get_one::("id").unwrap()), + ) + .await?; + + let policy = policy.ok_or(OpaCtlError::NotFound)?; + + if let Some(path) = matches.get_one::("output") { + let mut file = File::create(path)?; + file.write_all(Policy::try_from(policy)?.as_bytes())?; + } + + Ok(Waited::NoWait) + }, + _ => Ok(Waited::NoWait), + } } #[tokio::main] async fn main() { - chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty); - let args = cli::cli().get_matches(); - let address: &Url = args.get_one("sawtooth-address").unwrap(); - let client = match OpaSubstrateClient::::connect(address).await { - Ok(client) => client, - Err(e) => { - error!("Failed to connect to the OPA Substrate Client: {:?}", e); - std::process::exit(-1); - } - }; - dispatch_args(args, &client) - .await - .map_err(|opactl| { - error!(?opactl); - opactl.into_ufe().print(); - std::process::exit(1); - }) - .map(|waited| { - if let Waited::WaitedAndFound(op) = waited { - println!( - "{}", - serde_json::to_string_pretty(&serde_json::to_value(op).unwrap()).unwrap() - ); - } - }) - .ok(); + chronicle_telemetry::telemetry(chronicle_telemetry::ConsoleLogging::Pretty); + let args = cli::cli().get_matches(); + let address: &Url = args.get_one("sawtooth-address").unwrap(); + let client = match OpaSubstrateClient::::connect(address).await { + Ok(client) => client, + Err(e) => { + error!("Failed to connect to the OPA Substrate Client: {:?}", e); + std::process::exit(-1); + }, + }; + dispatch_args(args, &client) + .await + .map_err(|opactl| { + error!(?opactl); + opactl.into_ufe().print(); + std::process::exit(1); + }) + .map(|waited| { + if let Waited::WaitedAndFound(op) = waited { + println!( + "{}", + serde_json::to_string_pretty(&serde_json::to_value(op).unwrap()).unwrap() + ); + } + }) + .ok(); } diff --git a/crates/opactl/src/test/mockchain.rs b/crates/opactl/src/test/mockchain.rs index d89246e1e..f7f9f17f2 100644 --- a/crates/opactl/src/test/mockchain.rs +++ b/crates/opactl/src/test/mockchain.rs @@ -1,8 +1,8 @@ use frame_support::traits::{ConstU16, ConstU64}; use sp_core::H256; use sp_runtime::{ - BuildStorage, - traits::{BlakeTwo256, IdentityLookup}, + traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; type Block = frame_system::mocking::MockBlock; @@ -17,44 +17,44 @@ frame_support::construct_runtime!( ); impl frame_system::Config for Test { - type AccountData = (); - type AccountId = u64; - type BaseCallFilter = frame_support::traits::Everything; - type Block = Block; - type BlockHashCount = ConstU64<250>; - type BlockLength = (); - type BlockWeights = (); - type DbWeight = (); - type Hash = H256; - type Hashing = BlakeTwo256; - type Lookup = IdentityLookup; - type MaxConsumers = frame_support::traits::ConstU32<16>; - type Nonce = u64; - type OnKilledAccount = (); - type OnNewAccount = (); - type OnSetCode = (); - type PalletInfo = 
PalletInfo; - type RuntimeCall = RuntimeCall; - type RuntimeEvent = RuntimeEvent; - type RuntimeOrigin = RuntimeOrigin; - type SS58Prefix = ConstU16<42>; - type SystemWeightInfo = (); - type Version = (); - type RuntimeTask = (); - type SingleBlockMigrations = (); - type MultiBlockMigrator = (); - type PreInherents = (); - type PostInherents = (); - type PostTransactions = (); + type AccountData = (); + type AccountId = u64; + type BaseCallFilter = frame_support::traits::Everything; + type Block = Block; + type BlockHashCount = ConstU64<250>; + type BlockLength = (); + type BlockWeights = (); + type DbWeight = (); + type Hash = H256; + type Hashing = BlakeTwo256; + type Lookup = IdentityLookup; + type MaxConsumers = frame_support::traits::ConstU32<16>; + type MultiBlockMigrator = (); + type Nonce = u64; + type OnKilledAccount = (); + type OnNewAccount = (); + type OnSetCode = (); + type PalletInfo = PalletInfo; + type PostInherents = (); + type PostTransactions = (); + type PreInherents = (); + type RuntimeCall = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type RuntimeOrigin = RuntimeOrigin; + type RuntimeTask = (); + type SS58Prefix = ConstU16<42>; + type SingleBlockMigrations = (); + type SystemWeightInfo = (); + type Version = (); } impl pallet_opa::Config for Test { - type OpaSubmission = common::opa::codec::OpaSubmissionV1; - type RuntimeEvent = RuntimeEvent; - type WeightInfo = (); + type OpaSubmission = common::opa::codec::OpaSubmissionV1; + type RuntimeEvent = RuntimeEvent; + type WeightInfo = (); } // Build genesis storage according to the mock runtime. pub fn new_test_ext() -> sp_io::TestExternalities { - frame_system::GenesisConfig::::default().build_storage().unwrap().into() + frame_system::GenesisConfig::::default().build_storage().unwrap().into() } diff --git a/crates/opactl/src/test/mod.rs b/crates/opactl/src/test/mod.rs index 971f32fa0..9b9cdc4f8 100644 --- a/crates/opactl/src/test/mod.rs +++ b/crates/opactl/src/test/mod.rs @@ -2,8 +2,8 @@ use std::io::Write; use clap::ArgMatches; use k256::{ - pkcs8::{EncodePrivateKey, LineEnding}, - SecretKey, + pkcs8::{EncodePrivateKey, LineEnding}, + SecretKey, }; use rand::rngs::StdRng; use rand_core::SeedableRng; @@ -17,45 +17,44 @@ mod mockchain; mod stubstrate; fn get_opactl_cmd(command_line: &str) -> ArgMatches { - let cli = cli::cli(); - cli.get_matches_from(command_line.split_whitespace()) + let cli = cli::cli(); + cli.get_matches_from(command_line.split_whitespace()) } fn key_from_seed(seed: u8) -> String { - let secret: SecretKey = SecretKey::random(StdRng::from_seed([seed; 32])); - secret.to_pkcs8_pem(LineEnding::CRLF).unwrap().to_string() + let secret: SecretKey = SecretKey::random(StdRng::from_seed([seed; 32])); + secret.to_pkcs8_pem(LineEnding::CRLF).unwrap().to_string() } // Cli should automatically create ephemeral batcher keys, but we need to supply named keyfiles // in a temp directory async fn bootstrap_root_state() -> (String, Stubstrate, TempDir) { - let root_key = key_from_seed(0); + let root_key = key_from_seed(0); - let keystore = tempfile::tempdir().unwrap(); - let keyfile_path = keystore.path().join("./opa-pk"); - std::fs::write(&keyfile_path, root_key.as_bytes()).unwrap(); + let keystore = tempfile::tempdir().unwrap(); + let keyfile_path = keystore.path().join("./opa-pk"); + std::fs::write(&keyfile_path, root_key.as_bytes()).unwrap(); - let matches = get_opactl_cmd(&format!( - "opactl --batcher-key-generated --keystore-path {} bootstrap", - keystore.path().display() - )); + let matches = 
get_opactl_cmd(&format!( + "opactl --batcher-key-generated --keystore-path {} bootstrap", + keystore.path().display() + )); - let stubstrate = Stubstrate::new(); + let stubstrate = Stubstrate::new(); - tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await; - dispatch_args(matches, &stubstrate).await.unwrap(); + tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await; + dispatch_args(matches, &stubstrate).await.unwrap(); - (root_key, stubstrate, keystore) + (root_key, stubstrate, keystore) } - //TODO: downloads #[tokio::test] #[ignore] async fn bootstrap_root_and_get_key() { - let (_root_key, opa_tp, _keystore) = bootstrap_root_state().await; - //Generate a key pem and set env vars - insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { + let (_root_key, opa_tp, _keystore) = bootstrap_root_state().await; + //Generate a key pem and set env vars + insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { ".**.date" => "[date]", ".**.key" => "[pem]", } ,@r###" @@ -67,15 +66,15 @@ async fn bootstrap_root_and_get_key() { expired: ~ "###); - tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await; + tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await; - let out_keyfile = NamedTempFile::new().unwrap(); + let out_keyfile = NamedTempFile::new().unwrap(); - let matches = get_opactl_cmd( - format!("opactl get-key --output {}", out_keyfile.path().display(), ).as_str(), - ); + let matches = get_opactl_cmd( + format!("opactl get-key --output {}", out_keyfile.path().display(),).as_str(), + ); - insta::assert_yaml_snapshot!( + insta::assert_yaml_snapshot!( dispatch_args(matches, &opa_tp) .await .unwrap(), @r###" @@ -86,14 +85,14 @@ async fn bootstrap_root_and_get_key() { #[tokio::test] async fn rotate_root() { - let (_root_key, opa_tp, keystore) = bootstrap_root_state().await; + let (_root_key, opa_tp, keystore) = bootstrap_root_state().await; - let new_root_key = key_from_seed(1); + let new_root_key = key_from_seed(1); - let keyfile_path = keystore.path().join("./new-root-1"); - std::fs::write(&keyfile_path, new_root_key.as_bytes()).unwrap(); + let keyfile_path = keystore.path().join("./new-root-1"); + std::fs::write(&keyfile_path, new_root_key.as_bytes()).unwrap(); - let matches = get_opactl_cmd( + let matches = get_opactl_cmd( format!( "opactl --batcher-key-generated --opa-key-from-path --keystore-path {} rotate-root --new-root-key new-root-1", keystore.path().display(), @@ -101,7 +100,7 @@ async fn rotate_root() { .as_str(), ); - insta::assert_yaml_snapshot!( + insta::assert_yaml_snapshot!( dispatch_args(matches, &opa_tp) .await .unwrap(), { @@ -123,7 +122,7 @@ async fn rotate_root() { correlation_id: "[correlation_id]" "###); - insta::assert_yaml_snapshot!(opa_tp.stored_keys(),{ + insta::assert_yaml_snapshot!(opa_tp.stored_keys(),{ ".**.date" => "[date]", ".**.key" => "[pem]", ".**.correlation_id" => "[correlation_id]" @@ -141,14 +140,14 @@ async fn rotate_root() { #[tokio::test] async fn register_and_rotate_key() { - let (_root_key, opa_tp, keystore) = bootstrap_root_state().await; + let (_root_key, opa_tp, keystore) = bootstrap_root_state().await; - let new_key = key_from_seed(1); + let new_key = key_from_seed(1); - let keyfile_path = keystore.path().join("./new-key-1"); - std::fs::write(&keyfile_path, new_key.as_bytes()).unwrap(); + let keyfile_path = keystore.path().join("./new-key-1"); + std::fs::write(&keyfile_path, new_key.as_bytes()).unwrap(); - let matches = get_opactl_cmd( + let matches = get_opactl_cmd( format!( "opactl --batcher-key-generated 
--keystore-path {} register-key --new-key new-key-1 --id test", keystore.path().display(), @@ -156,7 +155,7 @@ async fn register_and_rotate_key() { .as_str(), ); - insta::assert_yaml_snapshot!( + insta::assert_yaml_snapshot!( dispatch_args(matches,&opa_tp) .await .unwrap(), { @@ -176,7 +175,7 @@ async fn register_and_rotate_key() { correlation_id: "[correlation_id]" "###); - insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { + insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { ".**.date" => "[date]", ".**.key" => "[pem]", ".**.correlation_id" => "[correlation_id]" @@ -194,12 +193,12 @@ async fn register_and_rotate_key() { expired: ~ "###); - let new_key_2 = key_from_seed(1); + let new_key_2 = key_from_seed(1); - let keyfile_path = keystore.path().join("./new-key-2"); - std::fs::write(&keyfile_path, new_key_2.as_bytes()).unwrap(); + let keyfile_path = keystore.path().join("./new-key-2"); + std::fs::write(&keyfile_path, new_key_2.as_bytes()).unwrap(); - let matches = get_opactl_cmd( + let matches = get_opactl_cmd( format!( "opactl --batcher-key-generated --keystore-path {} rotate-key --current-key new-key-1 --new-key new-key-2 --id test", keystore.path().display(), @@ -207,7 +206,7 @@ async fn register_and_rotate_key() { .as_str(), ); - insta::assert_yaml_snapshot!( + insta::assert_yaml_snapshot!( dispatch_args(matches, &opa_tp) .await .unwrap(), { @@ -229,7 +228,7 @@ async fn register_and_rotate_key() { correlation_id: "[correlation_id]" "###); - insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { + insta::assert_yaml_snapshot!(opa_tp.stored_keys(), { ".**.date" => "[date]", ".**.key" => "[pem]", ".**.correlation_id" => "[correlation_id]" @@ -252,24 +251,24 @@ async fn register_and_rotate_key() { #[tokio::test] async fn set_and_update_policy() { - let (root_key, opa_tp, keystore) = bootstrap_root_state().await; + let (root_key, opa_tp, keystore) = bootstrap_root_state().await; - let mut root_keyfile = NamedTempFile::new().unwrap(); - root_keyfile.write_all(root_key.as_bytes()).unwrap(); + let mut root_keyfile = NamedTempFile::new().unwrap(); + root_keyfile.write_all(root_key.as_bytes()).unwrap(); - let mut policy = NamedTempFile::new().unwrap(); - policy.write_all(&[0]).unwrap(); + let mut policy = NamedTempFile::new().unwrap(); + policy.write_all(&[0]).unwrap(); - let matches = get_opactl_cmd( - format!( - "opactl --batcher-key-generated --keystore-path {} set-policy --id test --policy {}", - keystore.path().display(), - policy.path().display() - ) - .as_str(), - ); + let matches = get_opactl_cmd( + format!( + "opactl --batcher-key-generated --keystore-path {} set-policy --id test --policy {}", + keystore.path().display(), + policy.path().display() + ) + .as_str(), + ); - insta::assert_yaml_snapshot!(dispatch_args( + insta::assert_yaml_snapshot!(dispatch_args( matches, &opa_tp, ) @@ -320,7 +319,7 @@ async fn set_and_update_policy() { correlation_id: "[correlation_id]" "###); - insta::assert_yaml_snapshot!(opa_tp.stored_policy(), { + insta::assert_yaml_snapshot!(opa_tp.stored_policy(), { ".**.date" => "[date]", ".**.key" => "[pem]", ".**.correlation_id" => "[correlation_id]", @@ -363,18 +362,18 @@ async fn set_and_update_policy() { - 230 "###); - policy.write_all(&[1]).unwrap(); + policy.write_all(&[1]).unwrap(); - let matches = get_opactl_cmd( - format!( - "opactl --batcher-key-generated --keystore-path {} set-policy --id test --policy {}", - keystore.path().display(), - policy.path().display() - ) - .as_str(), - ); + let matches = get_opactl_cmd( + format!( + "opactl 
--batcher-key-generated --keystore-path {} set-policy --id test --policy {}", + keystore.path().display(), + policy.path().display() + ) + .as_str(), + ); - insta::assert_yaml_snapshot!(dispatch_args(matches, &opa_tp) + insta::assert_yaml_snapshot!(dispatch_args(matches, &opa_tp) .await .unwrap(), { ".**.date" => "[date]", @@ -422,7 +421,7 @@ async fn set_and_update_policy() { correlation_id: "[correlation_id]" "### ); - insta::assert_yaml_snapshot!(opa_tp.stored_policy(), { + insta::assert_yaml_snapshot!(opa_tp.stored_policy(), { ".**.date" => "[date]", ".**.key" => "[pem]", } ,@r###" diff --git a/crates/opactl/src/test/stubstrate.rs b/crates/opactl/src/test/stubstrate.rs index 93617e6c5..d60efc956 100644 --- a/crates/opactl/src/test/stubstrate.rs +++ b/crates/opactl/src/test/stubstrate.rs @@ -7,11 +7,11 @@ use subxt::metadata::{DecodeWithMetadata, EncodeWithMetadata}; use common::opa::{codec::OpaSubmissionV1, Keys, PolicyMeta}; use pallet_opa::{ChronicleTransactionId, Event}; use protocol_abstract::{ - BlockId, FromBlock, LedgerEvent, LedgerEventContext, LedgerReader, LedgerTransaction, - LedgerWriter, Position, Span, WriteConsistency, + BlockId, FromBlock, LedgerEvent, LedgerEventContext, LedgerReader, LedgerTransaction, + LedgerWriter, Position, Span, WriteConsistency, }; use protocol_substrate::{PolkadotConfig, SubstrateStateReader, SubxtClientError}; -use protocol_substrate_opa::{OpaEvent, OpaEventCodec, transaction::OpaTransaction}; +use protocol_substrate_opa::{transaction::OpaTransaction, OpaEvent, OpaEventCodec}; use crate::test::mockchain::System; @@ -19,144 +19,144 @@ use super::mockchain::{new_test_ext, OpaModule, RuntimeEvent, RuntimeOrigin, Tes #[derive(Clone)] pub struct Stubstrate { - rt: Arc>, - tx: tokio::sync::broadcast::Sender, - events: Arc>>, + rt: Arc>, + tx: tokio::sync::broadcast::Sender, + events: Arc>>, } impl Stubstrate { - pub fn new() -> Self { - let (tx, rx) = tokio::sync::broadcast::channel(100); - Self { rt: Arc::new(Mutex::new(new_test_ext())), tx, events: Arc::new(Mutex::new(vec![])) } - } - - #[tracing::instrument(skip(self))] - pub fn readable_events(&self) -> Vec { - self.events.lock().unwrap().clone() - } - - pub fn stored_keys(&self) -> Vec { - self.rt.lock().unwrap().execute_with(|| { - pallet_opa::KeyStore::::iter_values() - .map(|k| k.try_into().unwrap()) - .collect() - }) - } - - pub fn stored_policy(&self) -> Vec { - self.rt.lock().unwrap().execute_with(|| { - pallet_opa::PolicyMetaStore::::iter_values() - .map(|k| k.try_into().unwrap()) - .collect() - }) - } + pub fn new() -> Self { + let (tx, rx) = tokio::sync::broadcast::channel(100); + Self { rt: Arc::new(Mutex::new(new_test_ext())), tx, events: Arc::new(Mutex::new(vec![])) } + } + + #[tracing::instrument(skip(self))] + pub fn readable_events(&self) -> Vec { + self.events.lock().unwrap().clone() + } + + pub fn stored_keys(&self) -> Vec { + self.rt.lock().unwrap().execute_with(|| { + pallet_opa::KeyStore::::iter_values() + .map(|k| k.try_into().unwrap()) + .collect() + }) + } + + pub fn stored_policy(&self) -> Vec { + self.rt.lock().unwrap().execute_with(|| { + pallet_opa::PolicyMetaStore::::iter_values() + .map(|k| k.try_into().unwrap()) + .collect() + }) + } } #[async_trait::async_trait] impl LedgerReader for Stubstrate { - type Error = SubxtClientError; - type Event = OpaEvent; - type EventCodec = OpaEventCodec; - - async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { - unimplemented!(); - } - - /// Subscribe to state updates from this ledger, starting at `offset`, 
and - /// ending the stream after `number_of_blocks` blocks have been processed. - async fn state_updates( - &self, - // The block to start from - from_block: FromBlock, - // The number of blocks to process before ending the stream - number_of_blocks: Option, - ) -> Result>, Self::Error> { - tracing::debug!("Starting state updates stream from block {:?}", from_block); - let rx = self.tx.subscribe(); - let stream = tokio_stream::wrappers::BroadcastStream::new(rx) - .map(|event| { - let event = event.unwrap(); - let correlation_id = event.correlation_id().into(); - (event, correlation_id, BlockId::Unknown, Position::from(0), Span::NotTraced) - }) - .boxed(); - Ok(stream) - } + type Error = SubxtClientError; + type Event = OpaEvent; + type EventCodec = OpaEventCodec; + + async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { + unimplemented!(); + } + + /// Subscribe to state updates from this ledger, starting at `offset`, and + /// ending the stream after `number_of_blocks` blocks have been processed. + async fn state_updates( + &self, + // The block to start from + from_block: FromBlock, + // The number of blocks to process before ending the stream + number_of_blocks: Option, + ) -> Result>, Self::Error> { + tracing::debug!("Starting state updates stream from block {:?}", from_block); + let rx = self.tx.subscribe(); + let stream = tokio_stream::wrappers::BroadcastStream::new(rx) + .map(|event| { + let event = event.unwrap(); + let correlation_id = event.correlation_id().into(); + (event, correlation_id, BlockId::Unknown, Position::from(0), Span::NotTraced) + }) + .boxed(); + Ok(stream) + } } #[async_trait::async_trait] impl LedgerWriter for Stubstrate { - type Error = SubxtClientError; - type Submittable = OpaTransaction; - type Transaction = OpaTransaction; - - // Minimally process the transaction offline to get a transaction id and submittable type - async fn pre_submit( - &self, - tx: Self::Transaction, - ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { - let id = tx.correlation_id().into(); - Ok((tx, id)) - } - - // Submit is used to submit a transaction to the ledger - async fn do_submit( - &self, - _consistency: WriteConsistency, - submittable: Self::Submittable, - ) -> Result { - self.rt.lock().unwrap().execute_with(|| { - System::set_block_number(1); - OpaModule::apply( - RuntimeOrigin::signed(1), - OpaSubmissionV1::from(submittable.submission().clone()), - ) - .unwrap(); - - let ev = System::events().last().unwrap().event.clone(); - - let opa_event = match ev { - RuntimeEvent::OpaModule(event) => match event { - Event::::PolicyUpdate(meta, id) => Some(OpaEvent::PolicyUpdate { - policy: meta.try_into().unwrap(), - correlation_id: id, - }), - Event::::KeyUpdate(keys, id) => Some(OpaEvent::KeyUpdate { - keys: keys.try_into().unwrap(), - correlation_id: id, - }), - _ => None, - }, - _ => None, - }; - - if let Some(event) = opa_event { - self.events.lock().unwrap().push(event.clone()); - self.tx.send(event).unwrap(); - } else { - tracing::warn!("Received an event that is not an OpaEvent"); - } - }); - - Ok(submittable.correlation_id().into()) - } + type Error = SubxtClientError; + type Submittable = OpaTransaction; + type Transaction = OpaTransaction; + + // Minimally process the transaction offline to get a transaction id and submittable type + async fn pre_submit( + &self, + tx: Self::Transaction, + ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { + let id = tx.correlation_id().into(); + Ok((tx, id)) + } + + // Submit is 
used to submit a transaction to the ledger + async fn do_submit( + &self, + _consistency: WriteConsistency, + submittable: Self::Submittable, + ) -> Result { + self.rt.lock().unwrap().execute_with(|| { + System::set_block_number(1); + OpaModule::apply( + RuntimeOrigin::signed(1), + OpaSubmissionV1::from(submittable.submission().clone()), + ) + .unwrap(); + + let ev = System::events().last().unwrap().event.clone(); + + let opa_event = match ev { + RuntimeEvent::OpaModule(event) => match event { + Event::::PolicyUpdate(meta, id) => Some(OpaEvent::PolicyUpdate { + policy: meta.try_into().unwrap(), + correlation_id: id, + }), + Event::::KeyUpdate(keys, id) => Some(OpaEvent::KeyUpdate { + keys: keys.try_into().unwrap(), + correlation_id: id, + }), + _ => None, + }, + _ => None, + }; + + if let Some(event) = opa_event { + self.events.lock().unwrap().push(event.clone()); + self.tx.send(event).unwrap(); + } else { + tracing::warn!("Received an event that is not an OpaEvent"); + } + }); + + Ok(submittable.correlation_id().into()) + } } #[async_trait::async_trait] impl SubstrateStateReader for Stubstrate { - type Error = SubxtClientError; - - async fn get_state_entry( - &self, - pallet_name: &str, - entry_name: &str, - address: K, - ) -> Result, Self::Error> { - tracing::info!( + type Error = SubxtClientError; + + async fn get_state_entry( + &self, + pallet_name: &str, + entry_name: &str, + address: K, + ) -> Result, Self::Error> { + tracing::info!( "Attempting to retrieve state entry for pallet: {}, entry: {}", pallet_name, entry_name ); - unimplemented!() - } + unimplemented!() + } } diff --git a/crates/pallet-chronicle/src/lib.rs b/crates/pallet-chronicle/src/lib.rs index 2dede4d0c..fd0cf4902 100644 --- a/crates/pallet-chronicle/src/lib.rs +++ b/crates/pallet-chronicle/src/lib.rs @@ -20,7 +20,7 @@ mod tests; pub mod weights; pub mod chronicle_core { - pub use common::{ledger::*, prov::*}; + pub use common::{ledger::*, prov::*}; } pub use weights::*; @@ -28,215 +28,215 @@ pub use weights::*; // A configuration type for opa settings, serializable to JSON etc #[derive(frame_support::Serialize, frame_support::Deserialize)] pub struct OpaConfiguration { - pub policy_name: scale_info::prelude::string::String, - pub entrypoint: scale_info::prelude::string::String, + pub policy_name: scale_info::prelude::string::String, + pub entrypoint: scale_info::prelude::string::String, } #[frame_support::pallet] pub mod pallet { - use super::*; - use common::ledger::OperationSubmission; - use frame_support::{pallet_prelude::*, traits::BuildGenesisConfig}; - use frame_system::pallet_prelude::*; - use sp_core::blake2_128; - use sp_std::{collections::btree_set::BTreeSet, vec::Vec}; - - #[pallet::pallet] - pub struct Pallet(_); - - /// Configure the pallet by specifying the parameters and types on which it depends. - #[pallet::config] - pub trait Config: frame_system::Config { - /// Because this pallet emits events, it depends on the runtime's definition of an event. 
- type RuntimeEvent: From> + IsType<::RuntimeEvent>; - /// Type representing the weight of this pallet - type WeightInfo: WeightInfo; - - type OperationSubmission: Parameter + Into + parity_scale_codec::Codec; - } - - /// Genesis configuration, whether or not we need to enforce OPA policies - #[pallet::genesis_config] - pub struct GenesisConfig { - pub opa_settings: Option, - pub _phantom: PhantomData, - } - - impl Default for GenesisConfig { - fn default() -> Self { - Self { opa_settings: None, _phantom: PhantomData } - } - } - - #[pallet::genesis_build] - impl BuildGenesisConfig for GenesisConfig { - fn build(&self) { - tracing::info!("Chronicle: Building genesis configuration."); - if let Some(ref settings) = self.opa_settings { - OpaSettings::::put(Some(common::opa::OpaSettings { - policy_address: common::opa::PolicyAddress::from(blake2_128( - settings.policy_name.as_bytes(), - )), - policy_name: settings.policy_name.clone(), - entrypoint: settings.entrypoint.clone(), - })); - tracing::debug!("Chronicle: OPA settings are set."); - } else { - OpaSettings::::put(None::); - } - } - } - - #[pallet::storage] - #[pallet::getter(fn prov)] - pub type Provenance = StorageMap<_, Twox128, ChronicleAddress, common::prov::ProvModel>; - - #[pallet::storage] - #[pallet::getter(fn get_opa_settings)] - pub type OpaSettings = StorageValue<_, Option>; - - // Pallets use events to inform users when important changes are made. - // https://docs.substrate.io/main-docs/build/events-errors/ - #[pallet::event] - #[pallet::generate_deposit(pub (super) fn deposit_event)] - pub enum Event { - Applied(common::prov::ProvModel, common::identity::SignedIdentity, [u8; 16]), - Contradiction(common::prov::Contradiction, common::identity::SignedIdentity, [u8; 16]), - } - - // Errors inform users that something went wrong. - #[pallet::error] - pub enum Error { - Address, - Contradiction, - Compaction, - Expansion, - Identity, - IRef, - NotAChronicleIri, - MissingId, - MissingProperty, - NotANode, - NotAnObject, - OpaExecutor, - SerdeJson, - SubmissionFormat, - Time, - Tokio, - Utf8, - } - - impl From for Error { - fn from(error: common::prov::ProcessorError) -> Self { - match error { - common::prov::ProcessorError::Address => Error::Address, - common::prov::ProcessorError::Contradiction { .. } => Error::Contradiction, - common::prov::ProcessorError::Identity(_) => Error::Identity, - common::prov::ProcessorError::NotAChronicleIri { .. } => Error::NotAChronicleIri, - common::prov::ProcessorError::MissingId { .. } => Error::MissingId, - common::prov::ProcessorError::MissingProperty { .. } => Error::MissingProperty, - common::prov::ProcessorError::NotANode(_) => Error::NotANode, - common::prov::ProcessorError::NotAnObject => Error::NotAnObject, - common::prov::ProcessorError::OpaExecutor(_) => Error::OpaExecutor, - common::prov::ProcessorError::SerdeJson(_) => Error::SerdeJson, - common::prov::ProcessorError::SubmissionFormat(_) => Error::SubmissionFormat, - common::prov::ProcessorError::Time(_) => Error::Time, - common::prov::ProcessorError::Tokio => Error::Tokio, - common::prov::ProcessorError::Utf8(_) => Error::Utf8, - _ => unreachable!(), - } - } - } - - // Dispatchable functions allows users to interact with the pallet and invoke state changes. - // These functions materialize as "extrinsics", which are often compared to transactions. - // Dispatchable functions must be annotated with a weight and must return a DispatchResult. 
- #[pallet::call] - impl Pallet { - // Apply a vector of chronicle operations, yielding an event that indicates state change or - // contradiction - #[pallet::call_index(0)] - #[pallet::weight({ + use super::*; + use common::ledger::OperationSubmission; + use frame_support::{pallet_prelude::*, traits::BuildGenesisConfig}; + use frame_system::pallet_prelude::*; + use sp_core::blake2_128; + use sp_std::{collections::btree_set::BTreeSet, vec::Vec}; + + #[pallet::pallet] + pub struct Pallet(_); + + /// Configure the pallet by specifying the parameters and types on which it depends. + #[pallet::config] + pub trait Config: frame_system::Config { + /// Because this pallet emits events, it depends on the runtime's definition of an event. + type RuntimeEvent: From> + IsType<::RuntimeEvent>; + /// Type representing the weight of this pallet + type WeightInfo: WeightInfo; + + type OperationSubmission: Parameter + Into + parity_scale_codec::Codec; + } + + /// Genesis configuration, whether or not we need to enforce OPA policies + #[pallet::genesis_config] + pub struct GenesisConfig { + pub opa_settings: Option, + pub _phantom: PhantomData, + } + + impl Default for GenesisConfig { + fn default() -> Self { + Self { opa_settings: None, _phantom: PhantomData } + } + } + + #[pallet::genesis_build] + impl BuildGenesisConfig for GenesisConfig { + fn build(&self) { + tracing::info!("Chronicle: Building genesis configuration."); + if let Some(ref settings) = self.opa_settings { + OpaSettings::::put(Some(common::opa::OpaSettings { + policy_address: common::opa::PolicyAddress::from(blake2_128( + settings.policy_name.as_bytes(), + )), + policy_name: settings.policy_name.clone(), + entrypoint: settings.entrypoint.clone(), + })); + tracing::debug!("Chronicle: OPA settings are set."); + } else { + OpaSettings::::put(None::); + } + } + } + + #[pallet::storage] + #[pallet::getter(fn prov)] + pub type Provenance = StorageMap<_, Twox128, ChronicleAddress, common::prov::ProvModel>; + + #[pallet::storage] + #[pallet::getter(fn get_opa_settings)] + pub type OpaSettings = StorageValue<_, Option>; + + // Pallets use events to inform users when important changes are made. + // https://docs.substrate.io/main-docs/build/events-errors/ + #[pallet::event] + #[pallet::generate_deposit(pub (super) fn deposit_event)] + pub enum Event { + Applied(common::prov::ProvModel, common::identity::SignedIdentity, [u8; 16]), + Contradiction(common::prov::Contradiction, common::identity::SignedIdentity, [u8; 16]), + } + + // Errors inform users that something went wrong. + #[pallet::error] + pub enum Error { + Address, + Contradiction, + Compaction, + Expansion, + Identity, + IRef, + NotAChronicleIri, + MissingId, + MissingProperty, + NotANode, + NotAnObject, + OpaExecutor, + SerdeJson, + SubmissionFormat, + Time, + Tokio, + Utf8, + } + + impl From for Error { + fn from(error: common::prov::ProcessorError) -> Self { + match error { + common::prov::ProcessorError::Address => Error::Address, + common::prov::ProcessorError::Contradiction { .. } => Error::Contradiction, + common::prov::ProcessorError::Identity(_) => Error::Identity, + common::prov::ProcessorError::NotAChronicleIri { .. } => Error::NotAChronicleIri, + common::prov::ProcessorError::MissingId { .. } => Error::MissingId, + common::prov::ProcessorError::MissingProperty { .. 
} => Error::MissingProperty,
+				common::prov::ProcessorError::NotANode(_) => Error::NotANode,
+				common::prov::ProcessorError::NotAnObject => Error::NotAnObject,
+				common::prov::ProcessorError::OpaExecutor(_) => Error::OpaExecutor,
+				common::prov::ProcessorError::SerdeJson(_) => Error::SerdeJson,
+				common::prov::ProcessorError::SubmissionFormat(_) => Error::SubmissionFormat,
+				common::prov::ProcessorError::Time(_) => Error::Time,
+				common::prov::ProcessorError::Tokio => Error::Tokio,
+				common::prov::ProcessorError::Utf8(_) => Error::Utf8,
+				_ => unreachable!(),
+			}
+		}
+	}
+
+	// Dispatchable functions allow users to interact with the pallet and invoke state changes.
+	// These functions materialize as "extrinsics", which are often compared to transactions.
+	// Dispatchable functions must be annotated with a weight and must return a DispatchResult.
+	#[pallet::call]
+	impl<T: Config> Pallet<T> {
+		// Apply a vector of chronicle operations, yielding an event that indicates state change or
+		// contradiction
+		#[pallet::call_index(0)]
+		#[pallet::weight({
        let weight = T::WeightInfo::operation_weight(& operations.items);
        let dispatch_class = DispatchClass::Normal;
        let pays_fee = Pays::No;
        (weight, dispatch_class, pays_fee)
        })]
+		pub fn apply(origin: OriginFor<T>, operations: OperationSubmission) -> DispatchResult {
+			// Check that the extrinsic was signed and get the signer.
+ // This function will return an error if the extrinsic is not signed. + // https://docs.substrate.io/main-docs/build/origins/ + let _who = ensure_signed(origin)?; + + // Get operations and load tßheir dependencies + let deps = operations + .items + .iter() + .flat_map(|tx| tx.dependencies()) + .collect::>(); + + let initial_input_models: Vec<_> = deps + .into_iter() + .map(|addr| (addr.clone(), Provenance::::get(&addr))) + .collect(); + + let mut state: common::ledger::OperationState = common::ledger::OperationState::new(); + + state.update_state(initial_input_models.into_iter()); + + let mut model = common::prov::ProvModel::default(); + + for op in operations.items.iter() { + let res = op.process(model, state.input()); + match res { + // A contradiction raises an event, not an error and shortcuts processing - + // contradiction attempts are useful provenance and should not be a purely + // operational concern + Err(common::prov::ProcessorError::Contradiction(source)) => { + tracing::info!(contradiction = %source); + + Self::deposit_event(Event::::Contradiction( + source, + (*operations.identity).clone(), + operations.correlation_id, + )); + + return Ok(()); + }, + // Severe errors should be logged + Err(e) => { + tracing::error!(chronicle_prov_failure = %e); + + return Err(Error::::from(e).into()); + }, + Ok((tx_output, updated_model)) => { + state.update_state_from_output(tx_output.into_iter()); + model = updated_model; + }, + } + } + + // Compute delta + let dirty = state.dirty().collect::>(); + + tracing::trace!(dirty = ?dirty); + + let mut delta = common::prov::ProvModel::default(); + for common::ledger::StateOutput { address, data } in dirty { + delta.combine(&data); + + // Update storage. + Provenance::::set(&address, Some(data)); + } + + // Emit an event. 
+ Self::deposit_event(Event::Applied( + delta, + (*operations.identity).clone(), + operations.correlation_id, + )); + // Return a successful DispatchResultWithPostInfo + Ok(()) + } + } } diff --git a/crates/pallet-chronicle/src/mock.rs b/crates/pallet-chronicle/src/mock.rs index 17c30eaa7..f03eb5fb2 100644 --- a/crates/pallet-chronicle/src/mock.rs +++ b/crates/pallet-chronicle/src/mock.rs @@ -3,8 +3,8 @@ use common::ledger::OperationSubmission; use frame_support::traits::{ConstU16, ConstU64}; use sp_core::H256; use sp_runtime::{ - traits::{BlakeTwo256, IdentityLookup}, - BuildStorage, + traits::{BlakeTwo256, IdentityLookup}, + BuildStorage, }; type Block = frame_system::mocking::MockBlock; @@ -19,44 +19,44 @@ frame_support::construct_runtime!( ); impl frame_system::Config for Test { - type AccountData = (); - type AccountId = u64; - type BaseCallFilter = frame_support::traits::Everything; - type Block = Block; - type BlockHashCount = ConstU64<250>; - type BlockLength = (); - type BlockWeights = (); - type DbWeight = (); - type Hash = H256; - type Hashing = BlakeTwo256; - type Lookup = IdentityLookup; - type MaxConsumers = frame_support::traits::ConstU32<16>; - type Nonce = u64; - type OnKilledAccount = (); - type OnNewAccount = (); - type OnSetCode = (); - type PalletInfo = PalletInfo; - type RuntimeCall = RuntimeCall; - type RuntimeEvent = RuntimeEvent; - type RuntimeOrigin = RuntimeOrigin; - type SS58Prefix = ConstU16<42>; - type SystemWeightInfo = (); - type Version = (); - type RuntimeTask = (); - type SingleBlockMigrations = (); - type MultiBlockMigrator = (); - type PreInherents = (); - type PostInherents = (); - type PostTransactions = (); + type AccountData = (); + type AccountId = u64; + type BaseCallFilter = frame_support::traits::Everything; + type Block = Block; + type BlockHashCount = ConstU64<250>; + type BlockLength = (); + type BlockWeights = (); + type DbWeight = (); + type Hash = H256; + type Hashing = BlakeTwo256; + type Lookup = IdentityLookup; + type MaxConsumers = frame_support::traits::ConstU32<16>; + type MultiBlockMigrator = (); + type Nonce = u64; + type OnKilledAccount = (); + type OnNewAccount = (); + type OnSetCode = (); + type PalletInfo = PalletInfo; + type PostInherents = (); + type PostTransactions = (); + type PreInherents = (); + type RuntimeCall = RuntimeCall; + type RuntimeEvent = RuntimeEvent; + type RuntimeOrigin = RuntimeOrigin; + type RuntimeTask = (); + type SS58Prefix = ConstU16<42>; + type SingleBlockMigrations = (); + type SystemWeightInfo = (); + type Version = (); } impl pallet_template::Config for Test { - type OperationSubmission = OperationSubmission; - type RuntimeEvent = RuntimeEvent; - type WeightInfo = (); + type OperationSubmission = OperationSubmission; + type RuntimeEvent = RuntimeEvent; + type WeightInfo = (); } // Build genesis storage according to the mock runtime. 
pub fn new_test_ext() -> sp_io::TestExternalities { - frame_system::GenesisConfig::::default().build_storage().unwrap().into() + frame_system::GenesisConfig::::default().build_storage().unwrap().into() } diff --git a/crates/pallet-chronicle/src/tests.rs b/crates/pallet-chronicle/src/tests.rs index 4c16033f3..9de9a02dc 100644 --- a/crates/pallet-chronicle/src/tests.rs +++ b/crates/pallet-chronicle/src/tests.rs @@ -1,61 +1,61 @@ use crate::{mock::*, Event}; use common::{ - ledger::OperationSubmission, - prov::{ - operations::{ChronicleOperation, CreateNamespace}, - NamespaceId, - }, + ledger::OperationSubmission, + prov::{ + operations::{ChronicleOperation, CreateNamespace}, + NamespaceId, + }, }; use frame_support::assert_ok; use uuid::Uuid; #[test] fn it_works_for_default_value() { - new_test_ext().execute_with(|| { - // Go past genesis block so events get deposited - System::set_block_number(1); - let op = OperationSubmission::new_anonymous(Uuid::from_bytes([0u8; 16]), vec![]); - // Dispatch a signed extrinsic. - assert_ok!(ChronicleModule::apply(RuntimeOrigin::signed(1), op.clone())); - // Assert that the correct event was deposited - System::assert_last_event( - Event::Applied( - common::prov::ProvModel::default(), - common::identity::SignedIdentity::new_no_identity(), - op.correlation_id, - ) - .into(), - ); - }); + new_test_ext().execute_with(|| { + // Go past genesis block so events get deposited + System::set_block_number(1); + let op = OperationSubmission::new_anonymous(Uuid::from_bytes([0u8; 16]), vec![]); + // Dispatch a signed extrinsic. + assert_ok!(ChronicleModule::apply(RuntimeOrigin::signed(1), op.clone())); + // Assert that the correct event was deposited + System::assert_last_event( + Event::Applied( + common::prov::ProvModel::default(), + common::identity::SignedIdentity::new_no_identity(), + op.correlation_id, + ) + .into(), + ); + }); } #[test] fn single_operation() { - chronicle_telemetry::telemetry(false, chronicle_telemetry::ConsoleLogging::Pretty); - new_test_ext().execute_with(|| { - // Go past genesis block so events get deposited - System::set_block_number(1); - let uuid = Uuid::from_u128(0u128); - let op = ChronicleOperation::CreateNamespace(CreateNamespace { - id: NamespaceId::from_external_id("test", uuid), - }); + chronicle_telemetry::telemetry(chronicle_telemetry::ConsoleLogging::Pretty); + new_test_ext().execute_with(|| { + // Go past genesis block so events get deposited + System::set_block_number(1); + let uuid = Uuid::from_u128(0u128); + let op = ChronicleOperation::CreateNamespace(CreateNamespace { + id: NamespaceId::from_external_id("test", uuid), + }); - let sub = OperationSubmission::new_anonymous(Uuid::from_bytes([0u8; 16]), vec![op.clone()]); - // Dispatch our operation - assert_ok!(ChronicleModule::apply(RuntimeOrigin::signed(1), sub.clone(),)); + let sub = OperationSubmission::new_anonymous(Uuid::from_bytes([0u8; 16]), vec![op.clone()]); + // Dispatch our operation + assert_ok!(ChronicleModule::apply(RuntimeOrigin::signed(1), sub.clone(),)); - // Apply that operation to a new prov model for assertion - - // the pallet execution should produce an identical delta - let mut delta_model = common::prov::ProvModel::default(); - delta_model.apply(&op).unwrap(); - // Assert that the delta is correct - System::assert_last_event( - Event::Applied( - delta_model, - common::identity::SignedIdentity::new_no_identity(), - sub.correlation_id, - ) - .into(), - ); - }); + // Apply that operation to a new prov model for assertion - + // the pallet execution should 
produce an identical delta + let mut delta_model = common::prov::ProvModel::default(); + delta_model.apply(&op).unwrap(); + // Assert that the delta is correct + System::assert_last_event( + Event::Applied( + delta_model, + common::identity::SignedIdentity::new_no_identity(), + sub.correlation_id, + ) + .into(), + ); + }); } diff --git a/crates/pallet-opa/src/lib.rs b/crates/pallet-opa/src/lib.rs index e2934302a..cbdb18f77 100644 --- a/crates/pallet-opa/src/lib.rs +++ b/crates/pallet-opa/src/lib.rs @@ -5,27 +5,27 @@ use core::convert::Infallible; /// Re-export types required for runtime pub use common::prov::*; use common::{ - k256::ecdsa::{Signature, VerifyingKey}, - opa::{ - codec::{NewPublicKeyV1, OpaSubmissionV1, PayloadV1, SignedOperationV1}, - BootstrapRoot, KeyAddress, KeyRegistration, Keys, OpaSubmission, Operation, Payload, - PolicyAddress, PolicyMeta, PolicyMetaAddress, RegisterKey, RotateKey, SetPolicy, - SignedOperation, SignedOperationPayload, - }, + k256::ecdsa::{Signature, VerifyingKey}, + opa::{ + codec::{NewPublicKeyV1, OpaSubmissionV1, PayloadV1, SignedOperationV1}, + BootstrapRoot, KeyAddress, KeyRegistration, Keys, OpaSubmission, Operation, Payload, + PolicyAddress, PolicyMeta, PolicyMetaAddress, RegisterKey, RotateKey, SetPolicy, + SignedOperation, SignedOperationPayload, + }, }; use scale_info::prelude::format; pub fn policy_address(id: impl AsRef) -> PolicyAddress { - blake2_128(format!("opa:policy:binary:{}", id.as_ref()).as_bytes()).into() + blake2_128(format!("opa:policy:binary:{}", id.as_ref()).as_bytes()).into() } pub fn policy_meta_address(id: impl AsRef) -> PolicyMetaAddress { - blake2_128(format!("opa:policy:meta:{}", id.as_ref()).as_bytes()).into() + blake2_128(format!("opa:policy:meta:{}", id.as_ref()).as_bytes()).into() } pub fn key_address(id: impl AsRef) -> KeyAddress { - blake2_128(format!("opa:keys:{}", id.as_ref()).as_bytes()).into() + blake2_128(format!("opa:keys:{}", id.as_ref()).as_bytes()).into() } /// Edit this file to define custom logic or remove it if it is not needed. @@ -38,7 +38,7 @@ pub use pallet::*; pub mod weights; pub mod opa_core { - pub use common::{ledger::*, opa::*}; + pub use common::{ledger::*, opa::*}; } use parity_scale_codec::Encode; @@ -48,15 +48,15 @@ pub use weights::*; #[derive(Debug)] enum OpaError { - OperationSignatureVerification, - InvalidSigningKey, - InvalidOperation, + OperationSignatureVerification, + InvalidSigningKey, + InvalidOperation, } impl From for OpaError { - fn from(_: Infallible) -> Self { - unreachable!() - } + fn from(_: Infallible) -> Self { + unreachable!() + } } // Verifies the submission. 
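The hunk below reindents `verify_signed_operation`, which accepts a `BootstrapRoot` payload unconditionally but requires every `SignedOperation` to carry a signature over the SCALE-encoded payload that verifies against the submitted PEM-encoded key, and then requires that key to match the registered root key. A minimal standalone sketch of the raw signature check, assuming the same `k256` ECDSA and PKCS#8/PEM APIs this file already imports (the function name and `bool` return are illustrative, not part of the diff):

```rust
use k256::{
    ecdsa::{signature::Verifier, Signature, VerifyingKey},
    pkcs8::DecodePublicKey,
};

/// Check `signature` over `payload_bytes` against a PEM (SPKI) encoded key.
/// Parse failures and bad signatures are both treated as "not verified",
/// mirroring how the pallet collapses them into
/// `OpaError::OperationSignatureVerification`.
fn verify_payload(payload_bytes: &[u8], signature: &[u8], key_pem: &str) -> bool {
    let Ok(key) = VerifyingKey::from_public_key_pem(key_pem) else {
        return false;
    };
    let Ok(signature) = Signature::try_from(signature) else {
        return false;
    };
    key.verify(payload_bytes, &signature).is_ok()
}
```

Note that the raw check alone is not sufficient: as the code below shows, the submitted verifying key must also equal the registered root key's `current.key`, otherwise any self-signed payload would pass.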
@@ -66,329 +66,329 @@ impl From for OpaError { // key of the operation #[instrument(skip(submission, root_keys), ret(Debug))] fn verify_signed_operation( - submission: &OpaSubmissionV1, - root_keys: &Option, + submission: &OpaSubmissionV1, + root_keys: &Option, ) -> Result<(), OpaError> { - use k256::ecdsa::signature::Verifier; - match &submission.payload { - PayloadV1::BootstrapRoot(_) => Ok(()), - PayloadV1::SignedOperation(SignedOperationV1 { payload, verifying_key, signature }) => { - if root_keys.is_none() { - error!("No registered root keys for signature verification"); - return Err(OpaError::OperationSignatureVerification); - } - let payload_bytes = payload.encode(); - let signature: Signature = k256::ecdsa::signature::Signature::from_bytes(signature) - .map_err(|e| { - error!(signature = ?signature, signature_load_error = ?e); - OpaError::OperationSignatureVerification - })?; - let signing_key = ::from_public_key_pem( - verifying_key.as_str(), - ) - .map_err(|e| { - error!(verifying_key = ?verifying_key, key_load_error = ?e); - OpaError::OperationSignatureVerification - })?; - if let Err(e) = signing_key.verify(&payload_bytes, &signature) { - error!(signature = ?signature, verify_error = ?e); - return Err(OpaError::OperationSignatureVerification); - } - - if *verifying_key == root_keys.as_ref().unwrap().current.key { - Ok(()) - } else { - error!(verifying_key = ?verifying_key, current_key = ?root_keys.as_ref().unwrap().current.key, "Invalid signing key"); - Err(OpaError::InvalidSigningKey) - } - } - } + use k256::ecdsa::signature::Verifier; + match &submission.payload { + PayloadV1::BootstrapRoot(_) => Ok(()), + PayloadV1::SignedOperation(SignedOperationV1 { payload, verifying_key, signature }) => { + if root_keys.is_none() { + error!("No registered root keys for signature verification"); + return Err(OpaError::OperationSignatureVerification); + } + let payload_bytes = payload.encode(); + let signature: Signature = k256::ecdsa::signature::Signature::from_bytes(signature) + .map_err(|e| { + error!(signature = ?signature, signature_load_error = ?e); + OpaError::OperationSignatureVerification + })?; + let signing_key = ::from_public_key_pem( + verifying_key.as_str(), + ) + .map_err(|e| { + error!(verifying_key = ?verifying_key, key_load_error = ?e); + OpaError::OperationSignatureVerification + })?; + if let Err(e) = signing_key.verify(&payload_bytes, &signature) { + error!(signature = ?signature, verify_error = ?e); + return Err(OpaError::OperationSignatureVerification); + } + + if *verifying_key == root_keys.as_ref().unwrap().current.key { + Ok(()) + } else { + error!(verifying_key = ?verifying_key, current_key = ?root_keys.as_ref().unwrap().current.key, "Invalid signing key"); + Err(OpaError::InvalidSigningKey) + } + }, + } } // Either apply our bootstrap operation or our signed operation #[instrument(skip(payload), ret(Debug))] fn apply_signed_operation( - correlation_id: ChronicleTransactionId, - payload: Payload, + correlation_id: ChronicleTransactionId, + payload: Payload, ) -> Result<(), OpaError> { - use scale_info::prelude::string::ToString; - match payload { - Payload::BootstrapRoot(BootstrapRoot { public_key }) => { - let existing_key = pallet::KeyStore::::try_get(key_address("root")); - - if existing_key.is_ok() { - error!("OPA TP has already been bootstrapped"); - return Err(OpaError::InvalidOperation); - } - - let keys = Keys { - id: "root".to_string(), - current: KeyRegistration { key: public_key, version: 0 }, - expired: None, - }; - - 
pallet::KeyStore::::set(key_address("root"), Some(keys.clone().into())); - - pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( - keys.into(), - correlation_id, - )); - - Ok(()) - } - Payload::SignedOperation(SignedOperation { - payload: SignedOperationPayload { operation }, - verifying_key: _, - signature: _, - }) => apply_signed_operation_payload::(correlation_id, operation), - } + use scale_info::prelude::string::ToString; + match payload { + Payload::BootstrapRoot(BootstrapRoot { public_key }) => { + let existing_key = pallet::KeyStore::::try_get(key_address("root")); + + if existing_key.is_ok() { + error!("OPA TP has already been bootstrapped"); + return Err(OpaError::InvalidOperation); + } + + let keys = Keys { + id: "root".to_string(), + current: KeyRegistration { key: public_key, version: 0 }, + expired: None, + }; + + pallet::KeyStore::::set(key_address("root"), Some(keys.clone().into())); + + pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( + keys.into(), + correlation_id, + )); + + Ok(()) + }, + Payload::SignedOperation(SignedOperation { + payload: SignedOperationPayload { operation }, + verifying_key: _, + signature: _, + }) => apply_signed_operation_payload::(correlation_id, operation), + } } #[instrument(skip(payload), ret(Debug))] fn apply_signed_operation_payload( - correlation_id: ChronicleTransactionId, - payload: Operation, + correlation_id: ChronicleTransactionId, + payload: Operation, ) -> Result<(), OpaError> { - match payload { - Operation::RegisterKey(RegisterKey { public_key, id, overwrite_existing }) => { - if id == "root" { - error!("Cannot register a key with the id 'root'"); - return Err(OpaError::InvalidOperation); - } - - let existing_key = pallet::KeyStore::::try_get(key_address(&id)); - - if existing_key.is_ok() { - if overwrite_existing { - tracing::debug!("Registration replaces existing key"); - } else { - error!("Key already registered"); - return Err(OpaError::InvalidOperation); - } - } - - let keys = Keys { - id, - current: KeyRegistration { key: public_key, version: 0 }, - expired: None, - }; - - pallet::KeyStore::::set(key_address(&keys.id), Some(keys.clone().into())); - - pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( - keys.into(), - correlation_id, - )); - - Ok(()) - } - Operation::RotateKey(RotateKey { - payload, - previous_signing_key, - previous_signature, - new_signing_key, - new_signature, - }) => { - // Get current key registration from state - let existing_key = pallet::KeyStore::::try_get(key_address(&payload.id)); - - if existing_key.is_err() { - error!("No key to rotate"); - return Err(OpaError::InvalidOperation); - } - - let existing_key = existing_key.unwrap(); - - if previous_signing_key != existing_key.current.key { - error!("Key does not match current key"); - return Err(OpaError::InvalidOperation); - } - - let payload_id = payload.id.clone(); - let payload_bytes: NewPublicKeyV1 = payload.into(); - // Verify the previous key and signature - let payload_bytes = payload_bytes.encode(); - let previous_signature = Signature::try_from(&*previous_signature) - .map_err(|_| OpaError::OperationSignatureVerification)?; - let previous_key = ::from_public_key_pem( - previous_signing_key.as_str(), - ) - .map_err(|_| OpaError::OperationSignatureVerification)?; - - k256::ecdsa::signature::Verifier::verify( - &previous_key, - &payload_bytes, - &previous_signature, - ) - .map_err(|_| OpaError::OperationSignatureVerification)?; - - //Verify the new key and signature - let new_signature = 
Signature::try_from(&*new_signature) - .map_err(|_| OpaError::OperationSignatureVerification)?; - let new_key = ::from_public_key_pem( - new_signing_key.as_str(), - ) - .map_err(|_| OpaError::OperationSignatureVerification)?; - - k256::ecdsa::signature::Verifier::verify(&new_key, &payload_bytes, &new_signature) - .map_err(|_| OpaError::OperationSignatureVerification)?; - - //Store new keys - let keys = Keys { - id: payload_id, - current: KeyRegistration { - key: new_signing_key, - version: existing_key.current.version + 1, - }, - expired: Some(KeyRegistration { - key: previous_signing_key, - version: existing_key.current.version, - }), - }; - - pallet::KeyStore::::set(key_address(&keys.id), Some(keys.clone().into())); - - pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( - keys.into(), - correlation_id, - )); - - Ok(()) - } - Operation::SetPolicy(SetPolicy { policy, id }) => { - let hash = sp_core::blake2_128(policy.as_bytes()); - - let meta = PolicyMeta { - id: id.clone(), - hash: hash.into(), - policy_address: policy_address(&*id), - }; - - pallet::PolicyMetaStore::::set(policy_meta_address(&*id), Some(meta.clone().into())); - - pallet::PolicyStore::::set(policy_address(&*id), Some(policy.into())); - - pallet::Pallet::::deposit_event(pallet::Event::::PolicyUpdate( - meta.into(), - correlation_id, - )); - - Ok(()) - } - } + match payload { + Operation::RegisterKey(RegisterKey { public_key, id, overwrite_existing }) => { + if id == "root" { + error!("Cannot register a key with the id 'root'"); + return Err(OpaError::InvalidOperation); + } + + let existing_key = pallet::KeyStore::::try_get(key_address(&id)); + + if existing_key.is_ok() { + if overwrite_existing { + tracing::debug!("Registration replaces existing key"); + } else { + error!("Key already registered"); + return Err(OpaError::InvalidOperation); + } + } + + let keys = Keys { + id, + current: KeyRegistration { key: public_key, version: 0 }, + expired: None, + }; + + pallet::KeyStore::::set(key_address(&keys.id), Some(keys.clone().into())); + + pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( + keys.into(), + correlation_id, + )); + + Ok(()) + }, + Operation::RotateKey(RotateKey { + payload, + previous_signing_key, + previous_signature, + new_signing_key, + new_signature, + }) => { + // Get current key registration from state + let existing_key = pallet::KeyStore::::try_get(key_address(&payload.id)); + + if existing_key.is_err() { + error!("No key to rotate"); + return Err(OpaError::InvalidOperation); + } + + let existing_key = existing_key.unwrap(); + + if previous_signing_key != existing_key.current.key { + error!("Key does not match current key"); + return Err(OpaError::InvalidOperation); + } + + let payload_id = payload.id.clone(); + let payload_bytes: NewPublicKeyV1 = payload.into(); + // Verify the previous key and signature + let payload_bytes = payload_bytes.encode(); + let previous_signature = Signature::try_from(&*previous_signature) + .map_err(|_| OpaError::OperationSignatureVerification)?; + let previous_key = ::from_public_key_pem( + previous_signing_key.as_str(), + ) + .map_err(|_| OpaError::OperationSignatureVerification)?; + + k256::ecdsa::signature::Verifier::verify( + &previous_key, + &payload_bytes, + &previous_signature, + ) + .map_err(|_| OpaError::OperationSignatureVerification)?; + + //Verify the new key and signature + let new_signature = Signature::try_from(&*new_signature) + .map_err(|_| OpaError::OperationSignatureVerification)?; + let new_key = ::from_public_key_pem( + 
new_signing_key.as_str(), + ) + .map_err(|_| OpaError::OperationSignatureVerification)?; + + k256::ecdsa::signature::Verifier::verify(&new_key, &payload_bytes, &new_signature) + .map_err(|_| OpaError::OperationSignatureVerification)?; + + //Store new keys + let keys = Keys { + id: payload_id, + current: KeyRegistration { + key: new_signing_key, + version: existing_key.current.version + 1, + }, + expired: Some(KeyRegistration { + key: previous_signing_key, + version: existing_key.current.version, + }), + }; + + pallet::KeyStore::::set(key_address(&keys.id), Some(keys.clone().into())); + + pallet::Pallet::::deposit_event(pallet::Event::::KeyUpdate( + keys.into(), + correlation_id, + )); + + Ok(()) + }, + Operation::SetPolicy(SetPolicy { policy, id }) => { + let hash = sp_core::blake2_128(policy.as_bytes()); + + let meta = PolicyMeta { + id: id.clone(), + hash: hash.into(), + policy_address: policy_address(&*id), + }; + + pallet::PolicyMetaStore::::set(policy_meta_address(&*id), Some(meta.clone().into())); + + pallet::PolicyStore::::set(policy_address(&*id), Some(policy.into())); + + pallet::Pallet::::deposit_event(pallet::Event::::PolicyUpdate( + meta.into(), + correlation_id, + )); + + Ok(()) + }, + } } fn root_keys_from_state() -> Result, OpaError> { - let existing_key = pallet::KeyStore::::try_get(key_address("root")); + let existing_key = pallet::KeyStore::::try_get(key_address("root")); - if let Ok(existing_key) = existing_key { - Ok(Some(existing_key.try_into()?)) - } else { - Ok(None) - } + if let Ok(existing_key) = existing_key { + Ok(Some(existing_key.try_into()?)) + } else { + Ok(None) + } } #[frame_support::pallet] pub mod pallet { - use super::*; - use frame_support::pallet_prelude::*; - use frame_system::pallet_prelude::*; - - #[pallet::pallet] - pub struct Pallet(_); - - /// Configure the pallet by specifying the parameters and types on which it depends. - #[pallet::config] - pub trait Config: frame_system::Config { - /// Because this pallet emits events, it depends on the runtime's definition of an event. - type RuntimeEvent: From> + IsType<::RuntimeEvent>; - /// Type representing the weight of this pallet - type WeightInfo: WeightInfo; - - type OpaSubmission: Parameter - + Into - + parity_scale_codec::Codec; - } - - // The pallet's runtime storage items. - // https://docs.substrate.io/main-docs/build/runtime-storage/ - #[pallet::storage] - #[pallet::getter(fn get_policy)] - // Learn more about declaring storage items: - // https://docs.substrate.io/main-docs/build/runtime-storage/#declaring-storage-items - pub type PolicyStore = - StorageMap<_, Twox128, common::opa::PolicyAddress, common::opa::codec::PolicyV1>; - #[pallet::storage] - #[pallet::getter(fn get_policy_meta)] - pub type PolicyMetaStore = - StorageMap<_, Twox128, common::opa::PolicyMetaAddress, common::opa::codec::PolicyMetaV1>; - #[pallet::storage] - #[pallet::getter(fn get_key)] - pub type KeyStore = - StorageMap<_, Twox128, common::opa::KeyAddress, common::opa::codec::KeysV1>; - - // Pallets use events to inform users when important changes are made. - // https://docs.substrate.io/main-docs/build/events-errors/ - #[pallet::event] - #[pallet::generate_deposit(pub (super) fn deposit_event)] - pub enum Event { - PolicyUpdate(common::opa::codec::PolicyMetaV1, ChronicleTransactionId), - KeyUpdate(common::opa::codec::KeysV1, ChronicleTransactionId), - } - - // Errors inform users that something went wrong. 
-     #[pallet::error]
-     pub enum Error<T> {
-         OperationSignatureVerification,
-         InvalidSigningKey,
-         JsonSerialize,
-         InvalidOperation,
-     }
-
-     impl<T> From<OpaError> for Error<T> {
-         fn from(error: OpaError) -> Self {
-             match error {
-                 OpaError::OperationSignatureVerification => Error::OperationSignatureVerification,
-                 OpaError::InvalidSigningKey => Error::InvalidSigningKey,
-                 OpaError::InvalidOperation => Error::InvalidOperation,
-             }
-         }
-     }
-
-     // Dispatchable functions allow users to interact with the pallet and invoke state changes.
-     // These functions materialize as "extrinsics", which are often compared to transactions.
-     // Dispatchable functions must be annotated with a weight and must return a DispatchResult.
-     #[pallet::call]
-     impl<T: Config> Pallet<T> {
-         // Apply a vector of chronicle operations, yielding an event that indicates state change or
-         // contradiction
-         #[pallet::call_index(0)]
-         #[pallet::weight(T::WeightInfo::apply())]
-         pub fn apply(origin: OriginFor<T>, submission: T::OpaSubmission) -> DispatchResult {
-             // Check that the extrinsic was signed and get the signer.
-             // This function will return an error if the extrinsic is not signed.
-             // https://docs.substrate.io/main-docs/build/origins/
-             let _who = ensure_signed(origin)?;
-
-             // We need to validate the submission's own internal signatures at the codec level
-             let submission: OpaSubmissionV1 = submission.into();
-
-             super::verify_signed_operation::<T>(
-                 &submission,
-                 &super::root_keys_from_state::<T>().map_err(Error::<T>::from)?,
-             )
-             .map_err(Error::<T>::from)?;
-
-             let submission: OpaSubmission = submission.into();
-
-             super::apply_signed_operation::<T>(
-                 submission.correlation_id.into(),
-                 submission.payload,
-             )
-             .map_err(Error::<T>::from)?;
-
-             // Return a successful DispatchResultWithPostInfo
-             Ok(())
-         }
-     }
+     use super::*;
+     use frame_support::pallet_prelude::*;
+     use frame_system::pallet_prelude::*;
+
+     #[pallet::pallet]
+     pub struct Pallet<T>(_);
+
+     /// Configure the pallet by specifying the parameters and types on which it depends.
+     #[pallet::config]
+     pub trait Config: frame_system::Config {
+         /// Because this pallet emits events, it depends on the runtime's definition of an event.
+         type RuntimeEvent: From<Event<Self>> + IsType<<Self as frame_system::Config>::RuntimeEvent>;
+         /// Type representing the weight of this pallet
+         type WeightInfo: WeightInfo;
+
+         type OpaSubmission: Parameter
+             + Into<OpaSubmission>
+             + parity_scale_codec::Codec;
+     }
+
+     // The pallet's runtime storage items.
+     // https://docs.substrate.io/main-docs/build/runtime-storage/
+     #[pallet::storage]
+     #[pallet::getter(fn get_policy)]
+     // Learn more about declaring storage items:
+     // https://docs.substrate.io/main-docs/build/runtime-storage/#declaring-storage-items
+     pub type PolicyStore<T> =
+         StorageMap<_, Twox128, common::opa::PolicyAddress, common::opa::codec::PolicyV1>;
+     #[pallet::storage]
+     #[pallet::getter(fn get_policy_meta)]
+     pub type PolicyMetaStore<T> =
+         StorageMap<_, Twox128, common::opa::PolicyMetaAddress, common::opa::codec::PolicyMetaV1>;
+     #[pallet::storage]
+     #[pallet::getter(fn get_key)]
+     pub type KeyStore<T> =
+         StorageMap<_, Twox128, common::opa::KeyAddress, common::opa::codec::KeysV1>;
+
+     // Pallets use events to inform users when important changes are made.
+     // https://docs.substrate.io/main-docs/build/events-errors/
+     #[pallet::event]
+     #[pallet::generate_deposit(pub (super) fn deposit_event)]
+     pub enum Event<T: Config> {
+         PolicyUpdate(common::opa::codec::PolicyMetaV1, ChronicleTransactionId),
+         KeyUpdate(common::opa::codec::KeysV1, ChronicleTransactionId),
+     }
+
+     // Errors inform users that something went wrong.
+     #[pallet::error]
+     pub enum Error<T> {
+         OperationSignatureVerification,
+         InvalidSigningKey,
+         JsonSerialize,
+         InvalidOperation,
+     }
+
+     impl<T> From<OpaError> for Error<T> {
+         fn from(error: OpaError) -> Self {
+             match error {
+                 OpaError::OperationSignatureVerification => Error::OperationSignatureVerification,
+                 OpaError::InvalidSigningKey => Error::InvalidSigningKey,
+                 OpaError::InvalidOperation => Error::InvalidOperation,
+             }
+         }
+     }
+
+     // Dispatchable functions allow users to interact with the pallet and invoke state changes.
+     // These functions materialize as "extrinsics", which are often compared to transactions.
+     // Dispatchable functions must be annotated with a weight and must return a DispatchResult.
+     #[pallet::call]
+     impl<T: Config> Pallet<T> {
+         // Apply a vector of chronicle operations, yielding an event that indicates state change or
+         // contradiction
+         #[pallet::call_index(0)]
+         #[pallet::weight(T::WeightInfo::apply())]
+         pub fn apply(origin: OriginFor<T>, submission: T::OpaSubmission) -> DispatchResult {
+             // Check that the extrinsic was signed and get the signer.
+             // This function will return an error if the extrinsic is not signed.
+             // https://docs.substrate.io/main-docs/build/origins/
+             let _who = ensure_signed(origin)?;
+
+             // We need to validate the submission's own internal signatures at the codec level
+             let submission: OpaSubmissionV1 = submission.into();
+
+             super::verify_signed_operation::<T>(
+                 &submission,
+                 &super::root_keys_from_state::<T>().map_err(Error::<T>::from)?,
+             )
+             .map_err(Error::<T>::from)?;
+
+             let submission: OpaSubmission = submission.into();
+
+             super::apply_signed_operation::<T>(
+                 submission.correlation_id.into(),
+                 submission.payload,
+             )
+             .map_err(Error::<T>::from)?;
+
+             // Return a successful DispatchResultWithPostInfo
+             Ok(())
+         }
+     }
}
diff --git a/crates/protocol-abstract/src/abstract_ledger.rs b/crates/protocol-abstract/src/abstract_ledger.rs
index 4d4cac66b..b1523ad83 100644
--- a/crates/protocol-abstract/src/abstract_ledger.rs
+++ b/crates/protocol-abstract/src/abstract_ledger.rs
@@ -11,93 +11,93 @@ use tracing::{instrument, warn};

#[derive(Debug, Error)]
pub enum BlockIdError {
-     #[error("Parse {0}")]
-     Parse(
-         #[from]
-         #[source]
-         anyhow::Error,
-     ),
+     #[error("Parse {0}")]
+     Parse(
+         #[from]
+         #[source]
+         anyhow::Error,
+     ),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BlockId {
-     Unknown, //Block ids can be null, empty string etc
-     Block(H256), //ToDo - trait
+     Unknown, //Block ids can be null, empty string etc
+     Block(H256), //ToDo - trait
}

impl From<H256> for BlockId {
-     fn from(hash: H256) -> Self {
-         BlockId::Block(hash)
-     }
+     fn from(hash: H256) -> Self {
+         BlockId::Block(hash)
+     }
}

impl TryFrom<&str> for BlockId {
-     type Error = BlockIdError;
+     type Error = BlockIdError;

-     #[instrument(level = "trace", skip(s), err)]
-     fn try_from(s: &str) -> Result<Self, Self::Error> {
-         let hash = H256::from_str(s).map_err(|e| BlockIdError::Parse(anyhow::Error::new(e)))?;
-         Ok(BlockId::Block(hash))
-     }
+     #[instrument(level = "trace", skip(s), err)]
+     fn try_from(s: &str) -> Result<Self, Self::Error> {
+         let hash = H256::from_str(s).map_err(|e| BlockIdError::Parse(anyhow::Error::new(e)))?;
+         Ok(BlockId::Block(hash))
+     }
}

impl std::fmt::Display for BlockId {
-     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-         match self {
-             BlockId::Unknown => f.write_str("Unknown"),
-
BlockId::Block(hash) => f.write_str(&format!("{:?}", hash)), + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct Position(u32); impl From for Position { - fn from(height: u32) -> Self { - Position(height) - } + fn from(height: u32) -> Self { + Position(height) + } } impl PartialOrd for Position { - fn partial_cmp(&self, other: &Self) -> Option { - let (Position(x), Position(y)) = (self, other); - x.partial_cmp(y) - } + fn partial_cmp(&self, other: &Self) -> Option { + let (Position(x), Position(y)) = (self, other); + x.partial_cmp(y) + } } impl Position { - pub fn new(height: u32) -> Self { - Position(height) - } - - pub fn map(&self, f: F) -> T - where - F: FnOnce(&u32) -> T, - { - f(&self.0) - } - - pub fn distance(&self, other: &Self) -> u32 { - let (Position(x), Position(y)) = (self, other); - x.saturating_sub(*y) - } + pub fn new(height: u32) -> Self { + Position(height) + } + + pub fn map(&self, f: F) -> T + where + F: FnOnce(&u32) -> T, + { + f(&self.0) + } + + pub fn distance(&self, other: &Self) -> u32 { + let (Position(x), Position(y)) = (self, other); + x.saturating_sub(*y) + } } impl std::fmt::Display for Position { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Position(x) => f.write_str(&format!("{}", x)), - } - } + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Position(x) => f.write_str(&format!("{}", x)), + } + } } // Type that can contain a distributed tracing span for transaction processors // that support it #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Span { - Span(u64), - NotTraced, + Span(u64), + NotTraced, } // An application specific ledger event with its corresponding transaction id, @@ -106,190 +106,190 @@ pub type LedgerEventContext = (Event, ChronicleTransactionId, BlockId, Po #[async_trait::async_trait] pub trait LedgerEvent { - fn correlation_id(&self) -> [u8; 16]; + fn correlation_id(&self) -> [u8; 16]; } #[async_trait::async_trait] pub trait LedgerEventCodec { - type Source; - type Sink: LedgerEvent + Send + Sync; - type Error: std::error::Error; - // Attempt to deserialize an event, where there may be none present in the source - async fn maybe_deserialize( - source: Self::Source, - ) -> Result, Self::Error> - where - Self: Sized; + type Source; + type Sink: LedgerEvent + Send + Sync; + type Error: std::error::Error; + // Attempt to deserialize an event, where there may be none present in the source + async fn maybe_deserialize( + source: Self::Source, + ) -> Result, Self::Error> + where + Self: Sized; } pub trait MessageBuilder {} #[async_trait::async_trait] pub trait LedgerTransaction { - type Error: std::error::Error + Send + Sync + 'static; - type Payload: Sized + Send + Sync; - async fn as_payload(&self) -> Result; - fn correlation_id(&self) -> [u8; 16]; + type Error: std::error::Error + Send + Sync + 'static; + type Payload: Sized + Send + Sync; + async fn as_payload(&self) -> Result; + fn correlation_id(&self) -> [u8; 16]; } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum WriteConsistency { - Weak, - Strong, + Weak, + Strong, } #[async_trait::async_trait] pub trait LedgerWriter { - type Error: std::error::Error; - type Transaction: LedgerTransaction; - type Submittable: Sized; - - // Minimally process the transaction offline to get a transaction id and submittable type - async fn pre_submit( - &self, - tx: Self::Transaction, - ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error>; - - // Submit is used to submit a 
transaction to the ledger - async fn do_submit( - &self, - consistency: WriteConsistency, - submittable: Self::Submittable, - ) -> Result; + type Error: std::error::Error; + type Transaction: LedgerTransaction; + type Submittable: Sized; + + // Minimally process the transaction offline to get a transaction id and submittable type + async fn pre_submit( + &self, + tx: Self::Transaction, + ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error>; + + // Submit is used to submit a transaction to the ledger + async fn do_submit( + &self, + consistency: WriteConsistency, + submittable: Self::Submittable, + ) -> Result; } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum FromBlock { - // Do not attempt to catch up, start from the current head - Head, - // Discover the first useful block and start from there - First, - // Start from the given block - BlockId(BlockId), + // Do not attempt to catch up, start from the current head + Head, + // Discover the first useful block and start from there + First, + // Start from the given block + BlockId(BlockId), } #[async_trait::async_trait] pub trait LedgerReader { - type Event: LedgerEvent; - type EventCodec: LedgerEventCodec; - type Error: std::error::Error; - // Get the block height of the ledger, and the id of the highest block - async fn block_height(&self) -> Result<(Position, BlockId), Self::Error>; - /// Subscribe to state updates from this ledger, starting at `offset`, and - /// ending the stream after `number_of_blocks` blocks have been processed. - async fn state_updates( - &self, - // The block to start from - from_block: FromBlock, - // The number of blocks to process before ending the stream - number_of_blocks: Option, - ) -> Result>, Self::Error>; + type Event: LedgerEvent; + type EventCodec: LedgerEventCodec; + type Error: std::error::Error; + // Get the block height of the ledger, and the id of the highest block + async fn block_height(&self) -> Result<(Position, BlockId), Self::Error>; + /// Subscribe to state updates from this ledger, starting at `offset`, and + /// ending the stream after `number_of_blocks` blocks have been processed. 
+ async fn state_updates( + &self, + // The block to start from + from_block: FromBlock, + // The number of blocks to process before ending the stream + number_of_blocks: Option, + ) -> Result>, Self::Error>; } pub fn retryable_ledger(ledger: L, retry_delay: Duration) -> RetryLedger { - RetryLedger::new(ledger, retry_delay) + RetryLedger::new(ledger, retry_delay) } #[derive(Clone)] pub struct RetryLedger { - inner: L, - retry_delay: Duration, + inner: L, + retry_delay: Duration, } impl RetryLedger { - pub fn new(inner: L, retry_delay: Duration) -> Self { - Self { inner, retry_delay } - } + pub fn new(inner: L, retry_delay: Duration) -> Self { + Self { inner, retry_delay } + } } #[async_trait::async_trait] impl LedgerWriter for RetryLedger - where - L: LedgerReader + LedgerWriter + Send + Sync, - ::Error: Send + Sync + 'static, - ::Transaction: Send + Sync + 'static, - L::Submittable: Send + Sync + 'static + Clone, +where + L: LedgerReader + LedgerWriter + Send + Sync, + ::Error: Send + Sync + 'static, + ::Transaction: Send + Sync + 'static, + L::Submittable: Send + Sync + 'static + Clone, { - type Error = ::Error; - type Submittable = L::Submittable; - type Transaction = L::Transaction; - - async fn pre_submit( - &self, - tx: Self::Transaction, - ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { - tracing::debug!(target: "ledger_writer", "Pre-submitting transaction"); - let pre_submit_result = self.inner.pre_submit(tx).await; - match pre_submit_result { - Ok(result) => Ok(result), - Err(e) => { - tracing::error!(error = %e, "Failed to pre-submit transaction"); - Err(e) - } - } - } - - async fn do_submit( - &self, - consistency: WriteConsistency, - submittable: Self::Submittable, - ) -> Result { - let mut attempts = 0; - loop { - match self.inner.do_submit(consistency, submittable.clone()).await { - Ok(result) => { - tracing::info!(target: "ledger_writer", "Successfully submitted transaction"); - return Ok(result); - } - Err(e) => { - attempts += 1; - tracing::warn!(error = %e.0, attempts, "Failed to submit transaction, retrying after delay"); - tokio::time::sleep(self.retry_delay).await; - } - } - } - } + type Error = ::Error; + type Submittable = L::Submittable; + type Transaction = L::Transaction; + + async fn pre_submit( + &self, + tx: Self::Transaction, + ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { + tracing::debug!(target: "ledger_writer", "Pre-submitting transaction"); + let pre_submit_result = self.inner.pre_submit(tx).await; + match pre_submit_result { + Ok(result) => Ok(result), + Err(e) => { + tracing::error!(error = %e, "Failed to pre-submit transaction"); + Err(e) + }, + } + } + + async fn do_submit( + &self, + consistency: WriteConsistency, + submittable: Self::Submittable, + ) -> Result { + let mut attempts = 0; + loop { + match self.inner.do_submit(consistency, submittable.clone()).await { + Ok(result) => { + tracing::info!(target: "ledger_writer", "Successfully submitted transaction"); + return Ok(result); + }, + Err(e) => { + attempts += 1; + tracing::warn!(error = %e.0, attempts, "Failed to submit transaction, retrying after delay"); + tokio::time::sleep(self.retry_delay).await; + }, + } + } + } } #[async_trait] impl LedgerReader for RetryLedger - where - ::Error: Send + Sync, +where + ::Error: Send + Sync, { - type Error = L::Error; - type Event = L::Event; - type EventCodec = L::EventCodec; - - async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { - let mut attempts = 0; - loop { - match 
self.inner.block_height().await { - Ok(result) => { - tracing::info!(target: "ledger_reader", "Successfully retrieved block height"); - return Ok(result); - } - Err(e) => { - attempts += 1; - tracing::warn!(error = %e, attempts, "Failed to get block height, retrying after delay"); - tokio::time::sleep(self.retry_delay).await; - } - } - } - } - - async fn state_updates( - &self, - from_block: FromBlock, - number_of_blocks: Option, - ) -> Result>, Self::Error> { - loop { - match self.inner.state_updates(from_block, number_of_blocks).await { - Ok(stream) => return Ok(stream), - Err(e) => { - warn!(error = %e, "Failed to subscribe to state updates, retrying after delay"); - sleep(self.retry_delay).await; - } - } - } - } + type Error = L::Error; + type Event = L::Event; + type EventCodec = L::EventCodec; + + async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { + let mut attempts = 0; + loop { + match self.inner.block_height().await { + Ok(result) => { + tracing::info!(target: "ledger_reader", "Successfully retrieved block height"); + return Ok(result); + }, + Err(e) => { + attempts += 1; + tracing::warn!(error = %e, attempts, "Failed to get block height, retrying after delay"); + tokio::time::sleep(self.retry_delay).await; + }, + } + } + } + + async fn state_updates( + &self, + from_block: FromBlock, + number_of_blocks: Option, + ) -> Result>, Self::Error> { + loop { + match self.inner.state_updates(from_block, number_of_blocks).await { + Ok(stream) => return Ok(stream), + Err(e) => { + warn!(error = %e, "Failed to subscribe to state updates, retrying after delay"); + sleep(self.retry_delay).await; + }, + } + } + } } diff --git a/crates/protocol-substrate-chronicle/src/lib.rs b/crates/protocol-substrate-chronicle/src/lib.rs index 56d56bb0c..6f1578dc3 100644 --- a/crates/protocol-substrate-chronicle/src/lib.rs +++ b/crates/protocol-substrate-chronicle/src/lib.rs @@ -1,11 +1,11 @@ mod subxt_client; pub mod protocol { - pub use protocol_abstract::*; + pub use protocol_abstract::*; } pub mod common { - pub use common::*; + pub use common::*; } pub use subxt_client::*; diff --git a/crates/protocol-substrate-chronicle/src/subxt_client.rs b/crates/protocol-substrate-chronicle/src/subxt_client.rs index 82dfeb1f4..2abb377ac 100644 --- a/crates/protocol-substrate-chronicle/src/subxt_client.rs +++ b/crates/protocol-substrate-chronicle/src/subxt_client.rs @@ -1,23 +1,23 @@ use std::{convert::Infallible, marker::PhantomData, sync::Arc}; use chronicle_signing::{ - ChronicleSigning, OwnedSecret, SecretError, BATCHER_NAMESPACE, BATCHER_PK, + ChronicleSigning, OwnedSecret, SecretError, BATCHER_NAMESPACE, BATCHER_PK, }; use common::{ - identity::SignedIdentity, ledger::OperationSubmission, opa::OpaSettings, - prov::operations::ChronicleOperation, + identity::SignedIdentity, ledger::OperationSubmission, opa::OpaSettings, + prov::operations::ChronicleOperation, }; use protocol_substrate::{SubstrateClient, SubxtClientError}; use subxt::ext::{ - codec::Decode, - scale_value::Composite, - sp_core::{blake2_256, Pair}, + codec::Decode, + scale_value::Composite, + sp_core::{blake2_256, Pair}, }; use subxt::{ - tx::Signer, - utils::{AccountId32, MultiAddress, MultiSignature}, + tx::Signer, + utils::{AccountId32, MultiAddress, MultiSignature}, }; use protocol_abstract::{LedgerEvent, LedgerEventCodec, LedgerTransaction, Span}; @@ -25,220 +25,220 @@ use protocol_abstract::{LedgerEvent, LedgerEventCodec, LedgerTransaction, Span}; //This type must match pallet::Event but we cannot reference it directly 
#[derive(Debug, Clone)] pub enum ChronicleEvent { - Committed { - diff: common::prov::ProvModel, - identity: SignedIdentity, - correlation_id: [u8; 16], - }, - Contradicted { - contradiction: common::prov::Contradiction, - identity: SignedIdentity, - correlation_id: [u8; 16], - }, + Committed { + diff: common::prov::ProvModel, + identity: SignedIdentity, + correlation_id: [u8; 16], + }, + Contradicted { + contradiction: common::prov::Contradiction, + identity: SignedIdentity, + correlation_id: [u8; 16], + }, } //This type must match pallet::Event but we cannot reference it directly pub struct ChronicleEventCodec - where - C: subxt::Config, +where + C: subxt::Config, { - _p: PhantomData, + _p: PhantomData, } impl ChronicleEvent { - #[tracing::instrument(level = "trace", skip(diff, identity), fields( + #[tracing::instrument(level = "trace", skip(diff, identity), fields( diff = tracing::field::debug(& diff), identity = tracing::field::debug(& identity), correlation_id = tracing::field::debug(& correlation_id) ))] - pub fn new_committed( - diff: common::prov::ProvModel, - identity: SignedIdentity, - correlation_id: [u8; 16], - ) -> Self { - ChronicleEvent::Committed { diff, identity, correlation_id } - } - - pub fn new_contradicted( - contradiction: common::prov::Contradiction, - identity: SignedIdentity, - correlation_id: [u8; 16], - ) -> Self { - ChronicleEvent::Contradicted { contradiction, identity, correlation_id } - } + pub fn new_committed( + diff: common::prov::ProvModel, + identity: SignedIdentity, + correlation_id: [u8; 16], + ) -> Self { + ChronicleEvent::Committed { diff, identity, correlation_id } + } + + pub fn new_contradicted( + contradiction: common::prov::Contradiction, + identity: SignedIdentity, + correlation_id: [u8; 16], + ) -> Self { + ChronicleEvent::Contradicted { contradiction, identity, correlation_id } + } } fn extract_event( - event: subxt::events::EventDetails, + event: subxt::events::EventDetails, ) -> Result, SubxtClientError> - where - C: subxt::Config, +where + C: subxt::Config, { - type Applied = (common::prov::ProvModel, common::identity::SignedIdentity, [u8; 16]); - type Contradicted = (common::prov::Contradiction, common::identity::SignedIdentity, [u8; 16]); - match (event.pallet_name(), event.variant_name(), event.field_bytes()) { - ("Chronicle", "Applied", mut event_bytes) => match Applied::decode(&mut event_bytes) { - Ok((prov_model, identity, correlation_id)) => - Ok(Some(ChronicleEvent::new_committed(prov_model, identity, correlation_id))), - Err(e) => { - tracing::error!("Failed to decode ProvModel: {}", e); - Err(e.into()) - } - }, - ("Chronicle", "Contradicted", mut event_bytes) => { - match Contradicted::decode(&mut event_bytes) { - Ok((contradiction, identity, correlation_id)) => - Ok(ChronicleEvent::new_contradicted(contradiction, identity, correlation_id) - .into()), - Err(e) => { - tracing::error!("Failed to decode Contradiction: {}", e); - Err(e.into()) - } - } - } - (_pallet, _event, _) => Ok(None), - } + type Applied = (common::prov::ProvModel, common::identity::SignedIdentity, [u8; 16]); + type Contradicted = (common::prov::Contradiction, common::identity::SignedIdentity, [u8; 16]); + match (event.pallet_name(), event.variant_name(), event.field_bytes()) { + ("Chronicle", "Applied", mut event_bytes) => match Applied::decode(&mut event_bytes) { + Ok((prov_model, identity, correlation_id)) => + Ok(Some(ChronicleEvent::new_committed(prov_model, identity, correlation_id))), + Err(e) => { + tracing::error!("Failed to decode ProvModel: {}", e); 
+ Err(e.into()) + }, + }, + ("Chronicle", "Contradicted", mut event_bytes) => { + match Contradicted::decode(&mut event_bytes) { + Ok((contradiction, identity, correlation_id)) => + Ok(ChronicleEvent::new_contradicted(contradiction, identity, correlation_id) + .into()), + Err(e) => { + tracing::error!("Failed to decode Contradiction: {}", e); + Err(e.into()) + }, + } + }, + (_pallet, _event, _) => Ok(None), + } } impl LedgerEvent for ChronicleEvent { - fn correlation_id(&self) -> [u8; 16] { - match self { - Self::Committed { correlation_id, .. } => *correlation_id, - Self::Contradicted { correlation_id, .. } => *correlation_id, - } - } + fn correlation_id(&self) -> [u8; 16] { + match self { + Self::Committed { correlation_id, .. } => *correlation_id, + Self::Contradicted { correlation_id, .. } => *correlation_id, + } + } } #[async_trait::async_trait] impl LedgerEventCodec for ChronicleEventCodec - where - C: subxt::Config, +where + C: subxt::Config, { - type Error = SubxtClientError; - type Sink = ChronicleEvent; - type Source = subxt::events::EventDetails; - - async fn maybe_deserialize( - source: Self::Source, - ) -> Result, Self::Error> - where - Self: Sized, - { - match extract_event(source) { - Ok(Some(ev)) => Ok(Some((ev, Span::NotTraced))), - Ok(None) => Ok(None), - Err(e) => Err(e), - } - } + type Error = SubxtClientError; + type Sink = ChronicleEvent; + type Source = subxt::events::EventDetails; + + async fn maybe_deserialize( + source: Self::Source, + ) -> Result, Self::Error> + where + Self: Sized, + { + match extract_event(source) { + Ok(Some(ev)) => Ok(Some((ev, Span::NotTraced))), + Ok(None) => Ok(None), + Err(e) => Err(e), + } + } } pub struct ChronicleTransaction { - pub correlation_id: uuid::Uuid, - key: subxt::ext::sp_core::ecdsa::Pair, //We need the batcher key to sign transactions - pub identity: Arc, - pub operations: Arc>, + pub correlation_id: uuid::Uuid, + key: subxt::ext::sp_core::ecdsa::Pair, //We need the batcher key to sign transactions + pub identity: Arc, + pub operations: Arc>, } impl ChronicleTransaction { - pub async fn new( - signer: &ChronicleSigning, - identity: SignedIdentity, - operations: impl IntoIterator, - ) -> Result { - Ok(Self { - correlation_id: uuid::Uuid::new_v4(), - key: subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - ) - .unwrap(), - identity: identity.into(), - operations: Arc::new(operations.into_iter().collect::>()), - }) - } + pub async fn new( + signer: &ChronicleSigning, + identity: SignedIdentity, + operations: impl IntoIterator, + ) -> Result { + Ok(Self { + correlation_id: uuid::Uuid::new_v4(), + key: subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + ) + .unwrap(), + identity: identity.into(), + operations: Arc::new(operations.into_iter().collect::>()), + }) + } } // This type must match the signature of the extrinsic call #[derive( - scale_info::TypeInfo, - scale_encode::EncodeAsType, - parity_scale_codec::Encode, - parity_scale_codec::Decode, + scale_info::TypeInfo, + scale_encode::EncodeAsType, + parity_scale_codec::Encode, + parity_scale_codec::Decode, )] pub struct ApplyArgs { - pub operations: OperationSubmission, + pub operations: OperationSubmission, } #[async_trait::async_trait] impl LedgerTransaction for ChronicleTransaction { - type Error = Infallible; - type Payload = ApplyArgs; - - async fn as_payload(&self) -> Result { - Ok(ApplyArgs { - operations: 
OperationSubmission { - correlation_id: self.correlation_id.into_bytes(), - identity: self.identity.clone(), - items: self.operations.clone(), - }, - }) - } - - fn correlation_id(&self) -> [u8; 16] { - self.correlation_id.into_bytes() - } + type Error = Infallible; + type Payload = ApplyArgs; + + async fn as_payload(&self) -> Result { + Ok(ApplyArgs { + operations: OperationSubmission { + correlation_id: self.correlation_id.into_bytes(), + identity: self.identity.clone(), + items: self.operations.clone(), + }, + }) + } + + fn correlation_id(&self) -> [u8; 16] { + self.correlation_id.into_bytes() + } } ///Subxt signer needs to be infallible, so we need to keep a copy of key material here impl Signer for ChronicleTransaction - where - C: subxt::Config< - AccountId=AccountId32, - Address=MultiAddress, - Signature=MultiSignature, - >, +where + C: subxt::Config< + AccountId = AccountId32, + Address = MultiAddress, + Signature = MultiSignature, + >, { - // The account id for an ecdsa key is the blake2_256 hash of the compressed public key - fn account_id(&self) -> AccountId32 { - AccountId32::from(blake2_256(&self.key.public().0)) - } - - fn address(&self) -> MultiAddress<::AccountId, ()> { - MultiAddress::Id(>::account_id(self)) - } - - fn sign(&self, signer_payload: &[u8]) -> MultiSignature { - self.key.sign(signer_payload).into() - } + // The account id for an ecdsa key is the blake2_256 hash of the compressed public key + fn account_id(&self) -> AccountId32 { + AccountId32::from(blake2_256(&self.key.public().0)) + } + + fn address(&self) -> MultiAddress<::AccountId, ()> { + MultiAddress::Id(>::account_id(self)) + } + + fn sign(&self, signer_payload: &[u8]) -> MultiSignature { + self.key.sign(signer_payload).into() + } } #[async_trait::async_trait] pub trait SettingsLoader { - async fn load_settings_from_storage(&self) -> Result, SubxtClientError>; + async fn load_settings_from_storage(&self) -> Result, SubxtClientError>; } pub type ChronicleSubstrateClient = -SubstrateClient, ChronicleTransaction>; + SubstrateClient, ChronicleTransaction>; #[async_trait::async_trait] impl SettingsLoader for ChronicleSubstrateClient - where - C: subxt::Config, +where + C: subxt::Config, { - async fn load_settings_from_storage(&self) -> Result, SubxtClientError> { - tracing::debug!("Loading OPA settings from storage."); - let call = subxt::dynamic::runtime_api_call( - "Chronicle", - "get_opa_settings", - Composite::unnamed(vec![]), - ); - let settings: Option = self - .client - .runtime_api() - .at_latest() - .await? - .call(call) - .await - .map_err(SubxtClientError::from) - .and_then(|r| r.as_type::>().map_err(SubxtClientError::from))?; - - Ok(settings) - } + async fn load_settings_from_storage(&self) -> Result, SubxtClientError> { + tracing::debug!("Loading OPA settings from storage."); + let call = subxt::dynamic::runtime_api_call( + "Chronicle", + "get_opa_settings", + Composite::unnamed(vec![]), + ); + let settings: Option = self + .client + .runtime_api() + .at_latest() + .await? 
+ .call(call) + .await + .map_err(SubxtClientError::from) + .and_then(|r| r.as_type::>().map_err(SubxtClientError::from))?; + + Ok(settings) + } } diff --git a/crates/protocol-substrate-opa/src/lib.rs b/crates/protocol-substrate-opa/src/lib.rs index a486ca23c..849a2ddd2 100644 --- a/crates/protocol-substrate-opa/src/lib.rs +++ b/crates/protocol-substrate-opa/src/lib.rs @@ -7,12 +7,12 @@ use protocol_abstract::{LedgerEvent, LedgerEventCodec, Span}; use protocol_substrate::{SubstrateClient, SubxtClientError}; use serde::Serialize; use subxt::{ - ext::{ - codec::Decode, - sp_core::{blake2_256, Pair}, - }, - tx::Signer, - utils::{AccountId32, MultiAddress, MultiSignature}, + ext::{ + codec::Decode, + sp_core::{blake2_256, Pair}, + }, + tx::Signer, + utils::{AccountId32, MultiAddress, MultiSignature}, }; use transaction::OpaTransaction; @@ -24,113 +24,113 @@ pub mod transaction; pub use subxt::ext::sp_core::blake2_128 as policy_hash; pub struct OpaEventCodec - where - C: subxt::Config, +where + C: subxt::Config, { - _p: PhantomData, + _p: PhantomData, } //This type must match pallet::Event but we cannot reference it directly #[derive(Debug, Clone, Serialize)] pub enum OpaEvent { - PolicyUpdate { policy: common::opa::PolicyMeta, correlation_id: ChronicleTransactionId }, - KeyUpdate { keys: common::opa::Keys, correlation_id: ChronicleTransactionId }, + PolicyUpdate { policy: common::opa::PolicyMeta, correlation_id: ChronicleTransactionId }, + KeyUpdate { keys: common::opa::Keys, correlation_id: ChronicleTransactionId }, } impl OpaEvent { - fn new_policy_update( - policy_meta: common::opa::PolicyMeta, - transaction_id: ChronicleTransactionId, - ) -> Self { - OpaEvent::PolicyUpdate { policy: policy_meta, correlation_id: transaction_id } - } - - fn new_key_update(keys: common::opa::Keys, correlation_id: ChronicleTransactionId) -> Self { - OpaEvent::KeyUpdate { keys, correlation_id } - } + fn new_policy_update( + policy_meta: common::opa::PolicyMeta, + transaction_id: ChronicleTransactionId, + ) -> Self { + OpaEvent::PolicyUpdate { policy: policy_meta, correlation_id: transaction_id } + } + + fn new_key_update(keys: common::opa::Keys, correlation_id: ChronicleTransactionId) -> Self { + OpaEvent::KeyUpdate { keys, correlation_id } + } } fn extract_event( - event: subxt::events::EventDetails, + event: subxt::events::EventDetails, ) -> Result, SubxtClientError> - where - C: subxt::Config, +where + C: subxt::Config, { - type PolicyUpdate = (common::opa::codec::PolicyMetaV1, ChronicleTransactionId); - type KeyUpdate = (common::opa::codec::KeysV1, ChronicleTransactionId); - match (event.pallet_name(), event.variant_name(), event.field_bytes()) { - ("Opa", "PolicyUpdate", mut event_bytes) => match PolicyUpdate::decode(&mut event_bytes) { - Ok((meta, correlation_id)) => - Ok(Some(OpaEvent::new_policy_update(meta.try_into()?, correlation_id))), - Err(e) => { - tracing::error!("Failed to decode ProvModel: {}", e); - Err(e.into()) - } - }, - ("Chronicle", "KeyUpdate", mut event_bytes) => match KeyUpdate::decode(&mut event_bytes) { - Ok((keys, correlation_id)) => - Ok(OpaEvent::new_key_update(keys.try_into()?, correlation_id).into()), - Err(e) => { - tracing::error!("Failed to decode Contradiction: {}", e); - Err(e.into()) - } - }, - (_pallet, _event, _) => Ok(None), - } + type PolicyUpdate = (common::opa::codec::PolicyMetaV1, ChronicleTransactionId); + type KeyUpdate = (common::opa::codec::KeysV1, ChronicleTransactionId); + match (event.pallet_name(), event.variant_name(), event.field_bytes()) { + ("Opa", 
"PolicyUpdate", mut event_bytes) => match PolicyUpdate::decode(&mut event_bytes) { + Ok((meta, correlation_id)) => + Ok(Some(OpaEvent::new_policy_update(meta.try_into()?, correlation_id))), + Err(e) => { + tracing::error!("Failed to decode ProvModel: {}", e); + Err(e.into()) + }, + }, + ("Chronicle", "KeyUpdate", mut event_bytes) => match KeyUpdate::decode(&mut event_bytes) { + Ok((keys, correlation_id)) => + Ok(OpaEvent::new_key_update(keys.try_into()?, correlation_id).into()), + Err(e) => { + tracing::error!("Failed to decode Contradiction: {}", e); + Err(e.into()) + }, + }, + (_pallet, _event, _) => Ok(None), + } } #[async_trait::async_trait] impl LedgerEventCodec for OpaEventCodec - where - C: subxt::Config, +where + C: subxt::Config, { - type Error = SubxtClientError; - type Sink = OpaEvent; - type Source = subxt::events::EventDetails; - - async fn maybe_deserialize( - source: Self::Source, - ) -> Result, Self::Error> - where - Self: Sized, - { - match extract_event(source) { - Ok(Some(ev)) => Ok(Some((ev, Span::NotTraced))), - Ok(None) => Ok(None), - Err(e) => Err(e), - } - } + type Error = SubxtClientError; + type Sink = OpaEvent; + type Source = subxt::events::EventDetails; + + async fn maybe_deserialize( + source: Self::Source, + ) -> Result, Self::Error> + where + Self: Sized, + { + match extract_event(source) { + Ok(Some(ev)) => Ok(Some((ev, Span::NotTraced))), + Ok(None) => Ok(None), + Err(e) => Err(e), + } + } } impl LedgerEvent for OpaEvent { - fn correlation_id(&self) -> [u8; 16] { - match self { - Self::PolicyUpdate { correlation_id, .. } => **correlation_id, - Self::KeyUpdate { correlation_id, .. } => **correlation_id, - } - } + fn correlation_id(&self) -> [u8; 16] { + match self { + Self::PolicyUpdate { correlation_id, .. } => **correlation_id, + Self::KeyUpdate { correlation_id, .. 
} => **correlation_id, + } + } } impl Signer for OpaTransaction - where - C: subxt::Config< - AccountId=AccountId32, - Address=MultiAddress, - Signature=MultiSignature, - >, +where + C: subxt::Config< + AccountId = AccountId32, + Address = MultiAddress, + Signature = MultiSignature, + >, { - // The account id for an ecdsa key is the blake2_256 hash of the compressed public key - fn account_id(&self) -> AccountId32 { - AccountId32::from(blake2_256(&self.account_key().public().0)) - } - - fn address(&self) -> MultiAddress<::AccountId, ()> { - MultiAddress::Id(>::account_id(self)) - } - - fn sign(&self, signer_payload: &[u8]) -> MultiSignature { - self.account_key().sign(signer_payload).into() - } + // The account id for an ecdsa key is the blake2_256 hash of the compressed public key + fn account_id(&self) -> AccountId32 { + AccountId32::from(blake2_256(&self.account_key().public().0)) + } + + fn address(&self) -> MultiAddress<::AccountId, ()> { + MultiAddress::Id(>::account_id(self)) + } + + fn sign(&self, signer_payload: &[u8]) -> MultiSignature { + self.account_key().sign(signer_payload).into() + } } pub type OpaSubstrateClient = SubstrateClient, OpaTransaction>; diff --git a/crates/protocol-substrate-opa/src/loader.rs b/crates/protocol-substrate-opa/src/loader.rs index 700fce516..731c124ab 100644 --- a/crates/protocol-substrate-opa/src/loader.rs +++ b/crates/protocol-substrate-opa/src/loader.rs @@ -1,126 +1,126 @@ use common::opa::{ - codec::PolicyV1, - std::{PolicyLoader, PolicyLoaderError}, - OpaSettings, + codec::PolicyV1, + std::{PolicyLoader, PolicyLoaderError}, + OpaSettings, }; use opa::bundle::Bundle; use protocol_substrate::{SubstrateClient, SubxtClientError}; use subxt::{ - ext::{scale_value::Composite, sp_core::blake2_128}, - PolkadotConfig, + ext::{scale_value::Composite, sp_core::blake2_128}, + PolkadotConfig, }; use tracing::{debug, error, info, instrument, warn}; use crate::{transaction::OpaTransaction, OpaEventCodec, OpaSubstrateClient}; pub struct SubstratePolicyLoader { - settings: OpaSettings, - policy: Option>, - client: OpaSubstrateClient, - addr_string: String, + settings: OpaSettings, + policy: Option>, + client: OpaSubstrateClient, + addr_string: String, } impl SubstratePolicyLoader { - pub fn new( - settings: OpaSettings, - client: &SubstrateClient, OpaTransaction>, - ) -> Self { - Self { - addr_string: settings.policy_address.to_string(), - settings, - policy: None, - client: client.clone(), - } - } - - #[instrument(level = "debug", skip(self), fields( + pub fn new( + settings: OpaSettings, + client: &SubstrateClient, OpaTransaction>, + ) -> Self { + Self { + addr_string: settings.policy_address.to_string(), + settings, + policy: None, + client: client.clone(), + } + } + + #[instrument(level = "debug", skip(self), fields( policy_address = % self.settings.policy_address, entrypoint = % self.settings.entrypoint ))] - async fn load_bundle_from_chain(&mut self) -> Result, SubxtClientError> { - if let Some(policy) = self.policy.as_ref() { - return Ok(policy.clone()); - } - let load_policy_from = self.settings.policy_address; - debug!(policy_address=?load_policy_from, "Loading policy from address"); - let load_policy_from = subxt::ext::scale_value::serde::to_value(load_policy_from)?; - loop { - tracing::debug!(target: "protocol_substrate_opa::loader", "Loading policy from storage."); - let call = subxt::dynamic::runtime_api_call( - "Opa", - "get_policy", - Composite::unnamed(vec![load_policy_from.clone()]), - ); - - let policy: PolicyV1 = self - .client - .client - 
.runtime_api() - .at_latest() - .await? - .call(call) - .await - .map_err(SubxtClientError::from) - .and_then(|r| r.as_type::().map_err(SubxtClientError::from))?; - - if let Some(policy) = Some(policy) { - return Ok(policy.into_vec()); - } else { - warn!("Policy not found, retrying in 2 seconds"); - tokio::time::sleep(std::time::Duration::from_secs(2)).await; - continue; - } - } - } + async fn load_bundle_from_chain(&mut self) -> Result, SubxtClientError> { + if let Some(policy) = self.policy.as_ref() { + return Ok(policy.clone()); + } + let load_policy_from = self.settings.policy_address; + debug!(policy_address=?load_policy_from, "Loading policy from address"); + let load_policy_from = subxt::ext::scale_value::serde::to_value(load_policy_from)?; + loop { + tracing::debug!(target: "protocol_substrate_opa::loader", "Loading policy from storage."); + let call = subxt::dynamic::runtime_api_call( + "Opa", + "get_policy", + Composite::unnamed(vec![load_policy_from.clone()]), + ); + + let policy: PolicyV1 = self + .client + .client + .runtime_api() + .at_latest() + .await? + .call(call) + .await + .map_err(SubxtClientError::from) + .and_then(|r| r.as_type::().map_err(SubxtClientError::from))?; + + if let Some(policy) = Some(policy) { + return Ok(policy.into_vec()); + } else { + warn!("Policy not found, retrying in 2 seconds"); + tokio::time::sleep(std::time::Duration::from_secs(2)).await; + continue; + } + } + } } #[async_trait::async_trait] impl PolicyLoader for SubstratePolicyLoader { - fn set_address(&mut self, _address: &str) { - unimplemented!() - } - - fn set_rule_name(&mut self, _name: &str) { - unimplemented!() - } - - fn set_entrypoint(&mut self, _entrypoint: &str) { - unimplemented!() - } - - fn get_address(&self) -> &str { - &self.addr_string - } - - fn get_rule_name(&self) -> &str { - &self.settings.policy_name - } - - fn get_entrypoint(&self) -> &str { - &self.settings.entrypoint - } - - fn get_policy(&self) -> &[u8] { - self.policy.as_ref().unwrap() - } - - async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { - let bundle = self - .load_bundle_from_chain() - .await - .map_err(|e| PolicyLoaderError::Substrate(e.into()))?; - info!(fetched_policy_bytes=?bundle.len(), "Fetched policy"); - if bundle.is_empty() { - error!("Policy not found: {}", self.get_rule_name()); - return Err(PolicyLoaderError::MissingPolicy(self.get_rule_name().to_string())); - } - self.load_policy_from_bundle(&Bundle::from_bytes(&*bundle)?) 
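The loop in `load_bundle_from_chain` above is, in intent, a poll-until-present: ask the runtime for the policy, and if it is not there yet, sleep two seconds and retry. A generic sketch of that shape (a hypothetical helper, assuming a tokio runtime as the loader itself does):

```rust
use std::{future::Future, time::Duration};

// Poll an async lookup until it yields a value, sleeping between attempts.
// `fetch` is illustrative; the real loader calls the "Opa"/"get_policy"
// runtime API on each pass.
async fn poll_until_some<T, E, F, Fut>(mut fetch: F, every: Duration) -> Result<T, E>
where
	F: FnMut() -> Fut,
	Fut: Future<Output = Result<Option<T>, E>>,
{
	loop {
		match fetch().await? {
			Some(value) => return Ok(value),
			None => tokio::time::sleep(every).await,
		}
	}
}
```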
- } - - fn load_policy_from_bytes(&mut self, policy: &[u8]) { - self.policy = Some(policy.to_vec()) - } - - fn hash(&self) -> String { - hex::encode(blake2_128(self.policy.as_ref().unwrap())) - } + fn set_address(&mut self, _address: &str) { + unimplemented!() + } + + fn set_rule_name(&mut self, _name: &str) { + unimplemented!() + } + + fn set_entrypoint(&mut self, _entrypoint: &str) { + unimplemented!() + } + + fn get_address(&self) -> &str { + &self.addr_string + } + + fn get_rule_name(&self) -> &str { + &self.settings.policy_name + } + + fn get_entrypoint(&self) -> &str { + &self.settings.entrypoint + } + + fn get_policy(&self) -> &[u8] { + self.policy.as_ref().unwrap() + } + + async fn load_policy(&mut self) -> Result<(), PolicyLoaderError> { + let bundle = self + .load_bundle_from_chain() + .await + .map_err(|e| PolicyLoaderError::Substrate(e.into()))?; + info!(fetched_policy_bytes=?bundle.len(), "Fetched policy"); + if bundle.is_empty() { + error!("Policy not found: {}", self.get_rule_name()); + return Err(PolicyLoaderError::MissingPolicy(self.get_rule_name().to_string())); + } + self.load_policy_from_bundle(&Bundle::from_bytes(&*bundle)?) + } + + fn load_policy_from_bytes(&mut self, policy: &[u8]) { + self.policy = Some(policy.to_vec()) + } + + fn hash(&self) -> String { + hex::encode(blake2_128(self.policy.as_ref().unwrap())) + } } diff --git a/crates/protocol-substrate-opa/src/submission_builder.rs b/crates/protocol-substrate-opa/src/submission_builder.rs index 4f20dfb0b..f7926a0ce 100644 --- a/crates/protocol-substrate-opa/src/submission_builder.rs +++ b/crates/protocol-substrate-opa/src/submission_builder.rs @@ -1,197 +1,197 @@ use core::panic; use std::{ - cell::RefCell, - sync::{Arc, Mutex}, + cell::RefCell, + sync::{Arc, Mutex}, }; use chronicle_signing::{ - ChronicleSigning, OpaKnownKeyNamesSigner, SecretError, WithSecret, OPA_NAMESPACE, + ChronicleSigning, OpaKnownKeyNamesSigner, SecretError, WithSecret, OPA_NAMESPACE, }; use common::{ - k256::{ - ecdsa::{Signature, SigningKey, VerifyingKey}, - pkcs8::{EncodePublicKey, LineEnding}, - schnorr::signature::Signer, - PublicKey, - }, - opa::{ - codec::{NewPublicKeyV1, SignedOperationPayloadV1}, - BootstrapRoot, NewPublicKey, OpaSubmission, Operation, Payload, Policy, RegisterKey, - RotateKey, SetPolicy, SignedOperation, SignedOperationPayload, - }, + k256::{ + ecdsa::{Signature, SigningKey, VerifyingKey}, + pkcs8::{EncodePublicKey, LineEnding}, + schnorr::signature::Signer, + PublicKey, + }, + opa::{ + codec::{NewPublicKeyV1, SignedOperationPayloadV1}, + BootstrapRoot, NewPublicKey, OpaSubmission, Operation, Payload, Policy, RegisterKey, + RotateKey, SetPolicy, SignedOperation, SignedOperationPayload, + }, }; use subxt::ext::codec::Encode; use uuid::Uuid; fn bootstrap_root(public_key: VerifyingKey) -> BootstrapRoot { - let public_key: PublicKey = public_key.into(); - BootstrapRoot { public_key: public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into() } + let public_key: PublicKey = public_key.into(); + BootstrapRoot { public_key: public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into() } } fn register_key( - id: impl AsRef, - public_key: &VerifyingKey, - overwrite_existing: bool, + id: impl AsRef, + public_key: &VerifyingKey, + overwrite_existing: bool, ) -> RegisterKey { - let public_key: PublicKey = public_key.into(); - RegisterKey { - id: id.as_ref().to_string(), - public_key: public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), - overwrite_existing, - } + let public_key: PublicKey = public_key.into(); 
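Every operation built here carries its verifying key as SubjectPublicKeyInfo PEM with CRLF line endings. A standalone sketch of that conversion, using the `k256` crate directly rather than the workspace's `common` re-exports (the `pem` feature and a `rand_core` `OsRng` are assumed):

```rust
use k256::{
	ecdsa::SigningKey,
	pkcs8::{EncodePublicKey, LineEnding},
	PublicKey,
};
use rand_core::OsRng;

// Mirror the builders above: lift the ecdsa verifying key into a k256
// `PublicKey`, then emit SPKI PEM with CRLF line endings.
fn verifying_key_pem(signing: &SigningKey) -> String {
	let public: PublicKey = signing.verifying_key().into();
	public.to_public_key_pem(LineEnding::CRLF).expect("PEM encoding")
}

fn main() {
	let key = SigningKey::random(&mut OsRng);
	println!("{}", verifying_key_pem(&key));
}
```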
+ RegisterKey { + id: id.as_ref().to_string(), + public_key: public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), + overwrite_existing, + } } fn rotate_key(id: impl AsRef, old_key: &SigningKey, new_key: &SigningKey) -> RotateKey { - let new_verifying_public_key: PublicKey = new_key.verifying_key().into(); - let new_key_message = NewPublicKey { - id: id.as_ref().to_string(), - public_key: new_verifying_public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), - }; - - let new_key_bytes = NewPublicKeyV1::from(new_key_message.clone()).encode(); - - let old_signature: Signature = old_key.sign(&new_key_bytes); - let old_verifying_key = old_key.verifying_key(); - let old_verifying_public_key: PublicKey = old_verifying_key.into(); - - let new_signature: Signature = new_key.sign(&new_key_bytes); - - RotateKey { - payload: new_key_message, - previous_signature: old_signature.to_vec(), - previous_signing_key: old_verifying_public_key - .to_public_key_pem(LineEnding::CRLF) - .unwrap() - .into(), - new_signature: new_signature.to_vec(), - new_signing_key: new_verifying_public_key - .to_public_key_pem(LineEnding::CRLF) - .unwrap() - .into(), - } + let new_verifying_public_key: PublicKey = new_key.verifying_key().into(); + let new_key_message = NewPublicKey { + id: id.as_ref().to_string(), + public_key: new_verifying_public_key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), + }; + + let new_key_bytes = NewPublicKeyV1::from(new_key_message.clone()).encode(); + + let old_signature: Signature = old_key.sign(&new_key_bytes); + let old_verifying_key = old_key.verifying_key(); + let old_verifying_public_key: PublicKey = old_verifying_key.into(); + + let new_signature: Signature = new_key.sign(&new_key_bytes); + + RotateKey { + payload: new_key_message, + previous_signature: old_signature.to_vec(), + previous_signing_key: old_verifying_public_key + .to_public_key_pem(LineEnding::CRLF) + .unwrap() + .into(), + new_signature: new_signature.to_vec(), + new_signing_key: new_verifying_public_key + .to_public_key_pem(LineEnding::CRLF) + .unwrap() + .into(), + } } fn set_policy(id: impl AsRef, policy: Vec) -> SetPolicy { - SetPolicy { id: id.as_ref().to_owned(), policy: Policy::new(policy) } + SetPolicy { id: id.as_ref().to_owned(), policy: Policy::new(policy) } } enum BuildingMessage { - BootstrapRoot(BootstrapRoot), - RegisterKey(SignedOperation), - RotateKey(SignedOperation), - SetPolicy(SignedOperation), + BootstrapRoot(BootstrapRoot), + RegisterKey(SignedOperation), + RotateKey(SignedOperation), + SetPolicy(SignedOperation), } pub struct SubmissionBuilder { - message: Option, + message: Option, } impl SubmissionBuilder { - pub fn bootstrap_root(public_key: VerifyingKey) -> Self { - Self { message: Some(BuildingMessage::BootstrapRoot(bootstrap_root(public_key))) } - } - - pub async fn register_key( - id: impl AsRef, - new_key: &str, - signer: &ChronicleSigning, - overwrite_existing: bool, - ) -> Result { - let operation = SignedOperationPayload { - operation: Operation::RegisterKey(register_key( - id, - &signer.verifying_key(OPA_NAMESPACE, new_key).await?, - overwrite_existing, - )), - }; - - let signature = signer - .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) - .await?; - let key: PublicKey = signer.opa_verifying().await?.into(); - let signed_operation = SignedOperation { - payload: operation, - signature: signature.to_vec(), - verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), - }; - Ok(Self { message: 
Some(BuildingMessage::RegisterKey(signed_operation)) }) - } - - pub async fn rotate_key( - id: &str, - signer: &ChronicleSigning, - old_key: &str, - new_key: &str, - ) -> Result { - let extract_key: Arc>>> = - Arc::new(Mutex::new(None.into())); - - signer - .with_signing_key(OPA_NAMESPACE, old_key, |old_key| { - extract_key.lock().unwrap().replace(Some(old_key.clone())); - }) - .await?; - - let old_key = extract_key.lock().unwrap().borrow().clone().unwrap(); - - signer - .with_signing_key(OPA_NAMESPACE, new_key, |new_key| { - extract_key.lock().unwrap().replace(Some(new_key.clone())); - }) - .await?; - - let new_key = extract_key.lock().unwrap().borrow().clone().unwrap(); - - let operation = SignedOperationPayload { - operation: Operation::RotateKey(rotate_key(id, &old_key, &new_key)), - }; - - let signature = signer - .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) - .await?; - let key: PublicKey = signer.opa_verifying().await?.into(); - - let signed_operation = SignedOperation { - payload: operation, - signature, - verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), - }; - Ok(Self { message: Some(BuildingMessage::RotateKey(signed_operation)) }) - } - - pub async fn set_policy( - id: &str, - policy: Vec, - signer: &ChronicleSigning, - ) -> Result { - let operation = - SignedOperationPayload { operation: Operation::SetPolicy(set_policy(id, policy)) }; - let signature = signer - .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) - .await?; - let key: PublicKey = signer.opa_verifying().await?.into(); - - let signed_operation = SignedOperation { - payload: operation, - signature, - verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), - }; - - Ok(Self { message: Some(BuildingMessage::SetPolicy(signed_operation)) }) - } - - pub fn build(self, span_id: u64, correlation_id: Uuid) -> OpaSubmission { - OpaSubmission { - span_id, - correlation_id: correlation_id.into_bytes(), - version: "1.0".to_string(), - payload: match self.message { - Some(BuildingMessage::BootstrapRoot(message)) => Payload::BootstrapRoot(message), - Some(BuildingMessage::RotateKey(message)) => Payload::SignedOperation(message), - Some(BuildingMessage::SetPolicy(message)) => Payload::SignedOperation(message), - Some(BuildingMessage::RegisterKey(message)) => Payload::SignedOperation(message), - None => panic!("No message to build"), - }, - } - } + pub fn bootstrap_root(public_key: VerifyingKey) -> Self { + Self { message: Some(BuildingMessage::BootstrapRoot(bootstrap_root(public_key))) } + } + + pub async fn register_key( + id: impl AsRef, + new_key: &str, + signer: &ChronicleSigning, + overwrite_existing: bool, + ) -> Result { + let operation = SignedOperationPayload { + operation: Operation::RegisterKey(register_key( + id, + &signer.verifying_key(OPA_NAMESPACE, new_key).await?, + overwrite_existing, + )), + }; + + let signature = signer + .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) + .await?; + let key: PublicKey = signer.opa_verifying().await?.into(); + let signed_operation = SignedOperation { + payload: operation, + signature: signature.to_vec(), + verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), + }; + Ok(Self { message: Some(BuildingMessage::RegisterKey(signed_operation)) }) + } + + pub async fn rotate_key( + id: &str, + signer: &ChronicleSigning, + old_key: &str, + new_key: &str, + ) -> Result { + let extract_key: Arc>>> = + Arc::new(Mutex::new(None.into())); + + signer + 
.with_signing_key(OPA_NAMESPACE, old_key, |old_key| { + extract_key.lock().unwrap().replace(Some(old_key.clone())); + }) + .await?; + + let old_key = extract_key.lock().unwrap().borrow().clone().unwrap(); + + signer + .with_signing_key(OPA_NAMESPACE, new_key, |new_key| { + extract_key.lock().unwrap().replace(Some(new_key.clone())); + }) + .await?; + + let new_key = extract_key.lock().unwrap().borrow().clone().unwrap(); + + let operation = SignedOperationPayload { + operation: Operation::RotateKey(rotate_key(id, &old_key, &new_key)), + }; + + let signature = signer + .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) + .await?; + let key: PublicKey = signer.opa_verifying().await?.into(); + + let signed_operation = SignedOperation { + payload: operation, + signature, + verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), + }; + Ok(Self { message: Some(BuildingMessage::RotateKey(signed_operation)) }) + } + + pub async fn set_policy( + id: &str, + policy: Vec, + signer: &ChronicleSigning, + ) -> Result { + let operation = + SignedOperationPayload { operation: Operation::SetPolicy(set_policy(id, policy)) }; + let signature = signer + .opa_sign(&SignedOperationPayloadV1::from(operation.clone()).encode()) + .await?; + let key: PublicKey = signer.opa_verifying().await?.into(); + + let signed_operation = SignedOperation { + payload: operation, + signature, + verifying_key: key.to_public_key_pem(LineEnding::CRLF).unwrap().into(), + }; + + Ok(Self { message: Some(BuildingMessage::SetPolicy(signed_operation)) }) + } + + pub fn build(self, span_id: u64, correlation_id: Uuid) -> OpaSubmission { + OpaSubmission { + span_id, + correlation_id: correlation_id.into_bytes(), + version: "1.0".to_string(), + payload: match self.message { + Some(BuildingMessage::BootstrapRoot(message)) => Payload::BootstrapRoot(message), + Some(BuildingMessage::RotateKey(message)) => Payload::SignedOperation(message), + Some(BuildingMessage::SetPolicy(message)) => Payload::SignedOperation(message), + Some(BuildingMessage::RegisterKey(message)) => Payload::SignedOperation(message), + None => panic!("No message to build"), + }, + } + } } diff --git a/crates/protocol-substrate-opa/src/transaction.rs b/crates/protocol-substrate-opa/src/transaction.rs index 01d8c9f64..f32887a01 100644 --- a/crates/protocol-substrate-opa/src/transaction.rs +++ b/crates/protocol-substrate-opa/src/transaction.rs @@ -1,5 +1,5 @@ use chronicle_signing::{ - ChronicleSigning, OwnedSecret, SecretError, BATCHER_NAMESPACE, BATCHER_PK, + ChronicleSigning, OwnedSecret, SecretError, BATCHER_NAMESPACE, BATCHER_PK, }; use common::opa::{codec::OpaSubmissionV1, OpaSubmission}; use protocol_abstract::LedgerTransaction; @@ -9,150 +9,150 @@ use thiserror::Error; #[derive(Debug, Error)] pub enum TransactionError { - #[error("Secret error: {0}")] - SecretError( - #[from] - #[source] - SecretError, - ), - #[error("Secret string error: {0}")] - SecretStringError( - #[from] - #[source] - SecretStringError, - ), + #[error("Secret error: {0}")] + SecretError( + #[from] + #[source] + SecretError, + ), + #[error("Secret string error: {0}")] + SecretStringError( + #[from] + #[source] + SecretStringError, + ), } #[derive(Clone)] // Note, the subxt client requires synchronous, infallible access to the signing keypair, so we // extract it on construction pub enum OpaTransaction { - BootstrapRoot(OpaSubmission, ChronicleSigning, subxt::ext::sp_core::ecdsa::Pair), - RotateRoot(OpaSubmission, ChronicleSigning, subxt::ext::sp_core::ecdsa::Pair), - 
RegisterKey(OpaSubmission, ChronicleSigning, String, bool, subxt::ext::sp_core::ecdsa::Pair), - RotateKey(OpaSubmission, ChronicleSigning, String, subxt::ext::sp_core::ecdsa::Pair), - SetPolicy(OpaSubmission, ChronicleSigning, String, subxt::ext::sp_core::ecdsa::Pair), + BootstrapRoot(OpaSubmission, ChronicleSigning, subxt::ext::sp_core::ecdsa::Pair), + RotateRoot(OpaSubmission, ChronicleSigning, subxt::ext::sp_core::ecdsa::Pair), + RegisterKey(OpaSubmission, ChronicleSigning, String, bool, subxt::ext::sp_core::ecdsa::Pair), + RotateKey(OpaSubmission, ChronicleSigning, String, subxt::ext::sp_core::ecdsa::Pair), + SetPolicy(OpaSubmission, ChronicleSigning, String, subxt::ext::sp_core::ecdsa::Pair), } impl OpaTransaction { - pub async fn bootstrap_root( - opa_submission: OpaSubmission, - signer: &ChronicleSigning, - ) -> Result { - Ok(Self::BootstrapRoot( - opa_submission, - signer.to_owned(), - subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - )?, - )) - } + pub async fn bootstrap_root( + opa_submission: OpaSubmission, + signer: &ChronicleSigning, + ) -> Result { + Ok(Self::BootstrapRoot( + opa_submission, + signer.to_owned(), + subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + )?, + )) + } - pub async fn rotate_root( - opa_submission: OpaSubmission, - signer: &ChronicleSigning, - ) -> Result { - Ok(Self::RotateRoot( - opa_submission, - signer.to_owned(), - subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - )?, - )) - } + pub async fn rotate_root( + opa_submission: OpaSubmission, + signer: &ChronicleSigning, + ) -> Result { + Ok(Self::RotateRoot( + opa_submission, + signer.to_owned(), + subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + )?, + )) + } - pub async fn register_key( - name: impl AsRef, - opa_submission: OpaSubmission, - signer: &ChronicleSigning, - overwrite_existing: bool, - ) -> Result { - Ok(Self::RegisterKey( - opa_submission, - signer.to_owned(), - name.as_ref().to_owned(), - overwrite_existing, - subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - )?, - )) - } + pub async fn register_key( + name: impl AsRef, + opa_submission: OpaSubmission, + signer: &ChronicleSigning, + overwrite_existing: bool, + ) -> Result { + Ok(Self::RegisterKey( + opa_submission, + signer.to_owned(), + name.as_ref().to_owned(), + overwrite_existing, + subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + )?, + )) + } - pub async fn rotate_key( - name: impl AsRef, - opa_submission: OpaSubmission, - signer: &ChronicleSigning, - ) -> Result { - Ok(Self::RegisterKey( - opa_submission, - signer.to_owned(), - name.as_ref().to_owned(), - false, - subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - )?, - )) - } + pub async fn rotate_key( + name: impl AsRef, + opa_submission: OpaSubmission, + signer: &ChronicleSigning, + ) -> Result { + Ok(Self::RegisterKey( + opa_submission, + signer.to_owned(), + name.as_ref().to_owned(), + false, + subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + )?, + 
)) + } - pub async fn set_policy( - name: impl AsRef, - opa_submission: OpaSubmission, - signer: &ChronicleSigning, - ) -> Result { - Ok(Self::SetPolicy( - opa_submission, - signer.to_owned(), - name.as_ref().to_owned(), - subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( - &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), - )?, - )) - } + pub async fn set_policy( + name: impl AsRef, + opa_submission: OpaSubmission, + signer: &ChronicleSigning, + ) -> Result { + Ok(Self::SetPolicy( + opa_submission, + signer.to_owned(), + name.as_ref().to_owned(), + subxt::ext::sp_core::ecdsa::Pair::from_seed_slice( + &signer.copy_signing_key(BATCHER_NAMESPACE, BATCHER_PK).await?.to_bytes(), + )?, + )) + } - pub fn account_key(&self) -> &subxt::ext::sp_core::ecdsa::Pair { - match self { - OpaTransaction::BootstrapRoot(_, _, k) => k, - OpaTransaction::RotateRoot(_, _, k) => k, - OpaTransaction::RegisterKey(_, _, _, _, k) => k, - OpaTransaction::RotateKey(_, _, _, k) => k, - OpaTransaction::SetPolicy(_, _, _, k) => k, - } - } + pub fn account_key(&self) -> &subxt::ext::sp_core::ecdsa::Pair { + match self { + OpaTransaction::BootstrapRoot(_, _, k) => k, + OpaTransaction::RotateRoot(_, _, k) => k, + OpaTransaction::RegisterKey(_, _, _, _, k) => k, + OpaTransaction::RotateKey(_, _, _, k) => k, + OpaTransaction::SetPolicy(_, _, _, k) => k, + } + } - pub fn submission(&self) -> &OpaSubmission { - match self { - OpaTransaction::BootstrapRoot(o, _, _) => o, - OpaTransaction::RotateRoot(o, _, _) => o, - OpaTransaction::RegisterKey(o, _, _, _, _) => o, - OpaTransaction::RotateKey(o, _, _, _) => o, - OpaTransaction::SetPolicy(o, _, _, _) => o, - } - } + pub fn submission(&self) -> &OpaSubmission { + match self { + OpaTransaction::BootstrapRoot(o, _, _) => o, + OpaTransaction::RotateRoot(o, _, _) => o, + OpaTransaction::RegisterKey(o, _, _, _, _) => o, + OpaTransaction::RotateKey(o, _, _, _) => o, + OpaTransaction::SetPolicy(o, _, _, _) => o, + } + } } #[async_trait::async_trait] impl LedgerTransaction for OpaTransaction { - type Error = SecretError; - type Payload = OpaSubmissionV1; + type Error = SecretError; + type Payload = OpaSubmissionV1; - async fn as_payload(&self) -> Result { - Ok(match self.clone() { - OpaTransaction::BootstrapRoot(o, _, _) => o, - OpaTransaction::RotateRoot(o, _, _) => o, - OpaTransaction::RegisterKey(o, _, _, _, _) => o, - OpaTransaction::RotateKey(o, _, _, _) => o, - OpaTransaction::SetPolicy(o, _, _, _) => o, - } - .into()) - } + async fn as_payload(&self) -> Result { + Ok(match self.clone() { + OpaTransaction::BootstrapRoot(o, _, _) => o, + OpaTransaction::RotateRoot(o, _, _) => o, + OpaTransaction::RegisterKey(o, _, _, _, _) => o, + OpaTransaction::RotateKey(o, _, _, _) => o, + OpaTransaction::SetPolicy(o, _, _, _) => o, + } + .into()) + } - fn correlation_id(&self) -> [u8; 16] { - match self { - OpaTransaction::BootstrapRoot(o, _, _) => o.correlation_id, - OpaTransaction::RotateRoot(o, _, _) => o.correlation_id, - OpaTransaction::RegisterKey(o, _, _, _, _) => o.correlation_id, - OpaTransaction::RotateKey(o, _, _, _) => o.correlation_id, - OpaTransaction::SetPolicy(o, _, _, _) => o.correlation_id, - } - } + fn correlation_id(&self) -> [u8; 16] { + match self { + OpaTransaction::BootstrapRoot(o, _, _) => o.correlation_id, + OpaTransaction::RotateRoot(o, _, _) => o.correlation_id, + OpaTransaction::RegisterKey(o, _, _, _, _) => o.correlation_id, + OpaTransaction::RotateKey(o, _, _, _) => o.correlation_id, + OpaTransaction::SetPolicy(o, _, _, _) => 
o.correlation_id, + } + } } diff --git a/crates/protocol-substrate/src/subxt_client.rs b/crates/protocol-substrate/src/subxt_client.rs index 38cca4d10..5d8b85fad 100644 --- a/crates/protocol-substrate/src/subxt_client.rs +++ b/crates/protocol-substrate/src/subxt_client.rs @@ -2,601 +2,601 @@ use std::{convert::Infallible, marker::PhantomData, net::SocketAddr, time::Durat use derivative::Derivative; use futures::{ - stream::{self, BoxStream}, - FutureExt, StreamExt, TryFutureExt, TryStreamExt, + stream::{self, BoxStream}, + FutureExt, StreamExt, TryFutureExt, TryStreamExt, }; use pallet_chronicle::ChronicleTransactionId; use subxt::{ - backend::BackendExt, - config::ExtrinsicParams, - error::MetadataError, - ext::{ - codec::{Decode, Encode}, - sp_core::{twox_128, H256}, - }, - metadata::{ - types::{PalletMetadata, StorageEntryMetadata, StorageEntryType}, - DecodeWithMetadata, EncodeWithMetadata, - }, - storage::{DynamicAddress, StorageAddress}, - tx::{Payload, SubmittableExtrinsic}, - utils::{AccountId32, MultiAddress, MultiSignature}, - Metadata, OnlineClient, + backend::BackendExt, + config::ExtrinsicParams, + error::MetadataError, + ext::{ + codec::{Decode, Encode}, + sp_core::{twox_128, H256}, + }, + metadata::{ + types::{PalletMetadata, StorageEntryMetadata, StorageEntryType}, + DecodeWithMetadata, EncodeWithMetadata, + }, + storage::{DynamicAddress, StorageAddress}, + tx::{Payload, SubmittableExtrinsic}, + utils::{AccountId32, MultiAddress, MultiSignature}, + Metadata, OnlineClient, }; pub use subxt::Config; use protocol_abstract::{ - BlockId, FromBlock, LedgerEvent, LedgerEventCodec, LedgerEventContext, LedgerReader, - LedgerTransaction, LedgerWriter, Position, RetryLedger, WriteConsistency, + BlockId, FromBlock, LedgerEvent, LedgerEventCodec, LedgerEventContext, LedgerReader, + LedgerTransaction, LedgerWriter, Position, RetryLedger, WriteConsistency, }; #[derive(Derivative)] #[derivative(Clone(bound = ""))] pub struct SubstrateClient { - pub client: OnlineClient, - _p: PhantomData<(EC, T)>, + pub client: OnlineClient, + _p: PhantomData<(EC, T)>, } type ExtrinsicResult = -Result<(SubmittableExtrinsic>, [u8; 16]), subxt::Error>; + Result<(SubmittableExtrinsic>, [u8; 16]), subxt::Error>; impl SubstrateClient - where - C: subxt::Config< - Hash=subxt::utils::H256, - Address=MultiAddress, - Signature=MultiSignature, - >, - >::OtherParams: Default, - T: LedgerTransaction + Send + Sync, - ::Payload: subxt::ext::scale_encode::EncodeAsFields, - EC: LedgerEventCodec + Send + Sync, +where + C: subxt::Config< + Hash = subxt::utils::H256, + Address = MultiAddress, + Signature = MultiSignature, + >, + >::OtherParams: Default, + T: LedgerTransaction + Send + Sync, + ::Payload: subxt::ext::scale_encode::EncodeAsFields, + EC: LedgerEventCodec + Send + Sync, { - pub async fn connect(url: impl AsRef) -> Result { - Ok(Self { client: OnlineClient::from_insecure_url(url).await?, _p: Default::default() }) - } - - pub async fn connect_socket_addr(socket: SocketAddr) -> Result { - tracing::info!("Connecting to Substrate client via SocketAddr: {:?}", socket); - let client_result = OnlineClient::from_insecure_url(socket.to_string()).await; - match client_result { - Ok(client) => { - tracing::info!("Successfully connected to Substrate client."); - Ok(Self { client, _p: Default::default() }) - } - Err(e) => { - tracing::error!("Failed to connect to Substrate client: {:?}", e); - Err(SubxtClientError::from(e)) - } - } - } - - pub fn retry(&self, duration: Duration) -> RetryLedger - where - Self: LedgerReader + 
Sized, - { - tracing::debug!(target: "substrate_client", "Creating a retryable ledger reader."); - RetryLedger::new(self.clone(), duration) - } - - // TODO: bring the pallet / call name in from trait - - #[tracing::instrument(level = "trace", skip(self, signer, correlation_id, operations), fields( + pub async fn connect(url: impl AsRef) -> Result { + Ok(Self { client: OnlineClient::from_insecure_url(url).await?, _p: Default::default() }) + } + + pub async fn connect_socket_addr(socket: SocketAddr) -> Result { + tracing::info!("Connecting to Substrate client via SocketAddr: {:?}", socket); + let client_result = OnlineClient::from_insecure_url(socket.to_string()).await; + match client_result { + Ok(client) => { + tracing::info!("Successfully connected to Substrate client."); + Ok(Self { client, _p: Default::default() }) + }, + Err(e) => { + tracing::error!("Failed to connect to Substrate client: {:?}", e); + Err(SubxtClientError::from(e)) + }, + } + } + + pub fn retry(&self, duration: Duration) -> RetryLedger + where + Self: LedgerReader + Sized, + { + tracing::debug!(target: "substrate_client", "Creating a retryable ledger reader."); + RetryLedger::new(self.clone(), duration) + } + + // TODO: bring the pallet / call name in from trait + + #[tracing::instrument(level = "trace", skip(self, signer, correlation_id, operations), fields( correlation_id = % hex::encode(correlation_id), ret ))] - pub async fn create_extrinsic + Send>( - &self, - signer: &S, - correlation_id: [u8; 16], - operations: &T, - ) -> ExtrinsicResult { - let payload = Payload::new("Chronicle", "apply", operations.as_payload().await.unwrap()); - - self.client - .tx() - .create_signed(&payload, signer, Default::default()) - .await - .map(|extrinsic| (extrinsic, correlation_id)) - } - - pub async fn send_extrinsic( - &self, - consistency: WriteConsistency, - extrinsic: (SubmittableExtrinsic>, [u8; 16]), - ) -> Result { - extrinsic - .0 - .submit_and_watch() - .and_then(|progress| match consistency { - WriteConsistency::Weak => futures::future::ok(()).boxed(), - WriteConsistency::Strong => progress - .wait_for_finalized_success() - .and_then(|_| futures::future::ok(())) - .boxed(), - }) - .await - .map(|_| extrinsic.1.into()) - .map_err(|e| (e, ChronicleTransactionId::from(extrinsic.1))) - } + pub async fn create_extrinsic + Send>( + &self, + signer: &S, + correlation_id: [u8; 16], + operations: &T, + ) -> ExtrinsicResult { + let payload = Payload::new("Chronicle", "apply", operations.as_payload().await.unwrap()); + + self.client + .tx() + .create_signed(&payload, signer, Default::default()) + .await + .map(|extrinsic| (extrinsic, correlation_id)) + } + + pub async fn send_extrinsic( + &self, + consistency: WriteConsistency, + extrinsic: (SubmittableExtrinsic>, [u8; 16]), + ) -> Result { + extrinsic + .0 + .submit_and_watch() + .and_then(|progress| match consistency { + WriteConsistency::Weak => futures::future::ok(()).boxed(), + WriteConsistency::Strong => progress + .wait_for_finalized_success() + .and_then(|_| futures::future::ok(())) + .boxed(), + }) + .await + .map(|_| extrinsic.1.into()) + .map_err(|e| (e, ChronicleTransactionId::from(extrinsic.1))) + } } #[derive(Debug, thiserror::Error)] pub enum SubxtClientError { - #[error("Subxt error: {0}")] - SubxtError( - #[from] - #[source] - subxt::Error, - ), - - #[error("Invalid block")] - InvalidBlock, - - #[error("Codec: {0}")] - Codec( - #[from] - #[source] - subxt::ext::codec::Error, - ), - - #[error("Decode: {0}")] - Decode( - #[from] - #[source] - 
subxt::error::DecodeError, - ), - - #[error("Serde: {0}")] - Serde( - #[from] - #[source] - subxt::ext::scale_value::serde::SerializerError, - ), + #[error("Subxt error: {0}")] + SubxtError( + #[from] + #[source] + subxt::Error, + ), + + #[error("Invalid block")] + InvalidBlock, + + #[error("Codec: {0}")] + Codec( + #[from] + #[source] + subxt::ext::codec::Error, + ), + + #[error("Decode: {0}")] + Decode( + #[from] + #[source] + subxt::error::DecodeError, + ), + + #[error("Serde: {0}")] + Serde( + #[from] + #[source] + subxt::ext::scale_value::serde::SerializerError, + ), } impl From for SubxtClientError { - fn from(_value: Infallible) -> Self { - unreachable!() - } + fn from(_value: Infallible) -> Self { + unreachable!() + } } impl SubstrateClient - where - C: subxt::Config, - H: subxt::config::Header + Send + Sync + Decode + Encode, - EC: LedgerEventCodec> - + Send - + Sync, - T: LedgerTransaction + Send + Sync, +where + C: subxt::Config, + H: subxt::config::Header + Send + Sync + Decode + Encode, + EC: LedgerEventCodec> + + Send + + Sync, + T: LedgerTransaction + Send + Sync, { - // Return child blocks of from_block, limiting to num_blocks if not none - async fn block_hashes_from( - &self, - from_block: C::Hash, - num_blocks: Option, - ) -> Result>, SubxtClientError> { - // Get the block at hash - let block = self.client.blocks().at(from_block).await?; - - let from_block_num = block.number(); - - let hashes = stream::unfold( - (self.client.clone(), from_block_num), - move |(client, block_num)| async move { - if let Some(num_blocks) = num_blocks { - if num_blocks == block_num { - return None; - } - } - - let block_hash: Result = client - .backend() - .call_decoding( - "chain_getBlockHash", - Some(&vec![block_num].encode()), - subxt::utils::H256::zero(), - ) - .await - .map_err(SubxtClientError::from); - - Some((block_hash, (client, block_num + 1))) - }, - ); - - Ok(Box::pin(hashes)) - } - - // Return events from `number_of_blocks` blocks from the client, starting at `from_block` - async fn events_for_block( - &self, - from_block: C::Hash, - ) -> Result::Sink>>, SubxtClientError> { - let header = self.client.backend().block_header(from_block).await?; - let block_num = match header { - Some(header) => Ok(header.number()), - None => { - tracing::error!("Block header is None"); - Err(SubxtClientError::InvalidBlock) - } - }?; - - let events_for_block = match self.client.events().at(from_block).await { - Ok(events) => Ok(events), - Err(e) => { - tracing::error!("Failed to get events for block: {}", e); - Err(SubxtClientError::InvalidBlock) - } - }?; - - let events_for_block = - stream::unfold(events_for_block.iter(), |mut events_for_block| async move { - match events_for_block.next() { - Some(Ok(event)) => match EC::maybe_deserialize(event).await { - Ok(Some(event)) => Some((event, events_for_block)), - _ => None, - }, - Some(Err(e)) => { - tracing::error!("Cannot fetch event {}", e); - None - } - _ => None, - } - }); - - let event_stream = events_for_block.map(move |(event, span)| { - let correlation_id = event.correlation_id(); - ( - event, - ChronicleTransactionId::from(correlation_id), - BlockId::Block(from_block), - Position::from(block_num), - span, - ) - }); - - Ok(event_stream.boxed()) - } - - async fn stream_finalized_events( - &self, - ) -> Result::Sink>>, SubxtClientError> { - let blocks = self.client.blocks().subscribe_finalized().await?; - - let parsed_events = blocks - .map_err(SubxtClientError::from) - .and_then(|block| async move { - let block_num = block.number(); - let 
block_hash = block.hash(); - - let events = block.events().await.map_err(SubxtClientError::from); - - match events { - Err(e) => Err(e), - Ok(events) => { - let events = events - .iter() - .filter_map(|event| { - event - .map_err(SubxtClientError::from) - .and_then(|event| { - futures::executor::block_on(EC::maybe_deserialize(event)) - }) - .transpose() - .map(|event| { - event.map(|(event, span)| { - let correlation_id = event.correlation_id(); - ( - event, - ChronicleTransactionId::from(correlation_id), - BlockId::Block(block_hash), - Position::from(block_num), - span, - ) - }) - }) - }) - .collect::>(); - Ok(stream::iter(events)) - } - } - }) - .boxed(); - - //Unfold and terminate stream on error - let flattened_stream = stream::unfold(parsed_events, |mut parsed_events| async move { - match parsed_events.next().await { - Some(Ok(events)) => Some((events, parsed_events)), - Some(Err(e)) => { - tracing::error!("Subscription error {}", e); - None - } - _ => None, - } - }) - .flatten() - .boxed(); - - // Terminate on parse error in flattened stream, - let flattened_stream = - stream::unfold(flattened_stream, |mut flattened_stream| async move { - match flattened_stream.next().await { - Some(Err(e)) => { - tracing::error!("Event parse error {}", e); - None - } - Some(Ok(event)) => Some((event, flattened_stream)), - None => None, - } - }) - .boxed(); - - Ok(flattened_stream) - } - - async fn historical_events( - &self, - from_block: C::Hash, - num_blocks: Option, - ) -> Result::Sink>>, SubxtClientError> { - let from_block_clone = self; - let block_hashes = from_block_clone.block_hashes_from(from_block, num_blocks).await?; - - let events = stream::unfold( - (block_hashes, self), - move |(mut block_hashes, self_clone)| async move { - let next_block_hash = block_hashes.next().await; - match next_block_hash { - Some(Ok(block_hash)) => { - let events = self_clone.events_for_block(block_hash).await; - match events { - Ok(events) => Some((events, (block_hashes, self_clone))), - Err(e) => { - tracing::error!("Subscription error {}", e); - None - } - } - } - Some(Err(e)) => { - tracing::error!("Subscription error {}", e); - None - } - _ => None, - } - }, - ) - .flatten() - .boxed(); - - Ok(events) - } + // Return child blocks of from_block, limiting to num_blocks if not none + async fn block_hashes_from( + &self, + from_block: C::Hash, + num_blocks: Option, + ) -> Result>, SubxtClientError> { + // Get the block at hash + let block = self.client.blocks().at(from_block).await?; + + let from_block_num = block.number(); + + let hashes = stream::unfold( + (self.client.clone(), from_block_num), + move |(client, block_num)| async move { + if let Some(num_blocks) = num_blocks { + if num_blocks == block_num { + return None; + } + } + + let block_hash: Result = client + .backend() + .call_decoding( + "chain_getBlockHash", + Some(&vec![block_num].encode()), + subxt::utils::H256::zero(), + ) + .await + .map_err(SubxtClientError::from); + + Some((block_hash, (client, block_num + 1))) + }, + ); + + Ok(Box::pin(hashes)) + } + + // Return events from `number_of_blocks` blocks from the client, starting at `from_block` + async fn events_for_block( + &self, + from_block: C::Hash, + ) -> Result::Sink>>, SubxtClientError> { + let header = self.client.backend().block_header(from_block).await?; + let block_num = match header { + Some(header) => Ok(header.number()), + None => { + tracing::error!("Block header is None"); + Err(SubxtClientError::InvalidBlock) + }, + }?; + + let events_for_block = match 
self.client.events().at(from_block).await { + Ok(events) => Ok(events), + Err(e) => { + tracing::error!("Failed to get events for block: {}", e); + Err(SubxtClientError::InvalidBlock) + }, + }?; + + let events_for_block = + stream::unfold(events_for_block.iter(), |mut events_for_block| async move { + match events_for_block.next() { + Some(Ok(event)) => match EC::maybe_deserialize(event).await { + Ok(Some(event)) => Some((event, events_for_block)), + _ => None, + }, + Some(Err(e)) => { + tracing::error!("Cannot fetch event {}", e); + None + }, + _ => None, + } + }); + + let event_stream = events_for_block.map(move |(event, span)| { + let correlation_id = event.correlation_id(); + ( + event, + ChronicleTransactionId::from(correlation_id), + BlockId::Block(from_block), + Position::from(block_num), + span, + ) + }); + + Ok(event_stream.boxed()) + } + + async fn stream_finalized_events( + &self, + ) -> Result::Sink>>, SubxtClientError> { + let blocks = self.client.blocks().subscribe_finalized().await?; + + let parsed_events = blocks + .map_err(SubxtClientError::from) + .and_then(|block| async move { + let block_num = block.number(); + let block_hash = block.hash(); + + let events = block.events().await.map_err(SubxtClientError::from); + + match events { + Err(e) => Err(e), + Ok(events) => { + let events = events + .iter() + .filter_map(|event| { + event + .map_err(SubxtClientError::from) + .and_then(|event| { + futures::executor::block_on(EC::maybe_deserialize(event)) + }) + .transpose() + .map(|event| { + event.map(|(event, span)| { + let correlation_id = event.correlation_id(); + ( + event, + ChronicleTransactionId::from(correlation_id), + BlockId::Block(block_hash), + Position::from(block_num), + span, + ) + }) + }) + }) + .collect::>(); + Ok(stream::iter(events)) + }, + } + }) + .boxed(); + + //Unfold and terminate stream on error + let flattened_stream = stream::unfold(parsed_events, |mut parsed_events| async move { + match parsed_events.next().await { + Some(Ok(events)) => Some((events, parsed_events)), + Some(Err(e)) => { + tracing::error!("Subscription error {}", e); + None + }, + _ => None, + } + }) + .flatten() + .boxed(); + + // Terminate on parse error in flattened stream, + let flattened_stream = + stream::unfold(flattened_stream, |mut flattened_stream| async move { + match flattened_stream.next().await { + Some(Err(e)) => { + tracing::error!("Event parse error {}", e); + None + }, + Some(Ok(event)) => Some((event, flattened_stream)), + None => None, + } + }) + .boxed(); + + Ok(flattened_stream) + } + + async fn historical_events( + &self, + from_block: C::Hash, + num_blocks: Option, + ) -> Result::Sink>>, SubxtClientError> { + let from_block_clone = self; + let block_hashes = from_block_clone.block_hashes_from(from_block, num_blocks).await?; + + let events = stream::unfold( + (block_hashes, self), + move |(mut block_hashes, self_clone)| async move { + let next_block_hash = block_hashes.next().await; + match next_block_hash { + Some(Ok(block_hash)) => { + let events = self_clone.events_for_block(block_hash).await; + match events { + Ok(events) => Some((events, (block_hashes, self_clone))), + Err(e) => { + tracing::error!("Subscription error {}", e); + None + }, + } + }, + Some(Err(e)) => { + tracing::error!("Subscription error {}", e); + None + }, + _ => None, + } + }, + ) + .flatten() + .boxed(); + + Ok(events) + } } #[async_trait::async_trait] impl LedgerWriter for SubstrateClient - where - C: subxt::Config< - Address=MultiAddress, - AccountId=AccountId32, - Hash=H256, - 
Signature=MultiSignature, - >, - >::OtherParams: Default + Send, - E: LedgerEventCodec> + Send + Sync, - T: LedgerTransaction + Send + Sync + subxt::tx::Signer, - ::Payload: subxt::ext::scale_encode::EncodeAsFields, +where + C: subxt::Config< + Address = MultiAddress, + AccountId = AccountId32, + Hash = H256, + Signature = MultiSignature, + >, + >::OtherParams: Default + Send, + E: LedgerEventCodec> + Send + Sync, + T: LedgerTransaction + Send + Sync + subxt::tx::Signer, + ::Payload: subxt::ext::scale_encode::EncodeAsFields, { - type Error = SubxtClientError; - type Submittable = (SubmittableExtrinsic>, [u8; 16]); - type Transaction = T; - - async fn pre_submit( - &self, - tx: Self::Transaction, - ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { - let correlation_id = tx.correlation_id(); - let (ext, id) = self.create_extrinsic(&tx, correlation_id, &tx).await?; - - Ok(((ext, id), id.into())) - } - - async fn do_submit( - &self, - consistency: WriteConsistency, - submittable: Self::Submittable, - ) -> Result { - tracing::info!( + type Error = SubxtClientError; + type Submittable = (SubmittableExtrinsic>, [u8; 16]); + type Transaction = T; + + async fn pre_submit( + &self, + tx: Self::Transaction, + ) -> Result<(Self::Submittable, ChronicleTransactionId), Self::Error> { + let correlation_id = tx.correlation_id(); + let (ext, id) = self.create_extrinsic(&tx, correlation_id, &tx).await?; + + Ok(((ext, id), id.into())) + } + + async fn do_submit( + &self, + consistency: WriteConsistency, + submittable: Self::Submittable, + ) -> Result { + tracing::info!( target: "substrate_client", correlation_id = ?submittable.1, "Submitting extrinsic with correlation ID." ); - self.send_extrinsic(consistency, submittable) - .await - .map_err(|(e, id)| (e.into(), id)) - } + self.send_extrinsic(consistency, submittable) + .await + .map_err(|(e, id)| (e.into(), id)) + } } #[async_trait::async_trait] pub trait SubstrateStateReader { - type Error: std::error::Error; - /// Get the state entry at `address` - async fn get_state_entry( - &self, - pallet_name: &str, - entry_name: &str, - address: K, - ) -> Result, Self::Error>; + type Error: std::error::Error; + /// Get the state entry at `address` + async fn get_state_entry( + &self, + pallet_name: &str, + entry_name: &str, + address: K, + ) -> Result, Self::Error>; } pub(crate) fn validate_storage_address( - address: &Address, - pallet: PalletMetadata<'_>, + address: &Address, + pallet: PalletMetadata<'_>, ) -> Result<(), subxt::Error> { - if let Some(hash) = address.validation_hash() { - validate_storage(pallet, address.entry_name(), hash)?; - } - Ok(()) + if let Some(hash) = address.validation_hash() { + validate_storage(pallet, address.entry_name(), hash)?; + } + Ok(()) } /// Return details about the given storage entry. 
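(An aside before the metadata helpers that follow.) Whatever validation happens above, the key that is eventually fetched is mechanical: a storage entry's root key is `twox_128(pallet) ++ twox_128(entry)`, which is exactly what `write_storage_address_root_bytes` further down computes. A hedged sketch, with an illustrative pallet/entry pair:

```rust
use subxt::ext::sp_core::twox_128;

// Root key prefix for a storage entry; map entries append their hashed
// keys after these 32 bytes.
fn storage_root_key(pallet: &str, entry: &str) -> Vec<u8> {
	let mut key = Vec::with_capacity(32);
	key.extend(twox_128(pallet.as_bytes()));
	key.extend(twox_128(entry.as_bytes()));
	key
}

fn main() {
	// "Chronicle"/"OpaSettings" is a hypothetical example pair.
	println!("0x{}", hex::encode(storage_root_key("Chronicle", "OpaSettings")));
}
```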
 fn lookup_entry_details<'a>(
-	pallet_name: &str,
-	entry_name: &str,
-	metadata: &'a Metadata,
+	pallet_name: &str,
+	entry_name: &str,
+	metadata: &'a Metadata,
 ) -> Result<(PalletMetadata<'a>, &'a StorageEntryMetadata), subxt::Error> {
-	let pallet_metadata = metadata.pallet_by_name_err(pallet_name)?;
-	let storage_metadata = pallet_metadata
-		.storage()
-		.ok_or_else(|| MetadataError::StorageNotFoundInPallet(pallet_name.to_owned()))?;
-	let storage_entry = storage_metadata
-		.entry_by_name(entry_name)
-		.ok_or_else(|| MetadataError::StorageEntryNotFound(entry_name.to_owned()))?;
-	Ok((pallet_metadata, storage_entry))
+	let pallet_metadata = metadata.pallet_by_name_err(pallet_name)?;
+	let storage_metadata = pallet_metadata
+		.storage()
+		.ok_or_else(|| MetadataError::StorageNotFoundInPallet(pallet_name.to_owned()))?;
+	let storage_entry = storage_metadata
+		.entry_by_name(entry_name)
+		.ok_or_else(|| MetadataError::StorageEntryNotFound(entry_name.to_owned()))?;
+	Ok((pallet_metadata, storage_entry))
 }
 
 /// Validate a storage entry against the metadata.
 fn validate_storage(
-	pallet: PalletMetadata<'_>,
-	storage_name: &str,
-	hash: [u8; 32],
+	pallet: PalletMetadata<'_>,
+	storage_name: &str,
+	hash: [u8; 32],
 ) -> Result<(), subxt::Error> {
-	let Some(expected_hash) = pallet.storage_hash(storage_name) else {
-		return Err(MetadataError::IncompatibleCodegen.into());
-	};
-	if expected_hash != hash {
-		return Err(MetadataError::IncompatibleCodegen.into());
-	}
-	Ok(())
+	let Some(expected_hash) = pallet.storage_hash(storage_name) else {
+		return Err(MetadataError::IncompatibleCodegen.into());
+	};
+	if expected_hash != hash {
+		return Err(MetadataError::IncompatibleCodegen.into());
+	}
+	Ok(())
 }
 
 /// Fetch the return type out of a [`StorageEntryType`].
 fn return_type_from_storage_entry_type(entry: &StorageEntryType) -> u32 {
-	match entry {
-		StorageEntryType::Plain(ty) => *ty,
-		StorageEntryType::Map { value_ty, .. } => *value_ty,
-	}
+	match entry {
+		StorageEntryType::Plain(ty) => *ty,
+		StorageEntryType::Map { value_ty, .. } => *value_ty,
+	}
 }
 
 /// Given some bytes, a pallet and storage name, decode the response.
fn decode_storage_with_metadata( - bytes: &mut &[u8], - metadata: &Metadata, - storage_metadata: &StorageEntryMetadata, + bytes: &mut &[u8], + metadata: &Metadata, + storage_metadata: &StorageEntryMetadata, ) -> Result { - let ty = storage_metadata.entry_type(); - let return_ty = return_type_from_storage_entry_type(ty); - let val = T::decode_with_metadata(bytes, return_ty, metadata)?; - Ok(val) + let ty = storage_metadata.entry_type(); + let return_ty = return_type_from_storage_entry_type(ty); + let val = T::decode_with_metadata(bytes, return_ty, metadata)?; + Ok(val) } pub(crate) fn write_storage_address_root_bytes( - addr: &Address, - out: &mut Vec, + addr: &Address, + out: &mut Vec, ) { - out.extend(twox_128(addr.pallet_name().as_bytes())); - out.extend(twox_128(addr.entry_name().as_bytes())); + out.extend(twox_128(addr.pallet_name().as_bytes())); + out.extend(twox_128(addr.entry_name().as_bytes())); } pub(crate) fn storage_address_bytes( - addr: &Address, - metadata: &Metadata, + addr: &Address, + metadata: &Metadata, ) -> Result, subxt::Error> { - let mut bytes = Vec::new(); - write_storage_address_root_bytes(addr, &mut bytes); - addr.append_entry_bytes(metadata, &mut bytes)?; - Ok(bytes) + let mut bytes = Vec::new(); + write_storage_address_root_bytes(addr, &mut bytes); + addr.append_entry_bytes(metadata, &mut bytes)?; + Ok(bytes) } #[async_trait::async_trait] impl SubstrateStateReader for SubstrateClient - where - C: subxt::Config, - EC: LedgerEventCodec + Send + Sync, - T: protocol_abstract::LedgerTransaction + Send + Sync, +where + C: subxt::Config, + EC: LedgerEventCodec + Send + Sync, + T: protocol_abstract::LedgerTransaction + Send + Sync, { - type Error = SubxtClientError; - - async fn get_state_entry( - &self, - pallet_name: &str, - entry_name: &str, - address: K, - ) -> Result, Self::Error> { - let metadata = self.client.metadata(); - let (pallet, entry) = lookup_entry_details(pallet_name, entry_name, &metadata)?; - - let address = DynamicAddress::new(pallet_name, entry_name, vec![address]); - - // Metadata validation checks whether the static address given - // is likely to actually correspond to a real storage entry or not. - // if not, it means static codegen doesn't line up with runtime - // metadata. - validate_storage_address(&address, pallet)?; - - // Look up the return type ID to enable DecodeWithMetadata: - let lookup_bytes = storage_address_bytes(&address, &metadata)?; - if let Some(data) = self.client.storage().at_latest().await?.fetch_raw(lookup_bytes).await? - { - let val = decode_storage_with_metadata::(&mut &*data, &metadata, entry)?; - Ok(Some(val)) - } else { - Ok(None) - } - } + type Error = SubxtClientError; + + async fn get_state_entry( + &self, + pallet_name: &str, + entry_name: &str, + address: K, + ) -> Result, Self::Error> { + let metadata = self.client.metadata(); + let (pallet, entry) = lookup_entry_details(pallet_name, entry_name, &metadata)?; + + let address = DynamicAddress::new(pallet_name, entry_name, vec![address]); + + // Metadata validation checks whether the static address given + // is likely to actually correspond to a real storage entry or not. + // if not, it means static codegen doesn't line up with runtime + // metadata. + validate_storage_address(&address, pallet)?; + + // Look up the return type ID to enable DecodeWithMetadata: + let lookup_bytes = storage_address_bytes(&address, &metadata)?; + if let Some(data) = self.client.storage().at_latest().await?.fetch_raw(lookup_bytes).await? 
+ { + let val = decode_storage_with_metadata::(&mut &*data, &metadata, entry)?; + Ok(Some(val)) + } else { + Ok(None) + } + } } #[async_trait::async_trait] impl LedgerReader for SubstrateClient - where - C: subxt::Config, - H: subxt::config::Header + Decode + Encode + Send + Sync, - EC: LedgerEventCodec> - + Send - + Sync, - T: LedgerTransaction + Send + Sync, +where + C: subxt::Config, + H: subxt::config::Header + Decode + Encode + Send + Sync, + EC: LedgerEventCodec> + + Send + + Sync, + T: LedgerTransaction + Send + Sync, { - type Error = SubxtClientError; - type Event = ::Sink; - type EventCodec = EC; - - // Get the block height of the ledger, and the id of the highest block - async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { - let block = self.client.blocks().at_latest().await?; - - Ok((Position::from(block.number()), BlockId::from(block.hash()))) - } - - /// Subscribe to state updates from this ledger, starting at `offset`, and - /// ending the stream after `number_of_blocks` blocks have been processed. - async fn state_updates( - &self, - // The block to start from - from_block: FromBlock, - // The number of blocks to process before ending the stream - number_of_blocks: Option, - ) -> Result>, Self::Error> { - // If fromblock is not head, then load in historical blocks and yield up to number_of_blocks - // events - let historical = match from_block { - FromBlock::Head => stream::empty().boxed(), - FromBlock::First => self - .historical_events(self.client.backend().genesis_hash().await?, number_of_blocks) - .await? - .boxed(), - FromBlock::BlockId(BlockId::Block(hash)) => - self.historical_events(hash, number_of_blocks).await?.boxed(), - FromBlock::BlockId(BlockId::Unknown) => self - .historical_events(self.client.backend().genesis_hash().await?, number_of_blocks) - .await? - .boxed(), - }; - - let all = historical.chain(self.stream_finalized_events().await?); - - //TODO: only take number_of_blocks worth of events before closing the stream - - Ok(all.boxed()) - } + type Error = SubxtClientError; + type Event = ::Sink; + type EventCodec = EC; + + // Get the block height of the ledger, and the id of the highest block + async fn block_height(&self) -> Result<(Position, BlockId), Self::Error> { + let block = self.client.blocks().at_latest().await?; + + Ok((Position::from(block.number()), BlockId::from(block.hash()))) + } + + /// Subscribe to state updates from this ledger, starting at `offset`, and + /// ending the stream after `number_of_blocks` blocks have been processed. + async fn state_updates( + &self, + // The block to start from + from_block: FromBlock, + // The number of blocks to process before ending the stream + number_of_blocks: Option, + ) -> Result>, Self::Error> { + // If fromblock is not head, then load in historical blocks and yield up to number_of_blocks + // events + let historical = match from_block { + FromBlock::Head => stream::empty().boxed(), + FromBlock::First => self + .historical_events(self.client.backend().genesis_hash().await?, number_of_blocks) + .await? + .boxed(), + FromBlock::BlockId(BlockId::Block(hash)) => + self.historical_events(hash, number_of_blocks).await?.boxed(), + FromBlock::BlockId(BlockId::Unknown) => self + .historical_events(self.client.backend().genesis_hash().await?, number_of_blocks) + .await? 
diff --git a/crates/runtime-api-chronicle/Cargo.toml b/crates/runtime-api-chronicle/Cargo.toml
deleted file mode 100644
index a49e61757..000000000
--- a/crates/runtime-api-chronicle/Cargo.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-[package]
-name = "runtime-api-chronicle"
-version = "1.0.0"
-edition = "2021"
-
-[package.metadata.docs.rs]
-targets = ["x86_64-unknown-linux-gnu"]
-
-[dependencies]
-
-sp-api = { git = 'https://github.com/paritytech/polkadot-sdk.git', tag = 'polkadot-v1.9.0', default-features = false }
-sp-core = { git = 'https://github.com/paritytech/polkadot-sdk.git', tag = 'polkadot-v1.9.0', default-features = false }
-
-#Local dependencies
-common = { path = "../common", default-features = false, features = ["parity-encoding"] }
-
-[features]
-default = ["std"]
-std = [
-	"sp-api/std",
-	"common/std"
-]
diff --git a/crates/runtime-api-chronicle/src/lib.rs b/crates/runtime-api-chronicle/src/lib.rs
deleted file mode 100644
index 90c6d1874..000000000
--- a/crates/runtime-api-chronicle/src/lib.rs
+++ /dev/null
@@ -1,15 +0,0 @@
-#![cfg_attr(not(feature = "std"), no_std)]
-
-pub type Hash = sp_core::H256;
-
-pub mod chronicle_core {
-	pub use common::*;
-}
-
-// Here we declare the runtime API. It is implemented it the `impl` block in
-// runtime file (the `runtime/src/lib.rs`)
-sp_api::decl_runtime_apis! {
-	pub trait ChronicleApi {
-		fn placeholder() -> u32;
-	}
-}
diff --git a/node/node-chronicle/src/benchmarking.rs b/node/node-chronicle/src/benchmarking.rs
index 1288331a6..6e06a558f 100644
--- a/node/node-chronicle/src/benchmarking.rs
+++ b/node/node-chronicle/src/benchmarking.rs
@@ -6,9 +6,9 @@ use crate::service::FullClient;
 
 use pallet_chronicle::chronicle_core::OperationSubmission;
 use runtime::{AccountId, SystemCall};
+use runtime_chronicle::{self as runtime, pallet_opa::operations::ChronicleOperation, RuntimeCall};
 use sc_cli::Result;
 use sc_client_api::BlockBackend;
-use runtime_chronicle::{self as runtime, pallet_opa::operations::ChronicleOperation, RuntimeCall};
 use sp_core::{Encode, Pair};
 use sp_inherents::{InherentData, InherentDataProvider};
 use sp_keyring::Sr25519Keyring;
@@ -83,11 +83,13 @@ impl frame_benchmarking_cli::ExtrinsicBuilder for OperationSubmissionBuilder {
 		let extrinsic: OpaqueExtrinsic = create_benchmark_extrinsic(
 			self.client.as_ref(),
 			acc,
-			RuntimeCall::Chronicle(pallet_chronicle::Call::apply { operations: OperationSubmission {
-				correlation_id: [0; 16],
-				items: vec![self.value.clone()].into(),
-				identity: common::identity::SignedIdentity::new_no_identity().into(),
-			}}),
+			RuntimeCall::Chronicle(pallet_chronicle::Call::apply {
+				operations: OperationSubmission {
+					correlation_id: [0; 16],
+					items: vec![self.value.clone()].into(),
+					identity: common::identity::SignedIdentity::new_no_identity().into(),
+				},
+			}),
 			nonce,
 		)
 		.into();
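For context, `OperationSubmissionBuilder` above implements `frame_benchmarking_cli::ExtrinsicBuilder`, and the CLI later selects a builder by pallet and extrinsic name from an `ExtrinsicFactory` (see command.rs further down). A dependency-free sketch of that registry shape, with names and signatures simplified from the real trait:

```rust
// Simplified stand-in for frame_benchmarking_cli::ExtrinsicBuilder:
// each benchmark registers a boxed builder, looked up by name.
trait ExtrinsicBuilder {
    fn pallet(&self) -> &str;
    fn extrinsic(&self) -> &str;
    fn build(&self, nonce: u32) -> Result<Vec<u8>, &'static str>;
}

struct RemarkBuilder;

impl ExtrinsicBuilder for RemarkBuilder {
    fn pallet(&self) -> &str { "system" }
    fn extrinsic(&self) -> &str { "remark" }
    fn build(&self, nonce: u32) -> Result<Vec<u8>, &'static str> {
        // A real implementation signs a runtime call; we fake the payload.
        Ok(nonce.to_le_bytes().to_vec())
    }
}

struct ExtrinsicFactory(Vec<Box<dyn ExtrinsicBuilder>>);

impl ExtrinsicFactory {
    fn try_get(&self, pallet: &str, extrinsic: &str) -> Option<&dyn ExtrinsicBuilder> {
        self.0
            .iter()
            .find(|b| b.pallet() == pallet && b.extrinsic() == extrinsic)
            .map(|b| b.as_ref())
    }
}

fn main() {
    let factory = ExtrinsicFactory(vec![Box::new(RemarkBuilder)]);
    let xt = factory.try_get("system", "remark").unwrap().build(7).unwrap();
    assert_eq!(xt, 7u32.to_le_bytes().to_vec());
}
```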
diff --git a/node/node-chronicle/src/chain_spec.rs b/node/node-chronicle/src/chain_spec.rs
index d0f7a212b..6413f08b6 100644
--- a/node/node-chronicle/src/chain_spec.rs
+++ b/node/node-chronicle/src/chain_spec.rs
@@ -1,17 +1,22 @@
 use std::path::Path;
 
+use pallet_im_online::sr25519::AuthorityId as ImOnlineId;
 use runtime_chronicle::{
-    opaque::SessionKeys, pallet_chronicle, AccountId, AuraConfig, GrandpaConfig, ImOnlineConfig, Runtime, RuntimeGenesisConfig, SessionConfig, Signature, SudoConfig, SystemConfig, ValidatorSetConfig, WASM_BINARY
+	opaque::SessionKeys, pallet_chronicle, AccountId, AuraConfig, GrandpaConfig, ImOnlineConfig,
+	Runtime, RuntimeGenesisConfig, SessionConfig, Signature, SudoConfig, SystemConfig,
+	ValidatorSetConfig, WASM_BINARY,
 };
 use sc_keystore::LocalKeystore;
 use sc_service::ChainType;
 use sc_telemetry::{log, serde_json};
+use serde_json::to_value;
 use sp_consensus_aura::sr25519::AuthorityId as AuraId;
 use sp_consensus_grandpa::AuthorityId as GrandpaId;
 use sp_core::{sr25519, Pair, Public};
-use sp_runtime::{traits::{IdentifyAccount, Verify}, KeyTypeId};
-use pallet_im_online::sr25519::AuthorityId as ImOnlineId;
-use serde_json::to_value;
+use sp_runtime::{
+	traits::{IdentifyAccount, Verify},
+	KeyTypeId,
+};
 
 // The URL for the telemetry server.
 // const STAGING_TELEMETRY_URL: &str = "wss://telemetry.polkadot.io/submit/";
@@ -38,40 +43,47 @@ where
 
 /// Generate an Aura authority key.
 pub fn authority_keys_from_seed(s: &str) -> (AccountId, AuraId, GrandpaId, ImOnlineId) {
-	(get_account_id_from_seed::<sr25519::Public>(s), get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s), get_from_seed::<ImOnlineId>(s))
+	(
+		get_account_id_from_seed::<sr25519::Public>(s),
+		get_from_seed::<AuraId>(s),
+		get_from_seed::<GrandpaId>(s),
+		get_from_seed::<ImOnlineId>(s),
+	)
 }
 
 use sp_keystore::Keystore;
 
-pub fn authority_keys_from_keystore(p: &std::path::Path) -> (AccountId, AuraId, GrandpaId, ImOnlineId) {
-    let keystore = LocalKeystore::open(p, None).expect("Local keystore should open");
-
-    let sudo_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"acco"))
-        .into_iter()
-        .next()
-        .expect("Account key should be present in keystore");
-
-    let aura_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"aura"))
-        .into_iter()
-        .next()
-        .expect("Aura key should be present in keystore");
-
-    let grandpa_key = Keystore::ed25519_public_keys(&keystore, KeyTypeId(*b"gran"))
-        .into_iter()
-        .next()
-        .expect("Grandpa key should be present in keystore");
-
-    let im_online_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"onli"))
-        .into_iter()
-        .next()
-        .expect("ImOnline key should be present in keystore");
-
-    (
-        AccountPublic::from(sudo_key).into_account(),
-        aura_key.into(),
-        grandpa_key.into(),
-        im_online_key.into(),
-    )
+pub fn authority_keys_from_keystore(
+	p: &std::path::Path,
+) -> (AccountId, AuraId, GrandpaId, ImOnlineId) {
+	let keystore = LocalKeystore::open(p, None).expect("Local keystore should open");
+
+	let sudo_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"acco"))
+		.into_iter()
+		.next()
+		.expect("Account key should be present in keystore");
+
+	let aura_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"aura"))
+		.into_iter()
+		.next()
+		.expect("Aura key should be present in keystore");
+
+	let grandpa_key = Keystore::ed25519_public_keys(&keystore, KeyTypeId(*b"gran"))
+		.into_iter()
+		.next()
+		.expect("Grandpa key should be present in keystore");
+
+	let im_online_key = Keystore::sr25519_public_keys(&keystore, KeyTypeId(*b"onli"))
+		.into_iter()
+		.next()
+		.expect("ImOnline key should be present in keystore");
+
+	(
+		AccountPublic::from(sudo_key).into_account(),
+		aura_key.into(),
+		grandpa_key.into(),
+		im_online_key.into(),
+	)
 }
 
@@ -87,7 +99,8 @@ pub fn development_config() -> Result<ChainSpec, String> {
 			vec![authority_keys_from_seed("Alice")],
 			get_account_id_from_seed::<sr25519::Public>("Alice"),
 			true,
-		)).expect("Genesis config should be serializable")
+		))
+		.expect("Genesis config should be serializable"),
 	)
 	.with_protocol_id("chronicle")
 	.build())
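`get_from_seed` and `get_account_id_from_seed` (reconstructed in the hunks above) follow the Substrate node-template convention of deriving development keys from `//Seed` URIs. A minimal sketch of the helper, assuming the `sp-core` crate with its `std` feature:

```rust
use sp_core::{sr25519, Pair, Public};

// Node-template-style helper: derive a public key of type TPublic
// from a `//Seed` development phrase.
fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {
    TPublic::Pair::from_string(&format!("//{}", seed), None)
        .expect("static values are valid; qed")
        .public()
}

fn main() {
    let alice = get_from_seed::<sr25519::Public>("Alice");
    println!("//Alice sr25519 public: {:?}", alice);
}
```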
@@ -97,10 +110,7 @@ pub fn local_testnet_config() -> Result<ChainSpec, String> {
 	let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm not available".to_string())?;
 
 	log::info!("testnet configuration");
-    Ok(ChainSpec::builder(
-        wasm_binary,
-        None
-    )
+	Ok(ChainSpec::builder(wasm_binary, None)
 		.with_name("Local Testnet")
 		.with_id("local_testnet")
 		.with_chain_type(ChainType::Local)
@@ -109,7 +119,8 @@ pub fn local_testnet_config() -> Result<ChainSpec, String> {
 			vec![authority_keys_from_seed("Alice")],
 			get_account_id_from_seed::<sr25519::Public>("Alice"),
 			true,
-		)).expect("Genesis config should be serializable")
+		))
+		.expect("Genesis config should be serializable"),
 	)
 	.with_protocol_id("chronicle")
 	.build())
@@ -126,9 +137,7 @@ fn genesis(
 	_enable_println: bool,
 ) -> RuntimeGenesisConfig {
 	RuntimeGenesisConfig {
-        system: SystemConfig {
-            ..Default::default()
-        },
+		system: SystemConfig { ..Default::default() },
 		sudo: SudoConfig {
 			// Assign network admin rights.
 			key: Some(root_key),
@@ -138,36 +147,27 @@ fn genesis(
 			initial_validators: initial_authorities.iter().map(|x| x.0.clone()).collect::<Vec<_>>(),
 		},
 		session: SessionConfig {
-            keys: initial_authorities.iter().map(|x| {
-                (
-                    x.0.clone(),
-                    x.0.clone(),
-                    session_keys(x.1.clone(), x.2.clone(), x.3.clone())
-                )
-            }).collect::<Vec<_>>(),
-        },
-        aura: AuraConfig {
-            authorities: vec![],
-        },
-        grandpa: GrandpaConfig {
-            ..Default::default()
-        },
+			keys: initial_authorities
+				.iter()
+				.map(|x| {
+					(x.0.clone(), x.0.clone(), session_keys(x.1.clone(), x.2.clone(), x.3.clone()))
+				})
+				.collect::<Vec<_>>(),
+		},
+		aura: AuraConfig { authorities: vec![] },
+		grandpa: GrandpaConfig { ..Default::default() },
 		im_online: ImOnlineConfig { keys: vec![] },
 	}
 }
-
-
 pub fn chronicle_config() -> Result<ChainSpec, String> {
 	let wasm_binary = WASM_BINARY.ok_or_else(|| "Development wasm not available".to_string())?;
-    let (root_key, aura_key, grandpa_key, im_online_key) = authority_keys_from_keystore(Path::new("/keystore/"));
+	let (root_key, aura_key, grandpa_key, im_online_key) =
+		authority_keys_from_keystore(Path::new("/keystore/"));
 
 	log::info!("Private network configuration");
-    Ok(ChainSpec::builder(
-        wasm_binary,
-        None
-    )
+	Ok(ChainSpec::builder(wasm_binary, None)
 		.with_name("Chronicle")
 		.with_id("chronicle")
 		.with_chain_type(ChainType::Live)
@@ -176,9 +176,9 @@ pub fn chronicle_config() -> Result<ChainSpec, String> {
 			vec![(root_key.clone(), aura_key, grandpa_key, im_online_key)],
 			root_key,
 			true,
-		)).expect("Genesis config should be serializable")
+		))
+		.expect("Genesis config should be serializable"),
 	)
 	.with_protocol_id("chronicle")
 	.build())
 }
-
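The genesis patch passed to the chain-spec builder above is just the `serde_json::Value` produced by `to_value(genesis(...))`. A hedged sketch of what such a patch looks like, built by hand with `serde_json::json!`; the section and field names are illustrative guesses at the camelCase serialization of `RuntimeGenesisConfig`, and the address is the well-known `//Alice` dev account:

```rust
use serde_json::json;

fn main() {
    // Hypothetical hand-written equivalent of `to_value(genesis(...))`;
    // sections mirror the RuntimeGenesisConfig fields in this diff.
    let patch = json!({
        "sudo": { "key": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY" },
        "validatorSet": {
            "initialValidators": ["5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"]
        },
        "aura": { "authorities": [] },
        "imOnline": { "keys": [] }
    });
    println!("{}", serde_json::to_string_pretty(&patch).unwrap());
}
```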
diff --git a/node/node-chronicle/src/command.rs b/node/node-chronicle/src/command.rs
index e0381642f..436c6634f 100644
--- a/node/node-chronicle/src/command.rs
+++ b/node/node-chronicle/src/command.rs
@@ -1,5 +1,5 @@
 use crate::{
-	benchmarking::{inherent_benchmark_data, RemarkBuilder, OperationSubmissionBuilder},
+	benchmarking::{inherent_benchmark_data, OperationSubmissionBuilder, RemarkBuilder},
 	chain_spec,
 	cli::{Cli, Subcommand},
 	service,
@@ -155,9 +155,8 @@ pub fn run() -> sc_cli::Result<()> {
 			BenchmarkCmd::Extrinsic(cmd) => {
 				let PartialComponents { client, .. } = service::new_partial(&config)?;
 				// Register the *Remark* and *TKA* builders.
-				let ext_factory = ExtrinsicFactory(vec![
-					Box::new(RemarkBuilder::new(client.clone())),
-				]);
+				let ext_factory =
+					ExtrinsicFactory(vec![Box::new(RemarkBuilder::new(client.clone()))]);
 
 				cmd.run(client, inherent_benchmark_data()?, Vec::new(), &ext_factory)
 			},
diff --git a/node/node-chronicle/src/rpc.rs b/node/node-chronicle/src/rpc.rs
index 2c9b1b01c..7273ecd82 100644
--- a/node/node-chronicle/src/rpc.rs
+++ b/node/node-chronicle/src/rpc.rs
@@ -45,6 +45,5 @@ where
 
 	module.merge(System::new(client.clone(), pool, deny_unsafe).into_rpc())?;
 
-
 	Ok(module)
 }
diff --git a/node/runtime-chronicle/Cargo.toml b/node/runtime-chronicle/Cargo.toml
index 42e2b3f8c..d0943bf85 100644
--- a/node/runtime-chronicle/Cargo.toml
+++ b/node/runtime-chronicle/Cargo.toml
@@ -58,7 +58,6 @@ frame-system-benchmarking = { git = 'https://github.com/paritytech/polkadot-sdk.
 
 # RPC related
 frame-system-rpc-runtime-api = { git = 'https://github.com/paritytech/polkadot-sdk.git', tag = 'polkadot-v1.9.0', default-features = false }
-runtime-api-chronicle = { default-features = false, path = "../../crates/runtime-api-chronicle" }
 
 # Local Pallets
diff --git a/node/runtime-chronicle/src/lib.rs b/node/runtime-chronicle/src/lib.rs
index 6da9f7d26..c9a9e8bb9 100644
--- a/node/runtime-chronicle/src/lib.rs
+++ b/node/runtime-chronicle/src/lib.rs
@@ -9,13 +9,16 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs"));
 use frame_support::pallet_prelude::TransactionPriority;
 use frame_system::EnsureRoot;
 use pallet_grandpa::AuthorityId as GrandpaId;
+use pallet_im_online::sr25519::AuthorityId as ImOnlineId;
 use sp_api::impl_runtime_apis;
 use sp_consensus_aura::sr25519::AuthorityId as AuraId;
-use pallet_im_online::sr25519::AuthorityId as ImOnlineId;
 use sp_core::{crypto::KeyTypeId, OpaqueMetadata};
 use sp_runtime::{
 	create_runtime_str, generic, impl_opaque_keys,
-	traits::{AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, NumberFor, OpaqueKeys, Verify},
+	traits::{
+		AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, NumberFor, OpaqueKeys,
+		Verify,
+	},
 	transaction_validity::{TransactionSource, TransactionValidity},
 	ApplyExtrinsicResult, MultiSignature,
 };
@@ -25,7 +28,6 @@ use sp_version::NativeVersion;
 use sp_version::RuntimeVersion;
 
 pub mod no_nonce_fees;
-
 // A few exports that help ease life for downstream crates.
 pub use frame_support::{
 	construct_runtime, parameter_types,
@@ -204,11 +206,11 @@ impl frame_system::Config for Runtime {
 	type MaxConsumers = frame_support::traits::ConstU32<16>;
 	type RuntimeTask = RuntimeTask;
 
-    type SingleBlockMigrations = ();
-    type MultiBlockMigrator = ();
-    type PreInherents = ();
-    type PostInherents = ();
-    type PostTransactions = ();
+	type SingleBlockMigrations = ();
+	type MultiBlockMigrator = ();
+	type PreInherents = ();
+	type PostInherents = ();
+	type PostTransactions = ();
 }
 
 parameter_types! {
@@ -284,7 +286,6 @@ impl frame_system::offchain::SigningTypes for Runtime {
 	type Signature = Signature;
 }
 
-
 impl<LocalCall> frame_system::offchain::SendTransactionTypes<LocalCall> for Runtime
 where
 	RuntimeCall: From<LocalCall>,
 {
 	type OverarchingCall = RuntimeCall;
 	type Extrinsic = UncheckedExtrinsic;
 }
@@ -379,12 +380,6 @@ mod benches {
 }
 
 impl_runtime_apis! {
-	impl runtime_api_chronicle::ChronicleApi<Block> for Runtime {
-		// Purely to keep runtime api dependencies in place
-		fn placeholder() -> u32 {
-			0
-		}
-	}
 
 	impl sp_api::Core<Block> for Runtime {
 		fn version() -> RuntimeVersion {
@@ -397,20 +392,20 @@ impl_runtime_apis! {
 		fn initialize_block(header: &<Block as BlockT>::Header) -> sp_runtime::ExtrinsicInclusionMode {
-            Executive::initialize_block(header);
-            sp_runtime::ExtrinsicInclusionMode::AllExtrinsics
-        }
+			Executive::initialize_block(header);
+			sp_runtime::ExtrinsicInclusionMode::AllExtrinsics
+		}
 	}
 
 	impl sp_genesis_builder::GenesisBuilder<Block> for Runtime {
-        fn create_default_config() -> Vec<u8> {
-            frame_support::genesis_builder_helper::create_default_config::<RuntimeGenesisConfig>()
-        }
-
-        fn build_config(config: Vec<u8>) -> sp_genesis_builder::Result {
-            frame_support::genesis_builder_helper::build_config::<RuntimeGenesisConfig>(config)
-        }
-    }
+		fn create_default_config() -> Vec<u8> {
+			frame_support::genesis_builder_helper::create_default_config::<RuntimeGenesisConfig>()
+		}
+
+		fn build_config(config: Vec<u8>) -> sp_genesis_builder::Result {
+			frame_support::genesis_builder_helper::build_config::<RuntimeGenesisConfig>(config)
+		}
+	}
 
 	impl sp_api::Metadata<Block> for Runtime {
 		fn metadata() -> OpaqueMetadata {