From afd749a9f57211882eca3891c9cbb9ab4ab84271 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 19:50:06 -0600
Subject: [PATCH 01/87] feat: initial commit removing cljs support and
converting over to deps
---
.gitignore | 1 +
CHANGELOG.md | 10 +-
CONTRIBUTING.md | 4 +-
CONTRIBUTORS.md | 5 +-
Makefile | 45 +
NOTICE | 2 +-
README.md | 2 +-
build/.gitkeep | 0
build/META-INF/MANIFEST.MF | 4 +
.../maven/k13labs/clara-rules/pom.properties | 6 +
build/clara/rules.clj | 257 ++
.../clara/rules/accumulators.clj | 17 +-
build/clara/rules/compiler.clj | 2039 ++++++++++++++++
build/clara/rules/dsl.clj | 315 +++
build/clara/rules/durability.clj | 699 ++++++
build/clara/rules/durability/fressian.clj | 642 +++++
.../clara/rules/engine.clj | 244 +-
build/clara/rules/java.clj | 50 +
.../clara/rules/listener.clj | 4 +-
build/clara/rules/memory.clj | 909 +++++++
build/clara/rules/platform.clj | 93 +
.../clara/rules/schema.clj | 13 +-
build/clara/rules/test_rules_data.clj | 37 +
.../clara/rules/testfacts.clj | 1 -
build/clara/rules/update_cache/cancelling.clj | 146 ++
.../clara/rules/update_cache/core.clj | 0
.../clara/tools/fact_graph.clj | 12 +-
.../clara/tools/inspect.clj | 227 +-
.../clara/tools/internal/inspect.clj | 2 +-
.../clara/tools/loop_detector.clj | 23 +-
build/clara/tools/testing_utils.clj | 204 ++
.../clara/tools/tracing.clj | 5 +-
.../clj-kondo.exports/clara/rules/config.edn | 7 +
.../clara/rules/hooks/clara_rules.clj_kondo | 444 ++++
deps.edn | 59 +
dev/user.clj | 1 +
project.clj | 14 +-
resources/.gitkeep | 0
resources/public/index.html | 15 -
src/main/clojure/clara/macros.clj | 278 ---
src/main/clojure/clara/rules.clj | 257 ++
src/main/clojure/clara/rules.cljc | 431 ----
src/main/clojure/clara/rules/accumulators.clj | 212 ++
src/main/clojure/clara/rules/compiler.clj | 580 ++---
src/main/clojure/clara/rules/dsl.clj | 160 +-
src/main/clojure/clara/rules/durability.clj | 78 +-
.../clara/rules/durability/fressian.clj | 202 +-
src/main/clojure/clara/rules/engine.clj | 2119 +++++++++++++++++
src/main/clojure/clara/rules/java.clj | 10 +-
src/main/clojure/clara/rules/listener.clj | 176 ++
src/main/clojure/clara/rules/memory.clj | 909 +++++++
src/main/clojure/clara/rules/memory.cljc | 1068 ---------
src/main/clojure/clara/rules/platform.clj | 93 +
src/main/clojure/clara/rules/platform.cljc | 115 -
src/main/clojure/clara/rules/schema.clj | 200 ++
.../clojure/clara/rules/test_rules_data.clj | 11 +-
src/main/clojure/clara/rules/testfacts.clj | 24 +
.../clara/rules/update_cache/cancelling.clj | 1 -
.../clojure/clara/rules/update_cache/core.clj | 32 +
src/main/clojure/clara/tools/fact_graph.clj | 96 +
src/main/clojure/clara/tools/inspect.clj | 354 +++
.../clojure/clara/tools/internal/inspect.clj | 80 +
.../clojure/clara/tools/loop_detector.clj | 102 +
.../clojure/clara/tools/testing_utils.clj | 204 ++
.../clojure/clara/tools/testing_utils.cljc | 229 --
src/main/clojure/clara/tools/tracing.clj | 175 ++
.../clojure/clara/generative/generators.clj | 12 +-
.../clojure/clara/generative/test_accum.clj | 12 +-
.../clara/generative/test_generators.clj | 2 +-
src/test/clojure/clara/other_ruleset.clj | 2 +-
.../clara/performance/test_compilation.clj | 76 +-
.../performance/test_rule_execution.clj} | 11 +-
src/test/clojure/clara/rule_defs.clj | 37 +
src/test/clojure/clara/sample_ruleset.clj | 1 -
.../clara/test_accumulation.clj} | 651 +++--
.../clara/test_accumulators.clj} | 91 +-
.../clara/test_bindings.clj} | 76 +-
.../clara/test_clear_ns_productions.clj | 69 +
.../clara/test_common.clj} | 39 +-
.../clara/test_dsl.clj} | 314 ++-
src/test/clojure/clara/test_durability.clj | 25 +-
.../clara/test_exists.clj} | 66 +-
src/test/clojure/clara/test_fressian.clj | 6 +-
.../clojure/clara/test_infinite_loops.clj | 6 +-
src/test/clojure/clara/test_java.clj | 14 +-
.../clara/test_memory.clj} | 70 +-
.../clara/test_negation.clj} | 98 +-
.../clara/test_node_sharing.clj} | 72 +-
.../clara/test_performance_optimizations.clj | 45 +
.../clara/test_queries.clj} | 42 +-
src/test/clojure/clara/test_rhs_retract.clj | 44 +
src/test/clojure/clara/test_rules.clj | 317 ++-
.../clara/test_rules_require.clj} | 8 +-
src/test/clojure/clara/test_setup.clj | 18 +
.../clara/test_simple_rules.clj} | 75 +-
.../clara/test_testing_utils.clj} | 31 +-
.../clara/test_truth_maintenance.clj} | 108 +-
.../clara/tools/test_fact_graph.clj} | 9 +-
.../clara/tools/test_inspect.clj} | 86 +-
.../clara/tools/test_tracing.clj} | 81 +-
src/test/clojurescript/clara/test.cljs | 66 -
.../clara/test_complex_negation.cljs | 80 -
src/test/clojurescript/clara/test_rules.cljs | 240 --
.../clojurescript/clara/test_salience.cljs | 115 -
src/test/common/clara/rule_defs.cljc | 39 -
.../clara/test_clear_ns_productions.cljc | 84 -
.../clara/test_performance_optimizations.cljc | 62 -
src/test/common/clara/test_rhs_retract.cljc | 62 -
src/test/html/advanced.html | 5 -
src/test/html/simple.html | 5 -
src/test/js/runner.js | 37 -
tests.edn | 19 +
tool/build.clj | 19 +
113 files changed, 12993 insertions(+), 5208 deletions(-)
create mode 100644 Makefile
create mode 100644 build/.gitkeep
create mode 100644 build/META-INF/MANIFEST.MF
create mode 100644 build/META-INF/maven/k13labs/clara-rules/pom.properties
create mode 100644 build/clara/rules.clj
rename src/main/clojure/clara/rules/accumulators.cljc => build/clara/rules/accumulators.clj (95%)
create mode 100644 build/clara/rules/compiler.clj
create mode 100644 build/clara/rules/dsl.clj
create mode 100644 build/clara/rules/durability.clj
create mode 100644 build/clara/rules/durability/fressian.clj
rename src/main/clojure/clara/rules/engine.cljc => build/clara/rules/engine.clj (93%)
create mode 100644 build/clara/rules/java.clj
rename src/main/clojure/clara/rules/listener.cljc => build/clara/rules/listener.clj (99%)
create mode 100644 build/clara/rules/memory.clj
create mode 100644 build/clara/rules/platform.clj
rename src/main/clojure/clara/rules/schema.cljc => build/clara/rules/schema.clj (97%)
create mode 100644 build/clara/rules/test_rules_data.clj
rename src/main/clojure/clara/rules/testfacts.cljc => build/clara/rules/testfacts.clj (99%)
create mode 100644 build/clara/rules/update_cache/cancelling.clj
rename src/main/clojure/clara/rules/update_cache/core.cljc => build/clara/rules/update_cache/core.clj (100%)
rename src/main/clojure/clara/tools/fact_graph.cljc => build/clara/tools/fact_graph.clj (96%)
rename src/main/clojure/clara/tools/inspect.cljc => build/clara/tools/inspect.clj (58%)
rename src/main/clojure/clara/tools/internal/inspect.cljc => build/clara/tools/internal/inspect.clj (99%)
rename src/main/clojure/clara/tools/loop_detector.cljc => build/clara/tools/loop_detector.clj (81%)
create mode 100644 build/clara/tools/testing_utils.clj
rename src/main/clojure/clara/tools/tracing.cljc => build/clara/tools/tracing.clj (99%)
create mode 100644 build/clj-kondo.exports/clara/rules/config.edn
create mode 100644 build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
create mode 100644 deps.edn
create mode 100644 dev/user.clj
create mode 100644 resources/.gitkeep
delete mode 100644 resources/public/index.html
delete mode 100644 src/main/clojure/clara/macros.clj
create mode 100644 src/main/clojure/clara/rules.clj
delete mode 100644 src/main/clojure/clara/rules.cljc
create mode 100644 src/main/clojure/clara/rules/accumulators.clj
create mode 100644 src/main/clojure/clara/rules/engine.clj
create mode 100644 src/main/clojure/clara/rules/listener.clj
create mode 100644 src/main/clojure/clara/rules/memory.clj
delete mode 100644 src/main/clojure/clara/rules/memory.cljc
create mode 100644 src/main/clojure/clara/rules/platform.clj
delete mode 100644 src/main/clojure/clara/rules/platform.cljc
create mode 100644 src/main/clojure/clara/rules/schema.clj
create mode 100644 src/main/clojure/clara/rules/testfacts.clj
create mode 100644 src/main/clojure/clara/rules/update_cache/core.clj
create mode 100644 src/main/clojure/clara/tools/fact_graph.clj
create mode 100644 src/main/clojure/clara/tools/inspect.clj
create mode 100644 src/main/clojure/clara/tools/internal/inspect.clj
create mode 100644 src/main/clojure/clara/tools/loop_detector.clj
create mode 100644 src/main/clojure/clara/tools/testing_utils.clj
delete mode 100644 src/main/clojure/clara/tools/testing_utils.cljc
create mode 100644 src/main/clojure/clara/tools/tracing.clj
rename src/test/{common/clara/performance/test_rule_execution.cljc => clojure/clara/performance/test_rule_execution.clj} (82%)
create mode 100644 src/test/clojure/clara/rule_defs.clj
rename src/test/{common/clara/test_accumulation.cljc => clojure/clara/test_accumulation.clj} (78%)
rename src/test/{common/clara/test_accumulators.cljc => clojure/clara/test_accumulators.clj} (89%)
rename src/test/{common/clara/test_bindings.cljc => clojure/clara/test_bindings.clj} (86%)
create mode 100644 src/test/clojure/clara/test_clear_ns_productions.clj
rename src/test/{common/clara/test_common.cljc => clojure/clara/test_common.clj} (81%)
rename src/test/{common/clara/test_dsl.cljc => clojure/clara/test_dsl.clj} (52%)
rename src/test/{common/clara/test_exists.cljc => clojure/clara/test_exists.clj} (73%)
rename src/test/{common/clara/test_memory.cljc => clojure/clara/test_memory.clj} (76%)
rename src/test/{common/clara/test_negation.cljc => clojure/clara/test_negation.clj} (83%)
rename src/test/{common/clara/test_node_sharing.cljc => clojure/clara/test_node_sharing.clj} (81%)
create mode 100644 src/test/clojure/clara/test_performance_optimizations.clj
rename src/test/{common/clara/test_queries.cljc => clojure/clara/test_queries.clj} (58%)
create mode 100644 src/test/clojure/clara/test_rhs_retract.clj
rename src/test/{common/clara/test_rules_require.cljc => clojure/clara/test_rules_require.clj} (82%)
create mode 100644 src/test/clojure/clara/test_setup.clj
rename src/test/{common/clara/test_simple_rules.cljc => clojure/clara/test_simple_rules.clj} (70%)
rename src/test/{common/clara/test_testing_utils.cljc => clojure/clara/test_testing_utils.clj} (68%)
rename src/test/{common/clara/test_truth_maintenance.cljc => clojure/clara/test_truth_maintenance.clj} (83%)
rename src/test/{common/clara/tools/test_fact_graph.cljc => clojure/clara/tools/test_fact_graph.clj} (95%)
rename src/test/{common/clara/tools/test_inspect.cljc => clojure/clara/tools/test_inspect.clj} (91%)
rename src/test/{common/clara/tools/test_tracing.cljc => clojure/clara/tools/test_tracing.clj} (70%)
delete mode 100644 src/test/clojurescript/clara/test.cljs
delete mode 100644 src/test/clojurescript/clara/test_complex_negation.cljs
delete mode 100644 src/test/clojurescript/clara/test_rules.cljs
delete mode 100644 src/test/clojurescript/clara/test_salience.cljs
delete mode 100644 src/test/common/clara/rule_defs.cljc
delete mode 100644 src/test/common/clara/test_clear_ns_productions.cljc
delete mode 100644 src/test/common/clara/test_performance_optimizations.cljc
delete mode 100644 src/test/common/clara/test_rhs_retract.cljc
delete mode 100644 src/test/html/advanced.html
delete mode 100644 src/test/html/simple.html
delete mode 100644 src/test/js/runner.js
create mode 100644 tests.edn
create mode 100644 tool/build.clj
diff --git a/.gitignore b/.gitignore
index da5eecd7..35299ec9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,4 @@ figwheel_server.log
*.iml
.clj-kondo
.lsp
+.cpcache
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9282b04a..971fcc87 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,12 @@
-This is a history of changes to clara-rules.
+This is a history of changes to k13labs/clara-rules.
-# 0.23.0-SNAPSHOT
+# 0.9.0
+* strip cljs support, general cleanup.
+* integrate with collections from ham-fisted for performance gains.
+
+This is a history of changes to clara-rules prior to forking to k13labs/clara-rules.
+
+# 0.23.0
* extract clara.rules.compiler/compile-test-handler from clara.rules.compiler/compile-test
* add support for `env` inside of test expressions
* use `.clj_kondo` extension for clj-kondo hook code for better tool compatibility (clj-kondo support now requires clj-kondo 2022.04.25 or higher)
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 0cc8cc59..c7404588 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -30,10 +30,10 @@ Thanks.
* Open a [pull request][7].
* The pull request will be reviewed by the community and merged by the project committers.
-[1]: https://github.com/cerner/clara-rules/issues
+[1]: https://github.com/k13labs/clara-rules/issues
[2]: http://gun.io/blog/how-to-github-fork-branch-and-pull-request
[3]: http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
[4]: ./CHANGELOG.md
[5]: ./LICENSE
[6]: http://gitready.com/advanced/2009/02/10/squashing-commits-with-rebase.html
-[7]: https://help.github.com/articles/using-pull-requests
\ No newline at end of file
+[7]: https://help.github.com/articles/using-pull-requests
diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md
index 42c8a21c..4f53bff0 100644
--- a/CONTRIBUTORS.md
+++ b/CONTRIBUTORS.md
@@ -1,13 +1,10 @@
-Cerner Corporation
+Clara Rules Contributors
- Ryan Brush [@rbrush]
- Mike Rodriguez [@mrrodriguez]
- William Parker [@WilliamParker]
- Ethan Christian [@EthanEChristian]
- Pushkar Kulkarni [@kulkarnipushkar]
-
-Community
-
- David Goeke [@dgoeke]
- Dave Dixon [@sparkofreason]
- Baptiste Fontaine [@bfontaine]
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..d92ea926
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,45 @@
+.PHONY: repl test test-generative test-config clean compile-main-java compile-test-java build deploy install format-check format-fix
+
+SHELL := /bin/bash
+VERSION := 0.9.0-SNAPSHOT
+
+compile-main-java:
+ clj -T:build compile-main-java
+
+compile-test-java: compile-main-java
+ clj -T:build compile-test-java
+
+repl: compile-test-java
+ clj -M:dev:test:repl
+
+test: compile-test-java
+ clj -M:dev:test:runner --focus :unit --reporter kaocha.report/tap
+
+test-generative: compile-test-java
+ clj -M:dev:test:runner --focus :generative --reporter kaocha.report/tap
+
+test-config:
+ clj -M:dev:test:runner --print-config
+
+clean:
+ rm -rf pom.xml target build
+
+build: compile-main-java
+ clj -Spom
+ clj -X:jar \
+ :sync-pom true \
+ :group-id "k13labs" \
+ :artifact-id "clara-rules" \
+ :version '"$(VERSION)"'
+
+deploy:
+ clj -X:deploy-maven
+
+install:
+ clj -X:install-maven
+
+format-check:
+ clj -M:format-check
+
+format-fix:
+ clj -M:format-fix
diff --git a/NOTICE b/NOTICE
index cd7fd35c..a83dae24 100644
--- a/NOTICE
+++ b/NOTICE
@@ -1,4 +1,4 @@
-Copyright 2016 Cerner Innovation, Inc.
+Copyright 2023 Jose Gomez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
diff --git a/README.md b/README.md
index 7d101067..61ab8563 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ See [CONTRIBUTING.md](CONTRIBUTING.md)
# LICENSE
-Copyright 2018 Cerner Innovation, Inc.
+Copyright 2023 Jose Gomez
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
diff --git a/build/.gitkeep b/build/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/build/META-INF/MANIFEST.MF b/build/META-INF/MANIFEST.MF
new file mode 100644
index 00000000..d49f8288
--- /dev/null
+++ b/build/META-INF/MANIFEST.MF
@@ -0,0 +1,4 @@
+Manifest-Version: 1.0
+Created-By: depstar
+Built-By: jose.gomez
+Build-Jdk: 11.0.20
diff --git a/build/META-INF/maven/k13labs/clara-rules/pom.properties b/build/META-INF/maven/k13labs/clara-rules/pom.properties
new file mode 100644
index 00000000..7d25afdb
--- /dev/null
+++ b/build/META-INF/maven/k13labs/clara-rules/pom.properties
@@ -0,0 +1,6 @@
+#Generated by depstar
+#Tue Dec 26 19:45:19 CST 2023
+revision=3d3c776dd19f1fc80d50ae13c9d1fc51acc40da0
+version=0.9.0-SNAPSHOT
+groupId=k13labs
+artifactId=clara-rules
diff --git a/build/clara/rules.clj b/build/clara/rules.clj
new file mode 100644
index 00000000..c26815b1
--- /dev/null
+++ b/build/clara/rules.clj
@@ -0,0 +1,257 @@
+(ns clara.rules
+ "Forward-chaining rules for Clojure. The primary API is in this namespace."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.platform :as platform]
+ [clara.rules.compiler :as com]
+ [clara.rules.dsl :as dsl]))
+
+(defn insert
+ "Inserts one or more facts into a working session. It does not modify the given
+ session, but returns a new session with the facts added."
+ [session & facts]
+ (eng/insert session facts))
+
+(defn insert-all
+ "Inserts a sequence of facts into a working session. It does not modify the given
+ session, but returns a new session with the facts added."
+ [session fact-seq]
+ (eng/insert session fact-seq))
+
+(defn retract
+ "Retracts a fact from a working session. It does not modify the given session,
+ but returns a new session with the facts retracted."
+ [session & facts]
+ (eng/retract session facts))
+
+(defn fire-rules
+ "Fires are rules in the given session. Once a rule is fired, it is labeled in a fired
+ state and will not be re-fired unless facts affecting the rule are added or retracted.
+
+ This function does not modify the given session to mark rules as fired. Instead, it returns
+ a new session in which the rules are marked as fired.
+
+   This takes an additional map of options as a second argument. Current options:
+
+   :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
+   Simultaneously propagate insertions and retractions through the rules network, at every step using the insertions and retractions of equal facts to cancel each
+   other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
+   is undefined when this option is enabled, so it should not be used if calling fire-rules can result in these operations.
+   Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
+   When this option is used, rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
+ ([session] (eng/fire-rules session {}))
+ ([session opts] (eng/fire-rules session opts)))
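+
+;; Illustrative usage sketch (not from the original docs): `session` stands for a
+;; session built elsewhere (e.g. with defsession) and ->Temperature for a
+;; hypothetical fact record constructor.
+(comment
+  (-> session
+      (insert (->Temperature -5 "MCI"))
+      (fire-rules))
+
+  ;; With the experimental :cancelling option described above:
+  (fire-rules session {:cancelling true}))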
+
+(defn query
+ "Runs the given query with the optional given parameters against the session.
+ The optional parameters should be in map form. For example, a query call might be:
+
+ (query session get-by-last-name :?last-name \"Jones\")
+
+ The query itself may be either the var created by a defquery statement,
+ or the actual name of the query.
+ "
+ [session query & params]
+ (let [params-map (->> (for [[param value] (apply hash-map params)]
+ [(platform/query-param param) value])
+ (into {}))]
+ (eng/query session query params-map)))
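+
+;; Hedged usage sketch: get-by-last-name stands for a query defined with defquery
+;; (see below) that declares a :?last-name parameter; each result is a map of the
+;; variables bound for one match.
+(comment
+  (query session get-by-last-name :?last-name "Jones")
+  ;; => ({:?last-name "Jones", ...} ...)
+  )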
+
+(defn insert!
+ "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
+
+ Inserted facts are logical, in that if the support for the insertion is removed, the fact
+ will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
+ if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
+ the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
+   truth maintenance facility.
+
+ This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
+ criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
+ retract their conclusions. This way we can ensure that information inferred by rules is always
+ in a consistent state."
+ [& facts]
+ (eng/insert-facts! facts false))
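+
+;; Minimal sketch of the logical-insert behavior described above; Temperature and
+;; Cold are hypothetical fact records.
+(comment
+  (defrule cold-when-freezing
+    [Temperature (< temperature 0) (= ?loc location)]
+    =>
+    (insert! (->Cold ?loc)))
+
+  ;; If the matching Temperature fact is later retracted, the Cold fact inserted
+  ;; here is retracted automatically by truth maintenance.
+  )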
+
+(defn insert-all!
+ "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
+ rules needing to insert multiple facts.
+
+   See the doc in insert! for details on insert behavior.
+ [facts]
+ (eng/insert-facts! facts false))
+
+(defn insert-unconditional!
+ "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
+
+ This differs from insert! in that it is unconditional. The facts inserted will not be retracted
+ even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
+ function as described above, but this function is available for use cases that don't wish to use
+ Clara's truth maintenance."
+ [& facts]
+ (eng/insert-facts! facts true))
+
+(defn insert-all-unconditional!
+ "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
+
+   See the doc in insert-unconditional! for details on unconditional insert behavior.
+ [facts]
+ (eng/insert-facts! facts true))
+
+(defn retract!
+ "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
+
+ Retracting facts from the right-hand side has slightly different semantics than insertion. As described
+ in the insert! documentation, inserts are logical and will automatically be retracted if the rule
+ that inserted them becomes false. This retract! function does not follow the inverse; retracted items
+ are simply removed, and not re-added if the rule that retracted them becomes false.
+
+ The reason for this is that retractions remove information from the knowledge base, and doing truth
+ maintenance over retractions would require holding onto all retracted items, which would be an issue
+ in some use cases. This retract! method is included to help with certain use cases, but unless you
+ have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
+ Clara's underlying truth maintenance retract inserted items if their support becomes false."
+ [& facts]
+ (eng/rhs-retract-facts! facts))
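+
+;; Hedged sketch of an RHS retraction; Alert and Override are hypothetical fact
+;; types, and ?alert uses the fact-binding (<-) syntax.
+(comment
+  (defrule drop-overridden-alert
+    [?alert <- Alert (= ?id id)]
+    [Override (= ?id alert-id)]
+    =>
+    (retract! ?alert)))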
+
+(extend-type clojure.lang.Symbol
+ com/IRuleSource
+ (load-rules [sym]
+ ;; Find the rules and queries in the namespace, shred them,
+ ;; and compile them into a rule base.
+ (if (namespace sym)
+ ;; The symbol is qualified, so load rules in the qualified symbol.
+ (let [resolved (resolve sym)]
+ (when (nil? resolved)
+ (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
+
+ (cond
+ ;; The symbol references a rule or query, so just return it
+ (or (:query (meta resolved))
+ (:rule (meta resolved))) [@resolved]
+
+        ;; The symbol references a sequence, so return it.
+ (sequential? @resolved) @resolved
+
+ :else
+ (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym}))))
+
+ ;; The symbol is not qualified, so treat it as a namespace.
+ (->> (ns-interns sym)
+ (vals) ; Get the references in the namespace.
+ (filter var?)
+ (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
+ ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
+ ;; it is possible that they won't have :line metadata, so we have a default of 0.
+ (sort (fn [v1 v2]
+ (compare (or (:line (meta v1)) 0)
+ (or (:line (meta v2)) 0))))
+ (mapcat #(if (:production-seq (meta %))
+ (deref %)
+ [(deref %)]))))))
+
+(defmacro mk-session
+ "Creates a new session using the given rule sources. The resulting session
+ is immutable, and can be used with insert, retract, fire-rules, and query functions.
+
+ If no sources are provided, it will attempt to load rules from the caller's namespace,
+ which is determined by reading Clojure's *ns* var.
+
+ This will use rules defined with defrule, queries defined with defquery, and sequences
+ of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
+
+   The caller may also specify keyword-style options at the end of the parameters. Currently seven
+ options are supported, although most users will either not need these or just the first two:
+
+   * :fact-type-fn, whose value must be a function used to determine the logical type of a given
+ fact. Defaults to Clojure's type function.
+ * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
+ Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
+ * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
+ fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
+ this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
+   * :activation-group-fn, a function applied to production structures that returns the group they should be activated with.
+ It defaults to checking the :salience property, or 0 if none exists.
+ * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
+ Defaults to >, so rules with a higher salience are executed first.
+ * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
+ Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
+ Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
+   * :omit-compile-ctx - When false, Clara retains additional information to improve error messages during
+ session deserialization at the cost of additional memory use.
+ By default this information is retained until the session is initially compiled and then will be discarded. This
+ information might prove useful for debugging compilation errors within the rulebase, eg. rulebase serialization
+ (ie. via Clara's durability support).
+ Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
+
+ This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
+ users must use pre-defined rule sessions using defsession."
+ [& args]
+ (if (and (seq args) (not (keyword? (first args))))
+ `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
+ `(com/mk-session (concat [(ns-name *ns*)] ~(vec args))))) ; No namespace given, so use the current one.
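+
+;; Hedged examples of the options documented above; my.app.rules is a hypothetical
+;; rules namespace.
+(comment
+  (mk-session 'my.app.rules)                           ; rules loaded from one namespace
+  (mk-session 'my.app.rules :cache false)              ; recompile on every call
+  (mk-session 'my.app.rules :fact-type-fn :fact-type)) ; dispatch on a map key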
+
+(defmacro defsession
+ "Creates a sesson given a list of sources and keyword-style options, which are typically Clojure namespaces.
+
+ Typical usage would be like this, with a session defined as a var:
+
+ (defsession my-session 'example.namespace)
+
+ That var contains an immutable session that then can be used as a starting point to create sessions with
+ caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
+ and will not be modified by callers. So a user might grab it, insert facts, and otherwise
+ use it as follows:
+
+ (-> my-session
+ (insert (->Temperature 23))
+ (fire-rules))"
+ [name & sources-and-options]
+ `(def ~name (com/mk-session ~(vec sources-and-options))))
+
+(defmacro defrule
+ "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
+
+ (defrule hvac-approval
+ \"HVAC repairs need the appropriate paperwork, so insert
+ a validation error if approval is not present.\"
+ [WorkOrder (= type :hvac)]
+ [:not [ApprovalForm (= formname \"27B-6\")]]
+ =>
+ (insert! (->ValidationError
+ :approval
+ \"HVAC repairs must include a 27B-6 form.\")))
+
+ See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :rule true :doc doc)
+ ~(dsl/build-rule name body (meta &form)))))
+
+(defmacro defquery
+ "Defines a query and stored it in the given var. For instance, a simple query that accepts no
+ parameters would look like this:
+
+ (defquery check-job
+ \"Checks the job for validation errors.\"
+ []
+ [?issue <- ValidationError])
+
+ See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :query true :doc doc)
+ ~(dsl/build-query name body (meta &form)))))
+
+(defmacro clear-ns-productions!
+ "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
+ Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
+ name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
+ reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
+ defining rules/queries to ensure the cache is cleared properly."
+ []
+ (let [production-syms (->> (ns-interns *ns*)
+ (filter (comp var? second))
+ (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
+ (map first))] ; Take the symbols for the rule/query vars
+ (doseq [psym production-syms]
+ (ns-unmap *ns* psym))))
diff --git a/src/main/clojure/clara/rules/accumulators.cljc b/build/clara/rules/accumulators.clj
similarity index 95%
rename from src/main/clojure/clara/rules/accumulators.cljc
rename to build/clara/rules/accumulators.clj
index ad6210c0..37da6748 100644
--- a/src/main/clojure/clara/rules/accumulators.cljc
+++ b/build/clara/rules/accumulators.clj
@@ -1,7 +1,6 @@
(ns clara.rules.accumulators
"A set of common accumulators usable in Clara rules."
(:require [clara.rules.engine :as eng]
- [clojure.set :as set]
[schema.core :as s])
(:refer-clojure :exclude [min max distinct count]))
@@ -202,12 +201,12 @@
"Returns an accumulator that preserves all accumulated items.
If given a field, returns all values in that field."
([]
- (accum
- {:initial-value []
- :reduce-fn (fn [items value] (conj items value))
- :retract-fn (fn [items retracted] (drop-one-of items retracted))}))
+ (accum
+ {:initial-value []
+ :reduce-fn (fn [items value] (conj items value))
+ :retract-fn (fn [items retracted] (drop-one-of items retracted))}))
([field]
- (accum
- {:initial-value []
- :reduce-fn (fn [items value] (conj items (field value)))
- :retract-fn (fn [items retracted] (drop-one-of items (field retracted)))})))
+ (accum
+ {:initial-value []
+ :reduce-fn (fn [items value] (conj items (field value)))
+ :retract-fn (fn [items retracted] (drop-one-of items (field retracted)))})))
diff --git a/build/clara/rules/compiler.clj b/build/clara/rules/compiler.clj
new file mode 100644
index 00000000..9de679db
--- /dev/null
+++ b/build/clara/rules/compiler.clj
@@ -0,0 +1,2039 @@
+(ns clara.rules.compiler
+ "This namespace is for internal use and may move in the future.
+ This is the Clara rules compiler, translating raw data structures into compiled versions and functions.
+ Most users should use only the clara.rules namespace."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.schema :as schema]
+ [clojure.set :as set]
+ [clojure.string :as string]
+ [clojure.walk :as walk]
+ [schema.core :as sc])
+ (:import [clara.rules.engine
+ ProductionNode
+ QueryNode
+ AlphaNode
+ RootJoinNode
+ HashJoinNode
+ ExpressionJoinNode
+ NegationNode
+ NegationWithJoinFilterNode
+ TestNode
+ AccumulateNode
+ AccumulateWithJoinFilterNode
+ LocalTransport
+ Accumulator
+ NegationResult
+ ISystemFact]
+ [java.beans
+ PropertyDescriptor]
+ [clojure.lang
+ IFn]))
+
+;; Protocol for loading rules from some arbitrary source.
+(defprotocol IRuleSource
+ (load-rules [source]))
+
+(sc/defschema BetaNode
+ "These nodes exist in the beta network."
+ (sc/pred (comp #{ProductionNode
+ QueryNode
+ RootJoinNode
+ HashJoinNode
+ ExpressionJoinNode
+ NegationNode
+ NegationWithJoinFilterNode
+ TestNode
+ AccumulateNode
+ AccumulateWithJoinFilterNode}
+ class)
+ "Some beta node type"))
+
+;; A rulebase -- essentially an immutable Rete network with a collection of
+;; alpha and beta nodes and supporting structure.
+(sc/defrecord Rulebase [;; Map of matched type to the alpha nodes that handle them.
+ alpha-roots :- {sc/Any [AlphaNode]}
+ ;; Root beta nodes (join, accumulate, etc.).
+ beta-roots :- [BetaNode]
+ ;; Productions in the rulebase.
+ productions :- #{schema/Production}
+ ;; Production nodes.
+ production-nodes :- [ProductionNode]
+ ;; Map of queries to the nodes hosting them.
+ query-nodes :- {sc/Any QueryNode}
+ ;; Map of id to one of the alpha or beta nodes (join, accumulate, etc).
+ id-to-node :- {sc/Num (sc/conditional
+ :activation AlphaNode
+ :else BetaNode)}
+ ;; Function for sorting activation groups of rules for firing.
+ activation-group-sort-fn
+ ;; Function that takes a rule and returns its activation group.
+ activation-group-fn
+ ;; Function that takes facts and determines what alpha nodes they match.
+ get-alphas-fn
+ ;; A map of [node-id field-name] to function.
+ node-expr-fn-lookup :- schema/NodeFnLookup])
+
+(defn- is-variable?
+ "Returns true if the given expression is a variable (a symbol prefixed by ?)"
+ [expr]
+ (and (symbol? expr)
+ (.startsWith (name expr) "?")))
+
+(def ^:private reflector
+ "For some reason (bug?) the default reflector doesn't use the
+ Clojure dynamic class loader, which prevents reflecting on
+ `defrecords`. Work around by supplying our own which does."
+ (clojure.reflect.JavaReflector. (clojure.lang.RT/makeClassLoader)))
+
+(defn- get-field-accessors
+ "Given a clojure.lang.IRecord subclass, returns a map of field name to a
+ symbol representing the function used to access it."
+ [cls]
+ (into {}
+ (for [field-name (clojure.lang.Reflector/invokeStaticMethod ^Class cls
+ "getBasis"
+ ^"[Ljava.lang.Object;" (make-array Object 0))]
+ ;; Do not preserve the metadata on the field names returned from
+ ;; IRecord.getBasis() since it may not be safe to eval this metadata
+ ;; in other contexts. This mostly applies to :tag metadata that may
+ ;; be unqualified class names symbols at this point.
+ [(with-meta field-name {}) (symbol (str ".-" field-name))])))
+
+(defn- get-bean-accessors
+ "Returns a map of bean property name to a symbol representing the function used to access it."
+ [cls]
+ (into {}
+ ;; Iterate through the bean properties, returning tuples and the corresponding methods.
+ (for [^PropertyDescriptor property (seq (.. java.beans.Introspector
+ (getBeanInfo cls)
+ (getPropertyDescriptors)))
+ :let [read-method (.getReadMethod property)]
+              ;; In the event that the class has an indexed property without a basic accessor, we will simply skip
+              ;; the accessor as we will not know how to retrieve the value. See https://github.com/cerner/clara-rules/issues/446
+ :when read-method]
+ [(symbol (string/replace (.getName property) #"_" "-")) ; Replace underscore with idiomatic dash.
+ (symbol (str "." (.getName read-method)))])))
+
+(defn effective-type [type]
+ (if (symbol? type)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name type))
+ type))
+
+(defn get-fields
+ "Returns a map of field name to a symbol representing the function used to access it."
+ [type]
+ (let [type (effective-type type)]
+ (cond
+ (isa? type clojure.lang.IRecord) (get-field-accessors type)
+ (class? type) (get-bean-accessors type) ; Treat unrecognized classes as beans.
+ :else [])))
+
+(defn- equality-expression? [expression]
+ (let [qualify-when-sym #(when-let [resolved (and (symbol? %)
+ (resolve %))]
+ (and (var? resolved)
+ (symbol (-> resolved meta :ns ns-name name)
+ (-> resolved meta :name name))))
+ op (first expression)]
+ ;; Check for unqualified = or == to support original Clara unification
+ ;; syntax where clojure.core/== was supposed to be excluded explicitly.
+ (boolean (or (#{'= '== 'clojure.core/= 'clojure.core/==} op)
+ (#{'clojure.core/= 'clojure.core/==} (qualify-when-sym op))))))
+
+(def ^:dynamic *compile-ctx* nil)
+
+(defn try-eval
+ "Evals the given `expr`. If an exception is thrown, it is caught and an
+ ex-info exception is thrown with more details added. Uses *compile-ctx*
+ for additional contextual info to add to the exception details."
+ [expr]
+ (try
+ (eval expr)
+ (catch Exception e
+ (let [edata (merge {:expr expr}
+ (dissoc *compile-ctx* :msg))
+ msg (:msg *compile-ctx*)]
+ (throw (ex-info (str (if msg (str "Failed " msg) "Failed compiling.") \newline
+ ;; Put ex-data specifically in the string since
+ ;; often only ExceptionInfo.toString() will be
+ ;; called, which doesn't show this data.
+ edata \newline)
+ edata
+ e))))))
+
+(defn- compile-constraints
+ "Compiles a sequence of constraints into a structure that can be evaluated.
+
+ Callers may also pass a collection of equality-only-variables, which instructs
+ this function to only do an equality check on them rather than create a unification binding."
+ ([exp-seq]
+ (compile-constraints exp-seq #{}))
+ ([exp-seq equality-only-variables]
+
+ (if (empty? exp-seq)
+ `(deref ~'?__bindings__)
+ (let [[exp & rest-exp] exp-seq
+ variables (into #{}
+ (filter (fn [item]
+ (and (symbol? item)
+ (= \? (first (name item)))
+ (not (equality-only-variables item))))
+ exp))
+ expression-values (remove variables (rest exp))
+ binds-variables? (and (equality-expression? exp)
+ (seq variables))
+
+ ;; if we intend on binding any variables at this level of the
+ ;; expression then future layers should not be able to rebind them.
+ ;; see https://github.com/cerner/clara-rules/issues/417 for more info
+ equality-only-variables (if binds-variables?
+ (into equality-only-variables
+ variables)
+ equality-only-variables)
+
+ compiled-rest (compile-constraints rest-exp equality-only-variables)]
+
+ (when (and binds-variables?
+ (empty? expression-values))
+ (throw (ex-info (str "Malformed variable binding for " variables ". No associated value.")
+ {:variables (map keyword variables)})))
+
+ (cond
+ binds-variables?
+ ;; Bind each variable with the first value we encounter.
+ ;; The additional equality checks are handled below so which value
+ ;; we bind to is not important. So an expression like (= ?x value-1 value-2) will
+ ;; bind ?x to value-1, and then ensure value-1 and value-2 are equal below.
+
+ ;; First assign each value in a let, so it is visible to subsequent expressions.
+ `(let [~@(for [variable variables
+ let-expression [variable (first expression-values)]]
+ let-expression)]
+
+ ;; Update the bindings produced by this expression.
+ ~@(for [variable variables]
+ `(swap! ~'?__bindings__ assoc ~(keyword variable) ~variable))
+
+ ;; If there is more than one expression value, we need to ensure they are
+ ;; equal as well as doing the bind. This ensures that value-1 and value-2 are
+ ;; equal.
+ ~(if (> (count expression-values) 1)
+
+ `(if ~(cons '= expression-values) ~compiled-rest nil)
+ ;; No additional values to check, so move on to the rest of
+ ;; the expression
+ compiled-rest))
+
+         ;; A constraint that is empty doesn't need to be added as a check,
+ ;; simply move on to the rest
+ (empty? exp)
+ compiled-rest
+
+ ;; No variables to unify, so simply check the expression and
+ ;; move on to the rest.
+ :else
+ `(if ~exp ~compiled-rest nil))))))
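+
+;; Rough sketch (shape only) of what the recursion above generates for the
+;; constraint vector '[(= ?t temperature) (< temperature 20)]: ?t is bound in a
+;; let and recorded via swap!, the non-binding check becomes an if, and the
+;; bindings atom is dereferenced once every constraint has passed.
+(comment
+  (let [?t temperature]
+    (swap! ?__bindings__ assoc :?t ?t)
+    (if (< temperature 20)
+      (deref ?__bindings__)
+      nil)))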
+
+(defn flatten-expression
+ "Flattens expression as clojure.core/flatten does, except will flatten
+ anything that is a collection rather than specifically sequential."
+ [expression]
+ (filter (complement coll?)
+ (tree-seq coll? seq expression)))
+
+(defn variables-as-keywords
+ "Returns a set of the symbols in the given s-expression that start with '?' as keywords"
+ [expression]
+ (into #{} (for [item (flatten-expression expression)
+ :when (is-variable? item)]
+ (keyword item))))
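+
+;; Illustrative results of the two helpers above (hypothetical expressions):
+(comment
+  (flatten-expression '(= ?age (+ 10 base)))
+  ;; => (= ?age + 10 base)
+
+  (variables-as-keywords '[(= ?name name) (> age ?min-age)])
+  ;; => #{:?name :?min-age}
+  )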
+
+(defn field-name->accessors-used
+ "Returns a map of field name to accessors for any field names of type used
+ in the constraints."
+ [type constraints]
+ (let [field-name->accessor (get-fields type)
+ all-fields (set (keys field-name->accessor))
+ fields-used (into #{}
+ (filter all-fields)
+ (flatten-expression constraints))]
+ (into {}
+ (filter (comp fields-used key))
+ field-name->accessor)))
+
+(defn- add-meta
+ "Helper function to add metadata."
+ [fact-symbol fact-type]
+ (let [fact-type (if (symbol? fact-type)
+ (try
+ (resolve fact-type)
+ (catch Exception e
+ ;; We shouldn't have to worry about exceptions being thrown here according
+ ;; to `resolve`s docs.
+ ;; However, due to http://dev.clojure.org/jira/browse/CLJ-1403 being open
+ ;; still, it is safer to catch any exceptions thrown.
+ fact-type))
+ fact-type)]
+ (if (class? fact-type)
+ (vary-meta fact-symbol assoc :tag (symbol (.getName ^Class fact-type)))
+ fact-symbol)))
+
+(defn- mk-node-fn-name
+ "A simple helper function to maintain a consistent pattern for naming anonymous functions in the rulebase.
+
+ node-type - expected to align with one of the types of nodes defined in clara.rules.engine, and node-type->abbreviated-type.
+ node-id - expected to be an integer
+ fn-type - an identifier for what the function means to the node
+
+ fn-type is required as some nodes might have multiple functions associated to them, ex. Accumulator nodes containing
+ filter functions."
+ [node-type node-id fn-type]
+ (if-let [abbreviated-node-type (get eng/node-type->abbreviated-type node-type)]
+ (symbol (str abbreviated-node-type "-" node-id "-" fn-type))
+ (throw (ex-info "Unrecognized node type"
+ {:node-type node-type
+ :node-id node-id
+ :fn-type fn-type}))))
+
+(defn compile-condition
+ "Returns a function definition that can be used in alpha nodes to test the condition."
+ [type node-id destructured-fact constraints result-binding env]
+ (let [;; Get a map of fieldnames to access function symbols.
+ accessors (field-name->accessors-used type constraints)
+ ;; The assignments should use the argument destructuring if provided, or default to accessors otherwise.
+ assignments (if destructured-fact
+ ;; Simply destructure the fact if arguments are provided.
+ [destructured-fact '?__fact__]
+ ;; No argument provided, so use our default destructuring logic.
+ (concat '(this ?__fact__)
+ (mapcat (fn [[name accessor]]
+ [name (list accessor '?__fact__)])
+ accessors)))
+
+ ;; The destructured environment, if any
+ destructured-env (if (> (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+        ;; Initial bindings used in the return of the compiled condition expression.
+ initial-bindings (if result-binding {result-binding '?__fact__} {})
+
+ ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-condition' to be used for this scenario
+ fn-name (mk-node-fn-name "AlphaNode" node-id "AE")]
+
+ `(fn ~fn-name [~(add-meta '?__fact__ type)
+ ~destructured-env]
+ (let [~@assignments
+ ~'?__bindings__ (atom ~initial-bindings)]
+ ~(compile-constraints constraints)))))
+
+(defn build-token-assignment
+ "A helper function to build variable assignment forms for tokens."
+ [binding-key]
+ (list (symbol (name binding-key))
+ (list `-> '?__token__ :bindings binding-key)))
+
+(defn compile-test-handler [node-id constraints env]
+ (let [binding-keys (variables-as-keywords constraints)
+ assignments (mapcat build-token-assignment binding-keys)
+
+ ;; The destructured environment, if any
+ destructured-env (if (> (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+ ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-test' to be used for this scenario
+ fn-name (mk-node-fn-name "TestNode" node-id "TE")]
+ `(fn ~fn-name [~'?__token__ ~destructured-env]
+ (let [~@assignments]
+ (and ~@constraints)))))
+
+(defn compile-test [node-id constraints env]
+ (let [test-handler (compile-test-handler node-id constraints env)]
+ `(array-map :handler ~test-handler
+ :constraints '~constraints)))
+
+(defn compile-action
+ "Compile the right-hand-side action of a rule, returning a function to execute it."
+ [node-id binding-keys rhs env]
+ (let [;; Avoid creating let bindings in the compile code that aren't actually used in the body.
+ ;; The bindings only exist in the scope of the RHS body, not in any code called by it,
+ ;; so this scanning strategy will detect all possible uses of binding variables in the RHS.
+ ;; Note that some strategies with macros could introduce bindings, but these aren't something
+ ;; we're trying to support. If necessary a user could macroexpand their RHS code manually before
+ ;; providing it to Clara.
+ rhs-bindings-used (variables-as-keywords rhs)
+
+ assignments (sequence
+ (comp
+ (filter rhs-bindings-used)
+ (mapcat build-token-assignment))
+ binding-keys)
+
+ ;; The destructured environment, if any.
+ destructured-env (if (> (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+ ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-action' to be used for this scenario
+ fn-name (mk-node-fn-name "ProductionNode" node-id "AE")]
+ `(fn ~fn-name [~'?__token__ ~destructured-env]
+ (let [~@assignments]
+ ~rhs))))
+
+(defn compile-accum
+ "Used to create accumulators that take the environment into account."
+ [node-id node-type accum env]
+ (let [destructured-env
+ (if (> (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+ ;; AccE will stand for AccumExpr
+ fn-name (mk-node-fn-name node-type node-id "AccE")]
+ `(fn ~fn-name [~destructured-env]
+ ~accum)))
+
+(defn compile-join-filter
+ "Compiles to a predicate function that ensures the given items can be unified. Returns a ready-to-eval
+ function that accepts the following:
+
+ * a token from the parent node
+ * the fact
+ * a map of bindings from the fact, which was typically computed on the alpha side
+ * an environment
+
+ The function created here returns truthy if the given fact satisfies the criteria."
+ [node-id node-type {:keys [type constraints args] :as unification-condition} ancestor-bindings element-bindings env]
+ (let [accessors (field-name->accessors-used type constraints)
+
+ destructured-env (if (> (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+ destructured-fact (first args)
+
+ fact-assignments (if destructured-fact
+ ;; Simply destructure the fact if arguments are provided.
+ [destructured-fact '?__fact__]
+ ;; No argument provided, so use our default destructuring logic.
+ (concat '(this ?__fact__)
+ (mapcat (fn [[name accessor]]
+ [name (list accessor '?__fact__)])
+ accessors)))
+
+ ;; Get the bindings used in the join filter expression that are pulled from
+ ;; the token. This is simply the bindings in the constraints with the newly
+ ;; created element bindings for this condition removed.
+ token-binding-keys (remove element-bindings (variables-as-keywords constraints))
+
+ token-assignments (mapcat build-token-assignment token-binding-keys)
+
+ new-binding-assignments (mapcat #(list (symbol (name %))
+ (list 'get '?__element-bindings__ %))
+ element-bindings)
+
+ assignments (concat
+ fact-assignments
+ token-assignments
+ new-binding-assignments)
+
+ equality-only-variables (into #{} (for [binding ancestor-bindings]
+ (symbol (name (keyword binding)))))
+
+ ;; JFE will stand for JoinFilterExpr
+ fn-name (mk-node-fn-name node-type node-id "JFE")]
+
+ `(fn ~fn-name
+ [~'?__token__
+ ~(add-meta '?__fact__ type)
+ ~'?__element-bindings__
+ ~destructured-env]
+ (let [~@assignments
+ ~'?__bindings__ (atom {})]
+ ~(compile-constraints constraints equality-only-variables)))))
+
+(defn- expr-type [expression]
+ (if (map? expression)
+ :condition
+ (first expression)))
+
+(defn- cartesian-join
+ "Performs a cartesian join to distribute disjunctions for disjunctive normal form.,
+ This distributing each disjunction across every other disjunction and also across each
+ given conjunction. Returns a sequence where each element contains a sequence
+ of conjunctions that can be used in rules."
+ [disjunctions-to-distribute conjunctions]
+
+  ;; For every disjunction, do a cartesian join to distribute it
+  ;; across every other disjunction. We also must distribute it across
+  ;; each conjunction.
+ (reduce
+ (fn [distributed-disjunctions disjunction-to-distribute]
+
+ (for [expression disjunction-to-distribute
+ distributed-disjunction distributed-disjunctions]
+ (conj distributed-disjunction expression)))
+
+ ;; Start with our conjunctions to join to, since we must distribute
+ ;; all disjunctions across these as well.
+ [conjunctions]
+ disjunctions-to-distribute))
+
+(defn to-dnf
+ "Convert a lhs expression to disjunctive normal form."
+ [expression]
+
+ ;; Always validate the expression schema, as this is only done at compile time.
+ (sc/validate schema/Condition expression)
+ (condp = (expr-type expression)
+ ;; Individual conditions can return unchanged.
+ :condition
+ expression
+
+ :test
+ expression
+
+ :exists
+ expression
+
+ ;; Apply de Morgan's law to push negation nodes to the leaves.
+ :not
+ (let [children (rest expression)
+ child (first children)]
+
+ (when (not= 1 (count children))
+ (throw (ex-info "Negation must have only one child." {:illegal-negation expression})))
+
+ (condp = (expr-type child)
+
+ ;; If the child is a single condition, simply return the ast.
+ :condition expression
+
+ :test expression
+
+ ;; Note that :exists does not support further nested boolean conditions.
+ ;; It is just syntax sugar over an accumulator.
+ :exists expression
+
+ ;; Double negation, so just return the expression under the second negation.
+ :not
+ (to-dnf (second child))
+
+      ;; DeMorgan's law: a negated conjunction becomes a disjunction of negations.
+ :and (to-dnf (cons :or (for [grandchild (rest child)] [:not grandchild])))
+
+      ;; DeMorgan's law: a negated disjunction becomes a conjunction of negations.
+ :or (to-dnf (cons :and (for [grandchild (rest child)] [:not grandchild])))))
+
+ ;; For all others, recursively process the children.
+ (let [children (map to-dnf (rest expression))
+          ;; Get all conjunctions, which will not contain any disjunctions since they were processed above.
+ conjunctions (filter #(#{:and :condition :not :exists} (expr-type %)) children)]
+
+      ;; If there is only one child, the :and or :or operator can simply be eliminated.
+ (if (= 1 (count children))
+ (first children)
+
+ (condp = (expr-type expression)
+
+ :and
+ (let [disjunctions (map rest (filter #(= :or (expr-type %)) children))
+ ;; Merge all child conjunctions into a single conjunction.
+ combine-conjunctions (fn [children]
+ (cons :and
+ (for [child children
+ nested-child (if (= :and (expr-type child))
+ (rest child)
+ [child])]
+ nested-child)))]
+ (if (empty? disjunctions)
+ (combine-conjunctions children)
+ (cons :or
+ (for [c (cartesian-join disjunctions conjunctions)]
+ (combine-conjunctions c)))))
+ :or
+ ;; Merge all child disjunctions into a single disjunction.
+ (let [disjunctions (mapcat rest (filter #(#{:or} (expr-type %)) children))]
+ (cons :or (concat disjunctions conjunctions))))))))
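+
+;; Conceptual sketch of the rewrites above, where A, B and C stand for condition
+;; maps (real input is schema-validated, so this is illustration only):
+;;
+;;   [:not [:and A B]]  ;; de Morgan: becomes [:or [:not A] [:not B]]
+;;   [:and A [:or B C]] ;; distribution: becomes an :or of two :and branches,
+;;                      ;; one per disjunct of the inner :or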
+
+(defn- non-equality-unification?
+  "Returns true if the given expression does a non-equality unification against a variable that
+   is not in the previously-bound set, indicating it can't be solved by simple unification."
+  [expression previously-bound]
+ (let [found-complex (atom false)
+ process-form (fn [form]
+ (when (and (seq? form)
+ (not (equality-expression? form))
+ (some (fn [sym] (and (symbol? sym)
+ (.startsWith (name sym) "?")
+ (not (previously-bound sym))))
+ (flatten-expression form)))
+
+ (reset! found-complex true))
+
+ form)]
+
+    ;; Walk the expression to find use of a symbol that can't be solved by equality-based unification.
+ (doall (walk/postwalk process-form expression))
+
+ @found-complex))
+
+(defn condition-type
+ "Returns the type of a single condition that has been transformed
+ to disjunctive normal form. The types are: :negation, :accumulator, :test, :exists, and :join"
+ [condition]
+ (let [is-negation (= :not (first condition))
+ is-exists (= :exists (first condition))
+ accumulator (:accumulator condition)
+ result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
+ condition (cond
+ is-negation (second condition)
+ accumulator (:from condition)
+ :else condition)
+ node-type (cond
+ is-negation :negation
+ is-exists :exists
+ accumulator :accumulator
+ (:type condition) :join
+ :else :test)]
+
+ node-type))
+
+(defn- extract-exists
+ "Converts :exists operations into an accumulator to detect
+ the presence of a fact and a test to check that count is
+ greater than zero.
+
+ It may be possible to replace this conversion with a specialized
+ ExtractNode in the future, but this transformation is simple
+ and meets the functional needs."
+ [conditions]
+ (for [condition conditions
+ expanded (if (= :exists (condition-type condition))
+ ;; This is an :exists condition, so expand it
+ ;; into an accumulator and a test.
+ (let [exists-count (gensym "?__gen__")]
+ [{:accumulator '(clara.rules.accumulators/exists)
+ :from (second condition)
+ :result-binding (keyword exists-count)}])
+
+ ;; This is not an :exists condition, so do not change it.
+ [condition])]
+
+ expanded))
+
+(defn- classify-variables
+ "Classifies the variables found in the given contraints into 'bound' vs 'free'
+ variables. Bound variables are those that are found in a valid
+ equality-based, top-level binding form. All other variables encountered are
+ considered free. Returns a tuple of the form
+ [bound-variables free-variables]
+ where bound-variables and free-variables are the sets of bound and free
+ variables found in the constraints respectively."
+ [constraints]
+ (reduce (fn [[bound-variables free-variables] constraint]
+ ;; Only top-level constraint forms can introduce new variable bindings.
+ ;; If the top-level constraint is an equality expression, add the
+ ;; bound variables to the set of bound variables.
+ (if (and (seq? constraint) (equality-expression? constraint))
+ [(->> (rest constraint)
+ (filterv is-variable?)
+ ;; A variable that was marked unbound in a previous expression should
+ ;; not be considered bound.
+ (remove free-variables)
+ (into bound-variables))
+ ;; Any other variables in a nested form are now considered "free".
+ (->> (rest constraint)
+ ;; We already have checked this level symbols for bound variables.
+ (remove symbol?)
+ flatten-expression
+ (filter is-variable?)
+ ;; Variables previously bound in an expression are not free.
+ (remove bound-variables)
+ (into free-variables))]
+
+ ;; Binding forms are not supported nested within other forms, so
+ ;; any variables that occur now are considered "free" variables.
+ [bound-variables
+ (->> (flatten-expression constraint)
+ (filterv is-variable?)
+ ;; Variables previously bound in an expression are not free.
+ (remove bound-variables)
+ (into free-variables))]))
+ [#{} #{}]
+ constraints))
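+
+;; Illustrative result of the classification above: ?a is bound by a top-level
+;; equality form, while ?b appears only inside a non-equality form and is free.
+(comment
+  (classify-variables '[(= ?a a) (< ?b 10)])
+  ;; => [#{?a} #{?b}]
+  )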
+
+(sc/defn analyze-condition :- {;; Variables used in the condition that are bound
+ :bound #{sc/Symbol}
+
+ ;; Variables used in the condition that are unbound.
+ :unbound #{sc/Symbol}
+
+ ;; The condition that was analyzed
+ :condition schema/Condition
+
+ ;; Flag indicating it is an accumulator.
+ :is-accumulator sc/Bool}
+ [condition :- schema/Condition]
+
+ (let [dnf-condition (to-dnf condition)
+
+        ;; Break the condition up into leaves
+ leaf-conditions (case (first dnf-condition)
+
+ ;; A top level disjunction, so get all child conjunctions and
+ ;; flatten them.
+ :or
+ (for [nested-condition (rest dnf-condition)
+ leaf-condition (if (= :and (first nested-condition))
+ (rest nested-condition)
+ [nested-condition])]
+ leaf-condition)
+
+                          ;; A top-level :and of nested conditions, so just use them.
+ :and
+ (rest dnf-condition)
+
+ ;; The condition itself is a leaf, so keep it.
+ [dnf-condition])]
+
+ (reduce
+ (fn [{:keys [bound unbound condition is-accumulator]} leaf-condition]
+
+       ;; The effective leaf for variable purposes may be contained in a negation or
+ ;; an accumulator, so extract it.
+ (let [effective-leaf (condp = (condition-type leaf-condition)
+ :accumulator (:from leaf-condition)
+ :negation (second leaf-condition)
+ leaf-condition)
+
+ constraints (:constraints effective-leaf)
+
+ [bound-variables unbound-variables] (if (#{:negation :test} (condition-type leaf-condition))
+ ;; Variables used in a negation should be considered
+ ;; unbound since they aren't usable in another condition,
+ ;; so label all variables as unbound. Similarly, :test
+ ;; conditions can't bind new variables since they don't
+ ;; have any new facts as input. See:
+ ;; https://github.com/cerner/clara-rules/issues/357
+ [#{}
+ (apply set/union (classify-variables constraints))]
+
+ ;; It is not a negation, so simply classify variables.
+ (classify-variables constraints))
+
+ bound-with-result-bindings (cond-> bound-variables
+ (:fact-binding effective-leaf) (conj (symbol (name (:fact-binding effective-leaf))))
+ (:result-binding leaf-condition) (conj (symbol (name (:result-binding leaf-condition)))))
+
+ ;; All variables bound in this condition.
+ all-bound (set/union bound bound-with-result-bindings)
+
+ ;; Unbound variables, minus those that have been bound elsewhere in this condition.
+ all-unbound (set/difference (set/union unbound-variables unbound) all-bound)]
+
+ {:bound all-bound
+ :unbound all-unbound
+ :condition condition
+ :is-accumulator (or is-accumulator
+ (= :accumulator
+ (condition-type leaf-condition)))}))
+
+ {:bound #{}
+ :unbound #{}
+ :condition condition
+ :is-accumulator false}
+ leaf-conditions)))
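+
+;; Illustrative sketch (not part of the original source): analyzing a simple join condition.
+;; Temperature is a hypothetical fact type used purely for illustration.
+(comment
+  (analyze-condition '{:type Temperature :constraints [(= ?loc location)]})
+  ;; => {:bound #{?loc}
+  ;;     :unbound #{}
+  ;;     :condition {:type Temperature :constraints [(= ?loc location)]}
+  ;;     :is-accumulator false}
+  )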
+
+(sc/defn sort-conditions :- [schema/Condition]
+ "Performs a topologic sort of conditions to ensure variables needed by
+ child conditions are bound."
+ [conditions :- [schema/Condition]]
+
+ ;; Get the bound and unbound variables for all conditions and sort them.
+ (let [classified-conditions (map analyze-condition conditions)]
+
+ (loop [sorted-conditions []
+ bound-variables #{}
+ remaining-conditions classified-conditions]
+
+ (if (empty? remaining-conditions)
+ ;; No more conditions to sort, so return the raw conditions
+ ;; in sorted order.
+ (map :condition sorted-conditions)
+
+ ;; Unsatisfied conditions remain, so find ones we can satisfy.
+ (let [satisfied? (fn [classified-condition]
+ (set/subset? (:unbound classified-condition)
+ bound-variables))
+
+ ;; Find non-accumulator conditions that are satisfied. We defer
+ ;; accumulators until later in the rete network because they
+ ;; may fire a default value if all needed bindings earlier
+ ;; in the network are satisfied.
+ satisfied-non-accum? (fn [classified-condition]
+ (and (not (:is-accumulator classified-condition))
+ (set/subset? (:unbound classified-condition)
+ bound-variables)))
+
+ has-satisfied-non-accum (some satisfied-non-accum? remaining-conditions)
+
+ newly-satisfied (if has-satisfied-non-accum
+ (filter satisfied-non-accum? remaining-conditions)
+ (filter satisfied? remaining-conditions))
+
+ still-unsatisfied (if has-satisfied-non-accum
+ (remove satisfied-non-accum? remaining-conditions)
+ (remove satisfied? remaining-conditions))
+
+ updated-bindings (apply set/union bound-variables
+ (map :bound newly-satisfied))]
+
+ ;; If no remaining conditions can be satisfied then the production is invalid.
+ (when (empty? newly-satisfied)
+
+ ;; Get the subset of variables that cannot be satisfied.
+ (let [unsatisfiable (set/difference
+ (apply set/union (map :unbound still-unsatisfied))
+ bound-variables)]
+ (throw (ex-info (str "Using variable that is not previously bound. This can happen "
+ "when an expression uses a previously unbound variable, "
+ "or if a variable is referenced in a nested part of a parent "
+ "expression, such as (or (= ?my-expression my-field) ...). " \newline
+ "Note that variables used in negations are not bound for subsequent
+ rules since the negation can never match." \newline
+ "Production: " \newline
+ (:production *compile-ctx*) \newline
+ "Unbound variables: "
+ unsatisfiable)
+ {:production (:production *compile-ctx*)
+ :variables unsatisfiable}))))
+
+ (recur (into sorted-conditions newly-satisfied)
+ updated-bindings
+ still-unsatisfied))))))
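+
+;; Illustrative sketch (not part of the original source): a :test condition that uses ?age is
+;; ordered after the hypothetical Person condition that binds ?age.
+(comment
+  (sort-conditions '[{:constraints [(> ?age 18)]}
+                     {:type Person :constraints [(= ?age age)]}])
+  ;; => ({:type Person :constraints [(= ?age age)]}
+  ;;     {:constraints [(> ?age 18)]})
+  )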
+
+(defn- non-equality-unifications
+ "Returns a set of unifications that do not use equality-based checks."
+ [constraints]
+ (let [[bound-variables unbound-variables] (classify-variables constraints)]
+ (into #{}
+ (for [constraint constraints
+ :when (non-equality-unification? constraint bound-variables)]
+ constraint))))
+
+(sc/defn condition-to-node :- schema/ConditionNode
+ "Converts a condition to a node structure."
+ [condition :- schema/Condition
+ env :- (sc/maybe {sc/Keyword sc/Any})
+ parent-bindings :- #{sc/Keyword}]
+ (let [node-type (condition-type condition)
+ accumulator (:accumulator condition)
+ result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
+ condition (cond
+ (= :negation node-type) (second condition)
+ accumulator (:from condition)
+ :else condition)
+
+ ;; Convert a test within a negation to a negation of the test. This is necessary
+ ;; because negation nodes expect an additional condition to match against.
+ [node-type condition] (if (and (= node-type :negation)
+ (= :test (condition-type condition)))
+
+ ;; Create a negated version of our test condition.
+ [:test {:constraints [(list 'not (cons 'and (:constraints condition)))]}]
+
+ ;; This was not a test within a negation, so keep the previous values.
+ [node-type condition])
+
+ ;; Get the set of non-equality unifications that cannot be resolved locally to the rule.
+ non-equality-unifications (if (or (= :accumulator node-type)
+ (= :negation node-type)
+ (= :join node-type))
+ (non-equality-unifications (:constraints condition))
+ #{})
+
+ ;; If there are any non-equality unifications, create a join with a filter expression to handle them.
+ join-filter-expressions (if (seq non-equality-unifications)
+
+ (assoc condition :constraints (filterv non-equality-unifications (:constraints condition)))
+
+ nil)
+
+ ;; Remove instances of non-equality constraints from accumulator
+ ;; and negation nodes, since those are handled with specialized node implementations.
+ condition (if (seq non-equality-unifications)
+ (assoc condition
+ :constraints (into [] (remove non-equality-unifications (:constraints condition)))
+ :original-constraints (:constraints condition))
+
+ condition)
+
+ ;; Variables used in the constraints
+ constraint-bindings (variables-as-keywords (:constraints condition))
+
+ ;; Variables used in the condition.
+ cond-bindings (if (:fact-binding condition)
+ (conj constraint-bindings (:fact-binding condition))
+ constraint-bindings)
+
+ new-bindings (set/difference (variables-as-keywords (:constraints condition))
+ parent-bindings)
+
+ join-filter-bindings (if join-filter-expressions
+ (variables-as-keywords join-filter-expressions)
+ nil)]
+
+ (cond->
+ {:node-type node-type
+ :condition condition
+ :new-bindings new-bindings
+ :used-bindings (set/union cond-bindings join-filter-bindings)}
+
+ (seq env) (assoc :env env)
+
+ ;; Add the join bindings to join, accumulator or negation nodes.
+ (#{:join :negation :accumulator} node-type) (assoc :join-bindings (set/intersection cond-bindings parent-bindings))
+
+ accumulator (assoc :accumulator accumulator)
+
+ result-binding (assoc :result-binding result-binding)
+
+ join-filter-expressions (assoc :join-filter-expressions join-filter-expressions)
+
+ join-filter-bindings (assoc :join-filter-join-bindings (set/intersection join-filter-bindings parent-bindings)))))
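+
+;; Illustrative sketch (not part of the original source): converting a hypothetical join
+;; condition with no parent bindings into its node description.
+(comment
+  (condition-to-node '{:type Temperature :constraints [(= ?loc location) (< temperature 20)]}
+                     nil
+                     #{})
+  ;; => {:node-type :join
+  ;;     :condition {:type Temperature :constraints [(= ?loc location) (< temperature 20)]}
+  ;;     :new-bindings #{:?loc}
+  ;;     :used-bindings #{:?loc}
+  ;;     :join-bindings #{}}
+  )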
+
+(sc/defn ^:private add-node :- schema/BetaGraph
+ "Adds a node to the beta graph."
+ [beta-graph :- schema/BetaGraph
+ source-ids :- [sc/Int]
+ target-id :- sc/Int
+ target-node :- (sc/either schema/ConditionNode schema/ProductionNode)]
+
+ ;; Add the production or condition to the network.
+ (let [beta-graph (assoc-in beta-graph [:id-to-new-bindings target-id]
+ ;; A ProductionNode will not have the set of new bindings previously created,
+ ;; so we assign them an empty set here. A ConditionNode will always have a set of new bindings,
+ ;; even if it is an empty one; this is enforced by the schema. Having an empty set of new bindings
+ ;; is a valid and meaningful state that just means that all join bindings in that node come from right-activations
+ ;; earlier in the network.
+ (or (:new-bindings target-node)
+ #{}))
+
+ beta-graph (if (#{:production :query} (:node-type target-node))
+ (assoc-in beta-graph [:id-to-production-node target-id] target-node)
+ (assoc-in beta-graph [:id-to-condition-node target-id] target-node))]
+
+ ;; Associate the forward and backward edges.
+ (reduce (fn [beta-graph source-id]
+ (let [forward-path [:forward-edges source-id]
+ forward-previous (get-in beta-graph forward-path)
+ backward-path [:backward-edges target-id]
+ backward-previous (get-in beta-graph backward-path)]
+ (-> beta-graph
+ (assoc-in
+ forward-path
+ (if forward-previous
+ (conj forward-previous target-id)
+ (sorted-set target-id)))
+ (assoc-in
+ backward-path
+ (if backward-previous
+ (conj backward-previous source-id)
+ (sorted-set source-id))))))
+
+ beta-graph
+ source-ids)))
+
+(declare add-production)
+
+(sc/defn ^:private extract-negation :- {:new-expression schema/Condition
+ :beta-with-negations schema/BetaGraph}
+
+ "Extracts complex, nested negations and adds a rule to the beta graph to trigger the returned
+ negation expression."
+ [previous-expressions :- [schema/Condition]
+ expression :- schema/Condition
+ parent-ids :- [sc/Int]
+ ancestor-bindings :- #{sc/Keyword}
+ beta-graph :- schema/BetaGraph
+ production :- schema/Production
+ create-id-fn]
+
+ (if (and (= :not (first expression))
+ (sequential? (second expression))
+ (#{:and :or :not} (first (second expression))))
+
+ ;; Dealing with a compound negation, so extract it out.
+ (let [negation-expr (second expression)
+ gen-rule-name (str (or (:name production)
+ (gensym "gen-rule"))
+ "__"
+ (gensym))
+
+ ;; Insert the bindings from ancestors that are used in the negation
+ ;; in the NegationResult fact so that the [:not [NegationResult...]]
+ ;; condition can assert that the facts matching the negation
+ ;; have the necessary bindings.
+ ;; See https://github.com/cerner/clara-rules/issues/304 for more details
+ ;; and a case that behaves incorrectly without this check.
+ ancestor-bindings-in-negation-expr (set/intersection
+ (variables-as-keywords negation-expr)
+ ancestor-bindings)
+
+ ancestor-bindings-insertion-form (into {}
+ (map (fn [binding]
+ [binding (-> binding
+ name
+ symbol)]))
+ ancestor-bindings-in-negation-expr)
+
+ ancestor-binding->restriction-form (fn [b]
+ (list '= (-> b name symbol)
+ (list b 'ancestor-bindings)))
+
+ modified-expression `[:not {:type clara.rules.engine.NegationResult
+ :constraints [(~'= ~gen-rule-name ~'gen-rule-name)
+ ~@(map ancestor-binding->restriction-form
+ ancestor-bindings-in-negation-expr)]}]
+
+ generated-rule (cond-> {:name gen-rule-name
+ :lhs (concat previous-expressions [negation-expr])
+ :rhs `(clara.rules/insert! (eng/->NegationResult ~gen-rule-name
+ ~ancestor-bindings-insertion-form))}
+
+ ;; Propagate properties like salience to the generated production.
+ (:props production) (assoc :props (:props production))
+
+ true (assoc-in [:props :clara-rules/internal-salience] :extracted-negation)
+
+ ;; Propagate the environment (such as local bindings) if applicable.
+ (:env production) (assoc :env (:env production)))
+
+ ;; Add the generated rule to the beta network.
+
+ beta-with-negations (add-production generated-rule beta-graph create-id-fn)]
+
+ {:new-expression modified-expression
+ :beta-with-negations beta-with-negations})
+
+ ;; The expression wasn't a negation, so return the previous content.
+ {:new-expression expression
+ :beta-with-negations beta-graph}))
+
+;; A beta graph with no nodes.
+(def ^:private empty-beta-graph {:forward-edges (sorted-map)
+ :backward-edges (sorted-map)
+ :id-to-condition-node (sorted-map 0 ::root-condition)
+ :id-to-production-node (sorted-map)
+ :id-to-new-bindings (sorted-map)})
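+
+;; Illustrative sketch (not part of the original source): adding a single condition node
+;; (id 1) beneath the artificial root node (id 0) of the empty beta graph. The node map
+;; and its Temperature fact type are hypothetical.
+(comment
+  (add-node empty-beta-graph
+            [0]
+            1
+            '{:node-type :join
+              :condition {:type Temperature :constraints []}
+              :new-bindings #{}
+              :used-bindings #{}
+              :join-bindings #{}})
+  ;; => forward-edges {0 #{1}}, backward-edges {1 #{0}}, and the node stored
+  ;;    under :id-to-condition-node at id 1.
+  )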
+
+(sc/defn ^:private add-conjunctions :- {:beta-graph schema/BetaGraph
+ :new-ids [sc/Int]
+ :bindings #{sc/Keyword}}
+
+ "Adds a sequence of conjunctions to the graph in a parent-child relationship."
+
+ [conjunctions :- [schema/Condition]
+ parent-ids :- [sc/Int]
+ env :- (sc/maybe {sc/Keyword sc/Any})
+ ancestor-bindings :- #{sc/Keyword}
+ beta-graph :- schema/BetaGraph
+ create-id-fn]
+
+ (loop [beta-graph beta-graph
+ parent-ids parent-ids
+ bindings ancestor-bindings
+ [expression & remaining-expressions] conjunctions]
+
+ (if expression
+
+ (let [node (condition-to-node expression env bindings)
+
+ {:keys [result-binding fact-binding]} expression
+
+ all-bindings (cond-> (set/union bindings (:used-bindings node))
+ result-binding (conj result-binding)
+ fact-binding (conj fact-binding))
+
+ ;; Find children that all parent nodes have.
+ forward-edges (if (= 1 (count parent-ids))
+ ;; If there is only one parent then there is no need to reconstruct
+ ;; the set of forward edges. This is an intentional performance optimization for large
+ ;; beta graphs that have many nodes branching from a common node, typically the root node.
+ (-> (:forward-edges beta-graph)
+ (get (first parent-ids)))
+ (->> (select-keys (:forward-edges beta-graph) parent-ids)
+ vals
+ ;; Order doesn't matter here as we will effectively sort it using update-node->ids later,
+ ;; thus adding determinism.
+ (into #{} cat)))
+
+ id-to-condition-nodes (:id-to-condition-node beta-graph)
+
+ ;; We require that id-to-condition-nodes have an equal value to "node" under the ID
+ ;; for this to be used. In any possible edge case where there are equal nodes under different IDs,
+ ;; maintaining the lowest node id will add determinism.
+ ;; Having different nodes under the same ID would be a bug,
+ ;; but having an equivalent node under multiple IDs wouldn't necessarily be one.
+ ;;
+ ;; Using maps (nodes) as keys acts as a performance optimization here; we are relying on the fact that maps cache
+ ;; their hash codes. This saves us time in the worst-case scenarios when there are large numbers of forward edges
+ ;; that will be compared repeatedly. For example, when adding new children to the root node we must first compare
+ ;; all of the old children for a prior match; for each additional child added we incur the assoc, but the hash code
+ ;; has already been cached by prior iterations of add-conjunctions. In these cases we have seen that the hashing
+ ;; will outperform equivalence checks.
+ update-node->ids (fn [m id]
+ (let [node (get id-to-condition-nodes id)
+ prev (get m node)]
+ (if prev
+ ;; There might be nodes with multiple representations under the same parent; this would
+ ;; likely be due to nodes that have the same condition but do not share all the same
+ ;; parents. See Issue 433 for more information.
+ (assoc! m node (conj prev id))
+ (assoc! m node [id]))))
+
+ node->ids (-> (reduce update-node->ids
+ (transient {})
+ forward-edges)
+ persistent!)
+
+ backward-edges (:backward-edges beta-graph)
+
+ parent-ids-set (set parent-ids)
+
+ ;; Use the existing id or create a new one.
+ node-id (or (when-let [common-nodes (get node->ids node)]
+ ;; We need to validate that the node we intend to share has the same parents as the
+ ;; current node we are creating. See Issue 433 for more information.
+ (some #(when (= (get backward-edges %)
+ parent-ids-set)
+ %)
+ common-nodes))
+ (create-id-fn))
+
+ graph-with-node (add-node beta-graph parent-ids node-id node)]
+
+ (recur graph-with-node
+ [node-id]
+ all-bindings
+ remaining-expressions))
+
+ ;; No expressions remaining, so return the structure.
+ {:beta-graph beta-graph
+ :new-ids parent-ids
+ :bindings bindings})))
+
+(sc/defn ^:private add-production :- schema/BetaGraph
+ "Adds a production to the graph of beta nodes."
+ [production :- schema/Production
+ beta-graph :- schema/BetaGraph
+ create-id-fn]
+
+ ;; Flatten conditions, removing any extraneous ands so we can process them independently.
+ (let [flattened-conditions (for [condition (:lhs production)
+ child-condition (if (#{'and :and} (first condition))
+ (rest condition)
+ [condition])]
+
+ child-condition)
+
+ sorted-conditions (sort-conditions flattened-conditions)]
+
+ (loop [previous-conditions []
+ [current-condition & remaining-conditions] sorted-conditions
+ parent-ids [0]
+ ancestor-bindings #{}
+ beta-graph beta-graph]
+
+ (if current-condition
+
+ (let [{:keys [new-expression beta-with-negations]}
+ (extract-negation previous-conditions
+ current-condition
+ parent-ids
+ ancestor-bindings
+ beta-graph
+ production
+ create-id-fn)
+
+ condition (or new-expression
+ current-condition)
+
+ ;; Extract disjunctions from the condition.
+ dnf-expression (to-dnf condition)
+
+ ;; Get a sequence of disjunctions.
+ disjunctions (for [expression
+ (if (= :or (first dnf-expression)) ; Ignore top-level or in DNF.
+ (rest dnf-expression)
+ [dnf-expression])]
+
+ (if (= :and (first expression)) ; Ignore nested ands in DNF.
+ (rest expression)
+ [expression]))
+
+ {beta-with-nodes :beta-graph new-ids :new-ids all-bindings :bindings}
+ (reduce (fn [previous-result conjunctions]
+
+ ;; Get the beta graph, new identifiers, and complete bindings
+ (let [;; Convert exists operations to accumulator and test nodes.
+ exists-extracted (extract-exists conjunctions)
+
+ ;; Compute the new beta graph, ids, and bindings with the expressions.
+ new-result (add-conjunctions exists-extracted
+ parent-ids
+ (:env production)
+ ancestor-bindings
+ (:beta-graph previous-result)
+ create-id-fn)]
+
+ ;; Combine the newly created beta graph, node ids, and bindings
+ ;; for use in descendant nodes.
+ {:beta-graph (:beta-graph new-result)
+ :new-ids (into (:new-ids previous-result) (:new-ids new-result))
+ :bindings (set/union (:bindings previous-result)
+ (:bindings new-result))}))
+
+ ;; Initial reduce value, combining previous graph, parent ids, and ancestor variable bindings.
+ {:beta-graph beta-with-negations
+ :new-ids []
+ :bindings ancestor-bindings}
+
+ ;; Each disjunction contains a sequence of conjunctions.
+ disjunctions)]
+
+ (recur (conj previous-conditions current-condition)
+ remaining-conditions
+ new-ids
+ all-bindings
+ beta-with-nodes))
+
+ ;; No more conditions to add, so connect the production.
+ (if (:rhs production)
+ ;; If it's a production node, simply add it.
+ (add-node beta-graph
+ parent-ids
+ (create-id-fn)
+ {:node-type :production
+ :production production
+ :bindings ancestor-bindings})
+ ;; Else it's a query node, and we need to validate that the query has at least the bindings
+ ;; specified in the parameters.
+ (if (every? ancestor-bindings (:params production))
+ (add-node beta-graph
+ parent-ids
+ (create-id-fn)
+ {:node-type :query
+ :query production})
+ (throw (ex-info "Query does not contain bindings specified in parameters."
+ {:expected-bindings (:params production)
+ :available-bindings ancestor-bindings
+ :query (:name production)}))))))))
+
+(sc/defn to-beta-graph :- schema/BetaGraph
+ "Produces a description of the beta network."
+ [productions :- #{schema/Production}
+ create-id-fn :- IFn]
+ (reduce (fn [beta-graph production]
+ (binding [*compile-ctx* {:production production}]
+ (add-production production beta-graph create-id-fn)))
+
+ empty-beta-graph
+ productions))
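+
+;; Illustrative sketch (not part of the original source): building the beta graph for a single
+;; hypothetical production expressed directly as a map rather than via the defrule DSL.
+(comment
+  (let [id-counter (atom 0)
+        create-id-fn (fn [] (swap! id-counter inc))]
+    (to-beta-graph #{'{:name "example/too-cold"
+                       :lhs [{:type Temperature :constraints [(< temperature 0)]}]
+                       :rhs (println "too cold")}}
+                   create-id-fn))
+  ;; => a BetaGraph map with :forward-edges, :backward-edges, :id-to-condition-node,
+  ;;    :id-to-production-node and :id-to-new-bindings entries.
+  )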
+
+(sc/defn ^:private root-node? :- sc/Bool
+ "A helper function to determine if the node-id provided is a root node. A given node would be considered a root-node,
+ if its only backward edge was that of the artificial root node, node 0."
+ [backward-edges :- {sc/Int #{sc/Int}}
+ node-id :- sc/Int]
+ (= #{0} (get backward-edges node-id)))
+
+(sc/defn extract-exprs :- schema/NodeExprLookup
+ "Walks the Alpha and Beta graphs and extracts the expressions that will be used in the construction of the final network.
+ The extracted expressions are stored under a key of [node-id expression-key]; this allows the corresponding function to be retrieved
+ after it has been compiled.
+
+ Note: The keys of the map returned carry the metadata that can be used during evaluation. This metadata will contain,
+ if available, the compile context, file and ns. This metadata is not stored on the expression itself because it will
+ contain forms that will fail to eval."
+ [beta-graph :- schema/BetaGraph
+ alpha-graph :- [schema/AlphaNode]]
+ (let [backward-edges (:backward-edges beta-graph)
+ handle-expr (fn [id->expr s-expr id expr-key compilation-ctx]
+ (assoc id->expr
+ [id expr-key]
+ [s-expr (assoc compilation-ctx expr-key s-expr)]))
+
+ ;; If extract-exprs ever becomes a hot spot, this could be changed to use more Java interop.
+ id->expr (reduce (fn [prev alpha-node]
+ (let [{:keys [id condition env]} alpha-node
+ {:keys [type constraints fact-binding args]} condition
+ cmeta (meta condition)]
+ (handle-expr prev
+ (with-meta (compile-condition
+ type id (first args) constraints
+ fact-binding env)
+ ;; Remove all metadata but file and line number
+ ;; to protect from evaluating unsafe metadata
+ ;; See PR 243 for more detailed discussion
+ (select-keys cmeta [:line :file]))
+ id
+ :alpha-expr
+ {:file (or (:file cmeta) *file*)
+ :compile-ctx {:condition condition
+ :env env
+ :msg "compiling alpha node"}})))
+ {}
+ alpha-graph)
+ id->expr (reduce-kv (fn [prev id production-node]
+ (let [production (-> production-node :production)]
+ (handle-expr prev
+ (with-meta (compile-action id
+ (:bindings production-node)
+ (:rhs production)
+ (:env production))
+ (meta (:rhs production)))
+ id
+ :action-expr
+ ;; ProductionNode expressions can be arbitrary code, therefore we need the
+ ;; ns where the production was defined so that we can compile the expression
+ ;; later.
+ {:ns (:ns-name production)
+ :file (-> production :rhs meta :file)
+ :compile-ctx {:production production
+ :msg "compiling production node"}})))
+ id->expr
+ (:id-to-production-node beta-graph))]
+ (reduce-kv
+ (fn [prev id beta-node]
+ (let [condition (:condition beta-node)
+ condition (if (symbol? condition)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ condition)]
+ (case (or (:node-type beta-node)
+ ;; If there is no :node-type then the node is the ::root-condition
+ ;; however, creating a case for nil could potentially cause weird effects if something about the
+ ;; compilation of the beta graph changes. Therefore making an explicit case for ::root-condition
+ ;; and if there was anything that wasn't ::root-condition this case statement will fail rather than
+ ;; failing somewhere else.
+ beta-node)
+ ::root-condition prev
+
+ :join (if (or (root-node? backward-edges id)
+ (not (:join-filter-expressions beta-node)))
+ ;; This is either a RootJoin or HashJoin node, in either case they do not have an expression
+ ;; to capture.
+ prev
+ (handle-expr prev
+ (compile-join-filter id
+ "ExpressionJoinNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling expression join node"}}))
+ :negation (if (:join-filter-expressions beta-node)
+ (handle-expr prev
+ (compile-join-filter id
+ "NegationWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling negation with join filter node"}})
+ prev)
+ :test (handle-expr prev
+ (compile-test id (:constraints condition) (:env beta-node))
+ id
+ :test-expr
+ {:compile-ctx {:condition condition
+ :env (:env beta-node)
+ :msg "compiling test node"}})
+ :accumulator (cond-> (handle-expr prev
+ (compile-accum id
+ (if (:join-filter-expressions beta-node)
+ "AccumulateWithJoinFilterNode"
+ "AccumulateNode")
+ (:accumulator beta-node)
+ (:env beta-node))
+ id
+ :accum-expr
+ {:compile-ctx {:condition condition
+ :accumulator (:accumulator beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulator"}})
+
+ (:join-filter-expressions beta-node)
+ (handle-expr (compile-join-filter id
+ "AccumulateWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulate with join filter node"}}))
+ :query prev
+
+ ;; This error should only be thrown if there are changes to the compilation of the beta-graph
+ ;; such as an addition of a node type.
+ (throw (ex-info "Invalid node type encountered while compiling rulebase."
+ {:node beta-node})))))
+ id->expr
+ (:id-to-condition-node beta-graph))))
+
+(sc/defn compile-exprs :- schema/NodeFnLookup
+ "Takes a map in the form produced by extract-exprs and evaluates the values(expressions) of the map in a batched manner.
+ This allows the eval calls to be more effecient, rather than evaluating each expression on its own.
+ See #381 for more details."
+ [key->expr :- schema/NodeExprLookup
+ partition-size :- sc/Int]
+ (let [batching-try-eval (fn [compilation-ctxs exprs]
+ ;; Try to evaluate all of the expressions as a batch. If they fail the batch eval then we
+ ;; try them one by one with their compilation context; this is slow, but we were going to fail
+ ;; anyway.
+ (try
+ (mapv vector (eval exprs) compilation-ctxs)
+ (catch Exception e
+ ;; Using mapv here rather than map to avoid laziness; otherwise a compilation failure might
+ ;; fall into the throw below for the wrong reason.
+ (mapv (fn [expr compilation-ctx]
+ (with-bindings
+ {#'*compile-ctx* (:compile-ctx compilation-ctx)
+ #'*file* (:file compilation-ctx *file*)}
+ (try-eval expr)))
+ exprs
+ compilation-ctxs)
+ ;; If none of the rules are the issue, it is likely that the
+ ;; size of the code trying to be evaluated has exceeded the limit
+ ;; set by java.
+ (throw (ex-info (str "There was a failure while batch evaling the node expressions, " \newline
+ "but wasn't present when evaling them individually. This likely indicates " \newline
+ "that the method size exceeded the maximum set by the jvm, see the cause for the actual error.")
+ {:compilation-ctxs compilation-ctxs}
+ e)))))]
+ (into {}
+ cat
+ ;; Group by ns; most expressions will not have a defined ns, only expressions from production nodes do.
+ ;; These expressions must be evaluated in the ns where they were defined, as they will likely contain code
+ ;; that is namespace dependent.
+ (for [[nspace group] (sort-by key (group-by (comp :ns second val) key->expr))
+ ;; Partition the number of forms to be evaluated; Java has a limit on the size of methods, and if we were to
+ ;; evaluate all expressions at once it would likely exceed this limit and throw an exception.
+ expr-batch (partition-all partition-size group)
+ :let [node-expr-keys (mapv first expr-batch)
+ compilation-ctxs (mapv (comp second val) expr-batch)
+ exprs (mapv (comp first val) expr-batch)]]
+ (mapv vector
+ node-expr-keys
+ (with-bindings (if nspace
+ {#'*ns* (the-ns nspace)}
+ {})
+ (batching-try-eval compilation-ctxs exprs)))))))
+
+(defn safe-get
+ "A helper function for retrieving a given key from the provided map. If the key doesn't exist within the map this
+ function will throw an exception."
+ [m k]
+ (let [not-found ::not-found
+ v (get m k not-found)]
+ (if (identical? v not-found)
+ (throw (ex-info "Key not found with safe-get" {:map m :key k}))
+ v)))
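+
+;; Illustrative sketch (not part of the original source) of safe-get versus clojure.core/get.
+(comment
+  (safe-get {:a 1} :a) ;; => 1
+  (safe-get {:a 1} :b) ;; => throws ExceptionInfo "Key not found with safe-get"
+  )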
+
+(sc/defn ^:private compile-node
+ "Compiles a given node description into a node usable in the network with the
+ given children."
+ [beta-node :- (sc/either schema/ConditionNode schema/ProductionNode)
+ id :- sc/Int
+ is-root :- sc/Bool
+ children :- [sc/Any]
+ expr-fn-lookup :- schema/NodeFnLookup
+ new-bindings :- #{sc/Keyword}]
+
+ (let [{:keys [condition production query join-bindings env]} beta-node
+
+ condition (if (symbol? condition)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ condition)
+
+ compiled-expr-fn (fn [id field] (first (safe-get expr-fn-lookup [id field])))]
+
+ (case (:node-type beta-node)
+
+ :join
+ ;; Use a specialized root node for efficiency in this case.
+ (if is-root
+ (eng/->RootJoinNode
+ id
+ condition
+ children
+ join-bindings)
+
+ ;; If the join operation includes arbitrary expressions
+ ;; that can't be expressed as a hash join, we must use the expression join node.
+ (if (:join-filter-expressions beta-node)
+ (eng/->ExpressionJoinNode
+ id
+ condition
+ (compiled-expr-fn id :join-filter-expr)
+ children
+ join-bindings)
+ (eng/->HashJoinNode
+ id
+ condition
+ children
+ join-bindings)))
+
+ :negation
+ ;; Check to see if the negation includes an
+ ;; expression that must be joined to the incoming token
+ ;; and use the appropriate node type.
+ (if (:join-filter-expressions beta-node)
+ (eng/->NegationWithJoinFilterNode
+ id
+ condition
+ (compiled-expr-fn id :join-filter-expr)
+ children
+ join-bindings)
+ (eng/->NegationNode
+ id
+ condition
+ children
+ join-bindings))
+
+ :test
+ (eng/->TestNode
+ id
+ env
+ (compiled-expr-fn id :test-expr)
+ children)
+
+ :accumulator
+ ;; We create an accumulator that accepts the environment for the beta node
+ ;; into its context, hence the function with the given environment.
+ (let [compiled-node (compiled-expr-fn id :accum-expr)
+ compiled-accum (compiled-node (:env beta-node))]
+
+ ;; Ensure the compiled accumulator has the expected structure
+ (when (not (instance? Accumulator compiled-accum))
+ (throw (IllegalArgumentException. (str (:accumulator beta-node) " is not a valid accumulator."))))
+
+ ;; If a non-equality unification is in place, compile the predicate and use
+ ;; the specialized accumulate node.
+
+ (if (:join-filter-expressions beta-node)
+ (eng/->AccumulateWithJoinFilterNode
+ id
+ ;; Create an accumulator structure for use when examining the node or the tokens
+ ;; it produces.
+ {:accumulator (:accumulator beta-node)
+ ;; Include the original filter expressions in the constraints for inspection tooling.
+ :from (update-in condition [:constraints]
+ into (-> beta-node :join-filter-expressions :constraints))}
+ compiled-accum
+ (compiled-expr-fn id :join-filter-expr)
+ (:result-binding beta-node)
+ children
+ join-bindings
+ (:new-bindings beta-node))
+
+ ;; All unification is based on equality, so just use the simple accumulate node.
+ (eng/->AccumulateNode
+ id
+ ;; Create an accumulator structure for use when examining the node or the tokens
+ ;; it produces.
+ {:accumulator (:accumulator beta-node)
+ :from condition}
+ compiled-accum
+ (:result-binding beta-node)
+ children
+ join-bindings
+ (:new-bindings beta-node))))
+
+ :production
+ (eng/->ProductionNode
+ id
+ production
+ (compiled-expr-fn id :action-expr))
+
+ :query
+ (eng/->QueryNode
+ id
+ query
+ (:params query)))))
+
+(sc/defn ^:private compile-beta-graph :- {sc/Int sc/Any}
+ "Compile the beta description to the nodes used at runtime."
+ [{:keys [id-to-production-node id-to-condition-node id-to-new-bindings forward-edges backward-edges]} :- schema/BetaGraph
+ expr-fn-lookup :- schema/NodeFnLookup]
+ (let [;; Sort the ids to compile based on dependencies.
+ ids-to-compile (loop [pending-ids (into #{} (concat (keys id-to-production-node) (keys id-to-condition-node)))
+ node-deps forward-edges
+ sorted-nodes []]
+
+ (if (empty? pending-ids)
+ sorted-nodes
+
+ (let [newly-satisfied-ids (into #{}
+ (for [pending-id pending-ids
+ :when (empty? (get node-deps pending-id))]
+ pending-id))
+
+ updated-edges (into {} (for [[dependent-id dependencies] node-deps]
+ [dependent-id (set/difference dependencies newly-satisfied-ids)]))]
+
+ (recur (set/difference pending-ids newly-satisfied-ids)
+ updated-edges
+ (concat sorted-nodes newly-satisfied-ids)))))]
+
+ (reduce (fn [id-to-compiled-nodes id-to-compile]
+
+ ;; Get the condition or production node to compile
+ (let [node-to-compile (get id-to-condition-node
+ id-to-compile
+ (get id-to-production-node id-to-compile))
+
+ ;; Get the children. The children should be sorted because the
+ ;; id-to-compiled-nodes map is sorted.
+ children (->> (get forward-edges id-to-compile)
+ (select-keys id-to-compiled-nodes)
+ (vals))]
+
+ ;; Sanity check for our logic...
+ (assert (= (count children)
+ (count (get forward-edges id-to-compile)))
+ "Each child should be compiled.")
+
+ (if (= ::root-condition node-to-compile)
+ id-to-compiled-nodes
+ (assoc id-to-compiled-nodes
+ id-to-compile
+ (compile-node node-to-compile
+ id-to-compile
+ (root-node? backward-edges id-to-compile)
+ children
+ expr-fn-lookup
+ (get id-to-new-bindings id-to-compile))))))
+ ;; The node IDs have been determined before now, so we just need to sort the map returned.
+ ;; This matters because the engine will left-activate the beta roots with the empty token
+ ;; in the order that this map is seq'ed over.
+ (sorted-map)
+ ids-to-compile)))
+
+(sc/defn to-alpha-graph :- [schema/AlphaNode]
+ "Returns a sequence of [condition-fn, [node-ids]] tuples to represent the alpha side of the network."
+ [beta-graph :- schema/BetaGraph
+ create-id-fn :- IFn]
+
+ ;; Create a sequence of tuples of conditions + env to beta node ids.
+ (let [condition-to-node-ids (for [[id node] (:id-to-condition-node beta-graph)
+ :when (:condition node)]
+ [[(:condition node) (:env node)] id])
+
+ ;; Merge common conditions together.
+ condition-to-node-map (reduce
+ (fn [node-map [[condition env] node-id]]
+
+ ;; Can't use simple update-in because we need to ensure
+ ;; the value is a vector, not a list.
+ (if (get node-map [condition env])
+ (update-in node-map [[condition env]] conj node-id)
+ (assoc node-map [condition env] [node-id])))
+ {}
+ condition-to-node-ids)
+
+ ;; We sort the alpha nodes by the ordered sequence of the node ids they correspond to
+ ;; in order to make the order of alpha nodes for any given type consistent. Note that we
+ ;; coerce to a vector because we need a type that is comparable.
+ condition-to-node-entries (sort-by (fn [[k v]] (-> v sort vec))
+ condition-to-node-map)]
+
+ ;; Compile conditions into functions.
+ (vec
+ (for [[[condition env] node-ids] condition-to-node-entries
+ :when (:type condition)] ; Exclude test conditions.
+
+
+ (cond-> {:id (create-id-fn)
+ :condition condition
+ :beta-children (distinct node-ids)}
+ (seq env) (assoc :env env))))))
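+
+;; Illustrative sketch (not part of the original source): the alpha nodes produced for the beta
+;; graph of a single hypothetical production. The exact ids depend on the shared id counter.
+(comment
+  (let [id-counter (atom 0)
+        create-id-fn (fn [] (swap! id-counter inc))
+        beta-graph (to-beta-graph #{'{:name "example/too-cold"
+                                      :lhs [{:type Temperature :constraints [(< temperature 0)]}]
+                                      :rhs (println "too cold")}}
+                                  create-id-fn)]
+    (to-alpha-graph beta-graph create-id-fn))
+  ;; => [{:id 3
+  ;;      :condition {:type Temperature :constraints [(< temperature 0)]}
+  ;;      :beta-children (1)}]
+  )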
+
+(sc/defn compile-alpha-nodes :- [{:id sc/Int
+ :type sc/Any
+ :alpha-fn sc/Any ;; TODO: is a function...
+ (sc/optional-key :env) {sc/Keyword sc/Any}
+ :children [sc/Num]}]
+ [alpha-nodes :- [schema/AlphaNode]
+ expr-fn-lookup :- schema/NodeFnLookup]
+ (for [{:keys [id condition beta-children env] :as node} alpha-nodes]
+ (cond-> {:id id
+ :type (effective-type (:type condition))
+ :alpha-fn (first (safe-get expr-fn-lookup [id :alpha-expr]))
+ :children beta-children}
+ env (assoc :env env))))
+
+;; Wrap the fact-type so that Clojure equality and hashcode semantics are used
+;; even though this is placed in a Java map.
+(deftype AlphaRootsWrapper [fact-type ^int fact-type-hash roots]
+ Object
+ (equals [this other]
+ (let [other ^AlphaRootsWrapper other]
+ (cond
+
+ (identical? fact-type (.fact-type other))
+ true
+
+ (not (== fact-type-hash (.fact-type-hash other)))
+ false
+
+ :else
+ (= fact-type (.fact-type other)))))
+
+ ;; Since we know we will need the hashcode of this object in all cases, just eagerly calculate it upfront
+ ;; and avoid extra calls to hash later.
+ (hashCode [this] fact-type-hash))
+
+(defn- create-get-alphas-fn
+ "Returns a function that given a sequence of facts,
+ returns a map associating alpha nodes with the facts they accept."
+ [fact-type-fn ancestors-fn alpha-roots]
+
+ (let [;; If a customized fact-type-fn is provided,
+ ;; we must use a specialized grouping function
+ ;; that handles internal control types that may not
+ ;; follow the provided type function.
+ wrapped-fact-type-fn (if (= fact-type-fn type)
+ type
+ (fn [fact]
+ (if (instance? ISystemFact fact)
+ ;; Internal system types always use Clojure's type mechanism.
+ (type fact)
+ ;; All other types defer to the provided function.
+ (fact-type-fn fact))))
+
+ ;; Wrap the ancestors-fn so that we don't send internal facts such as NegationResult
+ ;; to user-provided productions. Note that this work is memoized inside fact-type->roots.
+ wrapped-ancestors-fn (fn [fact-type]
+ (if (isa? fact-type ISystemFact)
+ ;; Exclude system types from having ancestors for now
+ ;; since none of our use-cases require them. If this changes
+ ;; we may need to define a custom hierarchy for them.
+ #{}
+ (ancestors-fn fact-type)))
+
+ fact-type->roots (memoize
+ (fn [fact-type]
+ ;; There is no inherent ordering here but we put the AlphaRootsWrapper instances
+ ;; in a vector rather than a set to avoid nondeterministic ordering (and thus nondeterministic
+ ;; performance).
+ (into []
+ ;; If a given type in the ancestors has no matching alpha roots,
+ ;; don't return it as an ancestor. Fact-type->roots is memoized on the fact type,
+ ;; but work is performed per group returned on each call to the get-alphas-fn. Therefore
+ ;; removing groups with no alpha nodes here will improve performance on subsequent calls
+ ;; to the get-alphas-fn with the same fact type.
+ (keep #(when-let [roots (not-empty (get alpha-roots %))]
+ (AlphaRootsWrapper. % (hash %) roots)))
+ ;; If a user-provided ancestors-fn returns a sorted collection, for example for
+ ;; ensuring determinism, we respect that ordering here by conj'ing on to the existing
+ ;; collection.
+ (conj (wrapped-ancestors-fn fact-type) fact-type))))
+
+ update-roots->facts! (fn [^java.util.Map roots->facts roots-group fact]
+ (if-let [v (.get roots->facts roots-group)]
+ (.add ^java.util.List v fact)
+ (.put roots->facts roots-group (doto (java.util.LinkedList.)
+ (.add fact)))))]
+
+ (fn [facts]
+ (let [roots->facts (java.util.LinkedHashMap.)]
+
+ (doseq [fact facts
+ roots-group (fact-type->roots (wrapped-fact-type-fn fact))]
+ (update-roots->facts! roots->facts roots-group fact))
+
+ (let [return-list (java.util.LinkedList.)
+ entries (.entrySet roots->facts)
+ entries-it (.iterator entries)]
+ ;; We iterate over the LinkedHashMap manually to avoid potential issues described at http://dev.clojure.org/jira/browse/CLJ-1738
+ ;; where a Java iterator can return the same entry object repeatedly and mutate it after each next() call. We use mutable lists
+ ;; for performance but wrap them in unmodifiableList to make it clear that the caller is not expected to mutate these lists.
+ ;; Since after this function returns the only reference to the fact lists will be through the unmodifiableList, we can depend elsewhere
+ ;; on these lists not changing. Since the only expected workflow with these lists is to loop through them, not add or remove elements,
+ ;; we don't gain much from using a transient (which can be efficiently converted to a persistent data structure) rather than a mutable type.
+ (loop []
+ (when (.hasNext entries-it)
+ (let [^java.util.Map$Entry e (.next entries-it)]
+ (.add return-list [(-> e ^AlphaRootsWrapper (.getKey) .roots)
+ (java.util.Collections/unmodifiableList (.getValue e))])
+ (recur))))
+
+ (java.util.Collections/unmodifiableList return-list))))))
+
+(sc/defn build-network
+ "Constructs the network from compiled beta tree and condition functions."
+ [id-to-node :- {sc/Int sc/Any}
+ beta-roots
+ alpha-fns
+ productions
+ fact-type-fn
+ ancestors-fn
+ activation-group-sort-fn
+ activation-group-fn
+ expr-fn-lookup]
+
+ (let [beta-nodes (vals id-to-node)
+
+ production-nodes (for [node beta-nodes
+ :when (= ProductionNode (type node))]
+ node)
+
+ query-nodes (for [node beta-nodes
+ :when (= QueryNode (type node))]
+ node)
+
+ query-map (into {} (for [query-node query-nodes
+
+ ;; Queries can be looked up by reference or by name;
+ entry [[(:query query-node) query-node]
+ [(:name (:query query-node)) query-node]]]
+ entry))
+
+ ;; type, alpha node tuples.
+ alpha-nodes (for [{:keys [id type alpha-fn children env] :as alpha-map} alpha-fns
+ :let [beta-children (map id-to-node children)]]
+ [type (eng/->AlphaNode id env beta-children alpha-fn type)])
+
+ ;; Merge the alpha nodes into a multi-map
+ alpha-map (reduce
+ (fn [alpha-map [type alpha-node]]
+ (update-in alpha-map [type] conj alpha-node))
+ {}
+ alpha-nodes)
+
+ ;; Merge the alpha nodes into the id-to-node map
+ id-to-node (into id-to-node
+ (map (juxt :id identity))
+ (mapv second alpha-nodes))
+
+ get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn alpha-map)]
+
+ #_{:clj-kondo/ignore [:unresolved-symbol]}
+ (strict-map->Rulebase
+ {:alpha-roots alpha-map
+ :beta-roots beta-roots
+ :productions productions
+ :production-nodes production-nodes
+ :query-nodes query-map
+ :id-to-node id-to-node
+ :activation-group-sort-fn activation-group-sort-fn
+ :activation-group-fn activation-group-fn
+ :get-alphas-fn get-alphas-fn
+ :node-expr-fn-lookup expr-fn-lookup})))
+
+;; Cache of sessions for fast reloading.
+(def ^:private session-cache (atom {}))
+
+(defn clear-session-cache!
+ "Clears the cache of reusable Clara sessions, so any subsequent sessions
+ will be re-compiled from the rule definitions. This is intended for use
+ by tooling or specialized needs; most users can simply specify the :cache false
+ option when creating sessions."
+ []
+ (reset! session-cache {}))
+
+(defn production-load-order-comp [a b]
+ (< (-> a meta ::rule-load-order)
+ (-> b meta ::rule-load-order)))
+
+(defn validate-names-unique
+ "Checks that all productions included in the session have unique names,
+ throwing an exception if duplicates are found."
+ [productions]
+ (let [non-unique (->> productions
+ (group-by :name)
+ (filter (fn [[k v]] (and (some? k) (not= 1 (count v)))))
+ (map key)
+ set)]
+ (if (empty? non-unique)
+ productions
+ (throw (ex-info (str "Non-unique production names: " non-unique) {:names non-unique})))))
+
+(def forms-per-eval-default
+ "The default max number of forms that will be evaluated together as a single batch.
+ 5000 is chosen here due to the way that clojure will evaluate the vector of forms extracted from the nodes.
+ The limiting factor here is the max Java method size (64KiB); Clojure will compile each form in the vector down into
+ its own class file and generate another class file that will reference each of the other functions and wrap them in
+ a vector inside a static method. For example,
+
+ (eval [(fn one [_] ...) (fn two [_] ...)])
+ would generate 3 classes.
+
+ some_namespace$eval1234
+ some_namespace$eval1234$one_1234
+ some_namespace$eval1234$two_1235
+
+ some_namespace$eval1234$one_1234 and some_namespace$eval1234$two_1235 contain the implementation of the functions,
+ where some_namespace$eval1234 will contain two methods, invoke and invokeStatic.
+ The invokeStatic method breaks down into something similar to a single create-array call followed by 2 array-set calls
+ with new invocations on the 2 classes; the method then returns a new vector created from the array.
+
+ 5000 is lower than the absolute max to allow for modifications to how clojure compiles without needing to modify this.
+ The current limit should be 5471; this is derived from the following opcode investigation:
+
+ Array creation: 5B
+ Creating and populating the first 6 elements of the array: 60B
+ Creating and populating the next 122 elements of the array: 1,342B
+ Creating and populating the next 5343 elements of the array: 64,116B
+ Creating the vector and the return statement: 4B
+
+ This sums to 65,527B just shy of the 65,536B method size limit."
+ 5000)
+
+(def omit-compile-ctx-default
+ "During construction of the Session there is data maintained such that if the underlying expressions fail to compile
+ then this data can be used to explain the failure and the constraints of the rule who's expression is being evaluated.
+ The default behavior will be to discard this data, as there will be no use unless the session will be serialized and
+ deserialized into a dissimilar environment, ie function or symbols might be unresolvable. In those sorts of scenarios
+ it would be possible to construct the original Session with the `omit-compile-ctx` flag set to false, then the compile
+ context should aid in debugging the compilation failure on deserialization."
+ true)
+
+(sc/defn mk-session*
+ "Compile the rules into a rete network and return the given session."
+ [productions :- #{schema/Production}
+ options :- {sc/Keyword sc/Any}]
+ (let [;; We need to put the productions in a sorted set here, instead of in mk-session, since we don't want
+ ;; a sorted set in the session-cache. If we did, only rule order would be used to compare set equality
+ ;; for finding a cache hit, and we want to use the entire production, which a non-sorted set does.
+ ;; When using a sorted set, for constant options,
+ ;; any session creation with n productions would be a cache hit if any previous session had n productions.
+ ;; Furthermore, we can avoid the work of sorting the set until we know that there was a cache miss.
+ ;;
+ ;; Note that this ordering is not for correctness; we are just trying to increase consistency of rulebase compilation,
+ ;; and hopefully thereby execution times, from run to run.
+ _ (validate-names-unique productions)
+ productions (with-meta (into (sorted-set-by production-load-order-comp)
+ productions)
+ ;; Store the name of the custom comparator for durability.
+ {:clara.rules.durability/comparator-name `production-load-order-comp})
+
+ ;; A stateful counter used for unique ids of the nodes of the graph.
+ id-counter (atom 0)
+ create-id-fn (fn [] (swap! id-counter inc))
+
+ forms-per-eval (:forms-per-eval options forms-per-eval-default)
+
+ beta-graph (to-beta-graph productions create-id-fn)
+ alpha-graph (to-alpha-graph beta-graph create-id-fn)
+
+ ;; Extract the expressions from the graphs and evaluate them in a batch manner.
+ ;; This is a performance optimization, see Issue 381 for more information.
+ exprs (compile-exprs (extract-exprs beta-graph alpha-graph) forms-per-eval)
+
+ ;; If we have made it to this point, it means that we have succeeded in compiling all expressions
+ ;; thus we can free the :compile-ctx used for troubleshooting compilation failures.
+ ;; The reason that this flag exists is in the event that this session will be serialized with an
+ ;; uncertain deserialization environment and this sort of troubleshooting information would be useful
+ ;; in diagnosing compilation errors in specific rules.
+ omit-compile-ctx (:omit-compile-ctx options omit-compile-ctx-default)
+ exprs (if omit-compile-ctx
+ (into {}
+ (map
+ (fn [[k [expr ctx]]]
+ [k [expr (dissoc ctx :compile-ctx)]]))
+ exprs)
+ exprs)
+
+ beta-tree (compile-beta-graph beta-graph exprs)
+ beta-root-ids (-> beta-graph :forward-edges (get 0)) ; 0 is the id of the virtual root node.
+ beta-roots (vals (select-keys beta-tree beta-root-ids))
+ alpha-nodes (compile-alpha-nodes alpha-graph exprs)
+
+ ;; The fact-type uses Clojure's type function unless overridden.
+ fact-type-fn (or (get options :fact-type-fn)
+ type)
+
+ ;; The ancestors for a logical type uses Clojure's ancestors function unless overridden.
+ ancestors-fn (or (get options :ancestors-fn)
+ ancestors)
+
+ ;; The default is to sort activations in descending order by their salience.
+ activation-group-sort-fn (eng/options->activation-group-sort-fn options)
+
+ ;; The returned salience will be a tuple of the form [rule-salience internal-salience],
+ ;; where internal-salience is considered after the rule-salience and is assigned automatically by the compiler.
+ activation-group-fn (eng/options->activation-group-fn options)
+
+ rulebase (build-network beta-tree beta-roots alpha-nodes productions
+ fact-type-fn ancestors-fn activation-group-sort-fn activation-group-fn
+ exprs)
+
+ get-alphas-fn (:get-alphas-fn rulebase)
+
+ transport (LocalTransport.)]
+
+ (eng/assemble {:rulebase rulebase
+ :memory (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
+ :transport transport
+ :listeners (get options :listeners [])
+ :get-alphas-fn get-alphas-fn})))
+
+(defn add-production-load-order
+ "Adds ::rule-load-order to metadata of productions. Custom DSL's may need to use this if
+ creating a session in Clojure without calling mk-session below."
+ [productions]
+ (map (fn [n production]
+ (vary-meta production assoc ::rule-load-order (or n 0)))
+ (range) productions))
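+
+;; Illustrative sketch (not part of the original source): the load order is attached as metadata;
+;; the production names are hypothetical.
+(comment
+  (map meta (add-production-load-order [{:name "example/rule-a"} {:name "example/rule-b"}]))
+  ;; => ({:clara.rules.compiler/rule-load-order 0}
+  ;;     {:clara.rules.compiler/rule-load-order 1})
+  )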
+
+(defn mk-session
+ "Creates a new session using the given rule source. The resulting session
+ is immutable, and can be used with insert, retract, fire-rules, and query functions."
+ ([sources-and-options]
+ (let [sources (take-while (complement keyword?) sources-and-options)
+ options (apply hash-map (drop-while (complement keyword?) sources-and-options))
+ productions (->> sources
+ ;; Load rules from the source, or just use the input as a seq.
+ (mapcat #(if (satisfies? IRuleSource %)
+ (load-rules %)
+ %))
+ add-production-load-order
+ ;; Ensure that we choose the earliest occurrence of a rule for the purpose of rule order.
+ ;; There are Clojure core functions for distinctness, of course, but none of them seem to guarantee
+ ;; which instance will be chosen in case of duplicates.
+ (reduce (fn [previous new-production]
+ ;; Contains? is broken for transient sets; see http://dev.clojure.org/jira/browse/CLJ-700
+ ;; Since all values in this set should be truthy, we can just use the set as a function instead.
+ (if (previous new-production)
+ previous
+ (conj! previous new-production)))
+ (transient #{}))
+ persistent!)]
+
+ (if-let [session (get @session-cache [productions options])]
+ session
+ (let [session (mk-session* productions options)]
+
+ ;; Cache the session unless instructed not to.
+ (when (get options :cache true)
+ (swap! session-cache assoc [productions options] session))
+
+ ;; Return the session.
+ session)))))
diff --git a/build/clara/rules/dsl.clj b/build/clara/rules/dsl.clj
new file mode 100644
index 00000000..aa98fc40
--- /dev/null
+++ b/build/clara/rules/dsl.clj
@@ -0,0 +1,315 @@
+(ns clara.rules.dsl
+ "Implementation of the defrule-style DSL for Clara. Most users should simply use the clara.rules namespace."
+ (:require [clojure.walk :as walk]
+ [clara.rules.compiler :as com]
+ [clara.rules.platform :as platform])
+ (:refer-clojure :exclude [qualified-keyword?]))
+
+;; Let operators be symbols or keywords.
+(def ops #{'and 'or 'not 'exists :and :or :not :exists})
+
+(defn- separator?
+ "True iff `x` is a rule body separator symbol."
+ [x] (and (symbol? x) (= "=>" (name x))))
+
+(defn split-lhs-rhs
+ "Given a rule with the =>, splits the left- and right-hand sides."
+ [rule-body]
+ (let [[lhs [sep & rhs]] (split-with #(not (separator? %)) rule-body)]
+
+ {:lhs lhs
+ :rhs (when-not (empty? rhs)
+ (conj rhs 'do))}))
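+
+;; Illustrative sketch (not part of the original source); Temperature is a hypothetical fact type.
+(comment
+  (split-lhs-rhs '[[Temperature (< temperature 0)]
+                   =>
+                   (println "too cold")])
+  ;; => {:lhs ([Temperature (< temperature 0)])
+  ;;     :rhs (do (println "too cold"))}
+  )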
+
+(defn- throw-dsl-ex
+ "Throws an exception indicating a failure parsing a form."
+ [message info expr-meta]
+ (if expr-meta
+ (let [{:keys [line column file]} expr-meta]
+ (throw (ex-info
+ (str message
+ (when line
+ (str " line: " line))
+ (when column
+ (str " column: " column))
+ (when file
+ (str " file: " file)))
+
+ (into info expr-meta))))
+ (throw (ex-info message info))))
+
+(defn- construct-condition
+ "Creates a condition with the given optional result binding when parsing a rule."
+ [condition result-binding expr-meta]
+ (let [type (if (symbol? (first condition))
+ (if-let [resolved (resolve (first condition))]
+
+ ;; If the type resolves to a var, grab its contents for the match.
+ (if (var? resolved)
+ (deref resolved)
+ resolved)
+
+ (first condition)) ; For ClojureScript compatibility, we keep the symbol if we can't resolve it.
+ (first condition))
+ ;; Args is an optional vector of arguments following the type.
+ args (if (vector? (second condition)) (second condition) nil)
+ constraints (vec (if args (drop 2 condition) (rest condition)))]
+
+ (when (and (vector? type)
+ (some seq? type))
+
+ (throw-dsl-ex (str "Type " type " is a vector and appears to contain expressions. "
+ "Is there an extraneous set of brackets in the condition?")
+ {}
+ expr-meta))
+
+ (when (> (count args) 1)
+ (throw-dsl-ex "Only one argument can be passed to a condition."
+ {}
+ expr-meta))
+
+ ;; Check if a malformed rule has a nested operator where we expect a type.
+ (when (and (sequential? type)
+ (seq type)
+ (ops (first type)))
+ (throw-dsl-ex (str "Attempting to bind into " result-binding
+ " nested expression: " (pr-str condition)
+ " Nested expressions cannot be bound into higher-level results")
+ {}
+ expr-meta))
+
+ ;; Include the original metadata in the returned condition so line numbers
+ ;; can be preserved when we compile it.
+ (with-meta
+ (cond-> {:type type
+ :constraints constraints}
+ args (assoc :args args)
+ result-binding (assoc :fact-binding result-binding))
+
+ (when (seq constraints)
+ (assoc (meta (first constraints))
+ :file *file*)))))
+
+(defn- parse-condition-or-accum
+ "Parse an expression that could be a condition or an accumulator."
+ [condition expr-meta]
+ ;; Grab the binding of the operation result, if present.
+ (let [result-binding (if (= '<- (second condition)) (keyword (first condition)) nil)
+ condition (if result-binding (drop 2 condition) condition)]
+
+ (when (and (not= nil result-binding)
+ (not= \? (first (name result-binding))))
+ (throw-dsl-ex (str "Invalid binding for condition: " result-binding)
+ {}
+ expr-meta))
+
+ ;; If it's an s-expression, simply let it expand itself, and assoc the binding with the result.
+ (if (#{'from :from} (second condition)) ; If this is an accumulator....
+ (let [parsed-accum {:accumulator (first condition)
+ :from (construct-condition (nth condition 2) nil expr-meta)}]
+ ;; A result binding is optional for an accumulator.
+ (if result-binding
+ (assoc parsed-accum :result-binding result-binding)
+ parsed-accum))
+ ;; Not an accumulator, so simply create the condition.
+ (construct-condition condition result-binding expr-meta))))
+
+(defn- parse-expression
+ "Convert each expression into a condition structure."
+ [expression expr-meta]
+ (cond
+
+ (contains? ops (first expression))
+ (into [(keyword (name (first expression)))] ; Ensure expression operator is a keyword.
+ (for [nested-expr (rest expression)]
+ (parse-expression nested-expr expr-meta)))
+
+ (contains? #{'test :test} (first expression))
+ (if (= 1 (count expression))
+ (throw-dsl-ex (str "Empty :test conditions are not allowed.")
+ {}
+ expr-meta)
+ {:constraints (vec (rest expression))})
+
+ :else
+ (parse-condition-or-accum expression expr-meta)))
+
+(defn- maybe-qualify
+ "Attempt to qualify the given symbol, returning the symbol itself
+ if we can't qualify it for any reason."
+ [env sym]
+ (let [env (set env)]
+ (if (and (symbol? sym)
+ (not= '.. sym) ; Skip the special .. host interop macro.
+ (.endsWith (name sym) "."))
+
+ ;; The . suffix indicates a type constructor, so we qualify the type instead, then
+ ;; re-add the . to the resolved type.
+ (-> (subs (name sym)
+ 0
+ (dec (count (name sym)))) ; Get the name without the trailing dot.
+ (symbol) ; Convert it back to a symbol.
+ ^Class (resolve) ; Resolve into the type class.
+ (.getName) ; Get the qualified name.
+ (str ".") ; Re-add the dot for the constructor, which is now qualified
+ (symbol)) ; Convert back into a symbol used at compile time.
+
+ ;; Qualify a normal clojure symbol.
+ (if (and (symbol? sym) ; Only qualify symbols...
+ (not (env sym))) ; not in env (env contains locals).
+
+ (cond
+ ;; If it's a Java class, simply qualify it.
+ (instance? Class (resolve sym))
+ (symbol (.getName ^Class (resolve sym)))
+
+ ;; Don't qualify clojure.core for portability, since CLJS uses a different namespace.
+ (and (resolve sym)
+ (not (= "clojure.core"
+ (str (ns-name (:ns (meta (resolve sym)))))))
+ (name sym))
+ (symbol (str (ns-name (:ns (meta (resolve sym))))) (name sym))
+
+ ;; See if it's a static method call and qualify it.
+ (and (namespace sym)
+ (not (resolve sym))
+ (instance? Class (resolve (symbol (namespace sym))))) ; The namespace portion is the class name we try to resolve.
+ (symbol (.getName ^Class (resolve (symbol (namespace sym))))
+ (name sym))
+
+ ;; Finally, just return the symbol unchanged if it doesn't match anything above,
+ ;; assuming it's a local parameter or variable.
+ :else
+ sym)
+ sym))))
+
+(defn- qualify-meta
+ "Qualify metadata associated with the symbol."
+ [env sym]
+ (if (:tag (meta sym))
+ (vary-meta sym update-in [:tag] (fn [tag] (-> ^Class (resolve tag)
+ (.getName)
+ (symbol))))
+ sym))
+
+(defn- resolve-vars
+ "Resolve vars used in expression. TODO: this should be narrowed to resolve only
+ those that aren't in the environment, condition, or right-hand side."
+ [form env]
+ (walk/postwalk
+ (fn [sym]
+ (->> sym
+ (maybe-qualify env)
+ (qualify-meta env)))
+ form))
+
+(defmacro local-syms []
+ (mapv #(list 'quote %) (keys &env)))
+
+(defn destructuring-sym? [sym]
+ (or (re-matches #"vec__\d+" (name sym))
+ (re-matches #"map__\d+" (name sym))))
+
+(defn- destructure-syms
+ [{:keys [args] :as condition}]
+ (if args
+ (remove destructuring-sym? (eval `(let [~args nil] (local-syms))))))
+
+(defn parse-rule*
+ "Creates a rule from the DSL syntax using the given environment map. *ns*
+ should be bound to the namespace the rule is meant to be defined in."
+ ([lhs rhs properties env]
+ (parse-rule* lhs rhs properties env {}))
+ ([lhs rhs properties env rule-meta]
+ (let [conditions (into [] (for [expr lhs]
+ (parse-expression expr rule-meta)))
+
+ rule {:ns-name (list 'quote (ns-name *ns*))
+ :lhs (list 'quote
+ (mapv #(resolve-vars % (destructure-syms %))
+ conditions))
+ :rhs (list 'quote
+ (vary-meta rhs
+ assoc :file *file*))}
+
+ symbols (set (filter symbol? (com/flatten-expression (concat lhs rhs))))
+ matching-env (into {} (for [sym (keys env)
+ :when (symbols sym)]
+ [(keyword (name sym)) sym]))]
+
+ (cond-> rule
+
+ ;; Add properties, if given.
+ (not (empty? properties)) (assoc :props properties)
+
+ ;; Add the environment, if given.
+ (not (empty? env)) (assoc :env matching-env)))))
+
+(defn parse-query*
+ "Creates a query from the DSL syntax using the given environment map."
+ ([params lhs env]
+ (parse-query* params lhs env {}))
+ ([params lhs env query-meta]
+ (let [conditions (into [] (for [expr lhs]
+ (parse-expression expr query-meta)))
+
+ query {:lhs (list 'quote (mapv #(resolve-vars % (destructure-syms %))
+ conditions))
+ :params (set (map platform/query-param params))}
+
+ symbols (set (filter symbol? (com/flatten-expression lhs)))
+ matching-env (into {}
+ (for [sym (keys env)
+ :when (symbols sym)]
+ [(keyword (name sym)) sym]))]
+
+ (cond-> query
+ (not (empty? env)) (assoc :env matching-env)))))
+
+(defmacro parse-rule
+ "Macro used to dynamically create a new rule using the DSL syntax."
+ ([lhs rhs]
+ (parse-rule* lhs rhs nil &env))
+ ([lhs rhs properties]
+ (parse-rule* lhs rhs properties &env)))
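+
+;; A minimal sketch of dynamic rule creation, assuming a hypothetical Temperature
+;; fact type with temperature and location fields is on the classpath:
+(comment
+ (parse-rule [[Temperature (< temperature 20) (= ?loc location)]]
+ (println "Cold weather at" ?loc)))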
+
+;;; added to clojure.core in 1.9
+(defn- qualified-keyword?
+ "Return true if x is a keyword with a namespace"
+ [x] (and (keyword? x) (namespace x) true))
+
+(defn- production-name
+ [prod-name]
+ (cond
+ (qualified-keyword? prod-name) prod-name
+ :else (str (name (ns-name *ns*)) "/" (name prod-name))))
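+
+;; A minimal sketch: qualified keywords pass through unchanged, while other names
+;; are qualified with the current namespace (shown here as if *ns* were user):
+(comment
+ (production-name :my.app/cold-rule) ; => :my.app/cold-rule
+ (production-name "cold-rule")) ; => "user/cold-rule"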
+
+(defn build-rule
+ "Function used to parse and build a rule using the DSL syntax."
+ ([name body] (build-rule name body {}))
+ ([name body form-meta]
+ (let [doc (if (string? (first body)) (first body) nil)
+ body (if doc (rest body) body)
+ properties (if (map? (first body)) (first body) nil)
+ definition (if properties (rest body) body)
+ {:keys [lhs rhs]} (split-lhs-rhs definition)]
+ (cond-> (parse-rule* lhs rhs properties {} form-meta)
+
+ name (assoc :name (production-name name))
+ doc (assoc :doc doc)))))
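+
+;; A minimal sketch of build-rule, using the same hypothetical Temperature fact
+;; type; the body mirrors what defrule would receive:
+(comment
+ (build-rule 'cold-rule
+ '("Alert on cold temperatures."
+ [Temperature (< temperature 20) (= ?loc location)]
+ =>
+ (println "Cold weather at" ?loc))))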
+
+(defmacro parse-query
+ "Macro used to dynamically create a new rule using the DSL syntax."
+ [params lhs]
+ (parse-query* params lhs &env))
+
+(defn build-query
+ "Function used to parse and build a query using the DSL syntax."
+ ([name body] (build-query name body {}))
+ ([name body form-meta]
+ (let [doc (if (string? (first body)) (first body) nil)
+ binding (if doc (second body) (first body))
+ definition (if doc (drop 2 body) (rest body))]
+ (cond-> (parse-query* binding definition {} form-meta)
+ name (assoc :name (production-name name))
+ doc (assoc :doc doc)))))
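+
+;; A minimal sketch of build-query; the binding vector lists the query parameters
+;; and the remaining forms are conditions (Temperature is again hypothetical):
+(comment
+ (build-query 'temps-by-location
+ '([:?loc]
+ [Temperature (= ?loc location) (= ?temp temperature)])))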
diff --git a/build/clara/rules/durability.clj b/build/clara/rules/durability.clj
new file mode 100644
index 00000000..41f1149a
--- /dev/null
+++ b/build/clara/rules/durability.clj
@@ -0,0 +1,699 @@
+(ns clara.rules.durability
+ "Support for persisting Clara sessions to an external store.
+ Provides the ability to store and restore an entire session working memory state. The restored
+ session is able to have additional insert, retract, query, and fire rule calls performed
+ immediately after.
+
+ See https://github.com/cerner/clara-rules/issues/198 for more discussion on this.
+
+ Note! This is still an EXPERIMENTAL namespace. This may change non-passively without warning.
+ Any session or rulebase serialized in one version of Clara is not guaranteed to deserialize
+ successfully against another version of Clara."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.compiler :as com]
+ [clara.rules.memory :as mem]
+ [schema.core :as s])
+ (:import [clara.rules.compiler Rulebase]
+ [clara.rules.memory RuleOrderedActivation]
+ [java.util List Map]))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Rulebase serialization helpers.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(def ^:internal ^ThreadLocal node-id->node-cache
+ "Useful for caching rulebase network nodes by id during serialization and deserialization to
+ avoid creating multiple object instances for the same node."
+ (ThreadLocal.))
+
+(def ^:internal ^ThreadLocal node-fn-cache
+ "A cache for holding the fns used to reconstruct the nodes. Only applicable during read time, specifically
+ this will be bound to a Map of [node-id expr-key] to IFn before the rulebase is deserialized. While the
+ rulebase is deserialized the nodes will reference this cache to repopulate their fns."
+ (ThreadLocal.))
+
+(defn- add-node-fn [node fn-key expr-key]
+ (assoc node
+ fn-key
+ (first (get (.get node-fn-cache) [(:id node) expr-key]))))
+
+(defn add-rhs-fn [node]
+ (add-node-fn node :rhs :action-expr))
+
+(defn add-alpha-fn [node]
+ (add-node-fn node :activation :alpha-expr))
+
+(defn add-join-filter-fn [node]
+ (add-node-fn node :join-filter-fn :join-filter-expr))
+
+(defn add-test-fn [node]
+ (add-node-fn node :test :test-expr))
+
+(defn add-accumulator [node]
+ (assoc node
+ :accumulator ((first (get (.get node-fn-cache) [(:id node) :accum-expr]))
+ (:env node))))
+
+(defn node-id->node
+ "Lookup the node for the given node-id in the node-id->node-cache cache."
+ [node-id]
+ (@(.get node-id->node-cache) node-id))
+
+(defn cache-node
+ "Cache the node in the node-id->node-cache. Returns the node."
+ [node]
+ (when-let [node-id (:id node)]
+ (vswap! (.get node-id->node-cache) assoc node-id node))
+ node)
+
+(def ^:internal ^ThreadLocal clj-struct-holder
+ "A cache for writing and reading Clojure records. At write time, an IdentityHashMap can be
+ used to keep track of repeated references to the same object instance occurring in
+ the serialization stream. At read time, a plain ArrayList (mutable and indexed for speed)
+ can be used to add records to when they are first seen, then look up repeated occurrences
+ of references to the same record instance later."
+ (ThreadLocal.))
+
+(defn clj-struct->idx
+ "Gets the numeric index for the given struct from the clj-struct-holder."
+ [fact]
+ (-> clj-struct-holder
+ ^Map (.get)
+ (.get fact)))
+
+(defn clj-struct-holder-add-fact-idx!
+ "Adds the fact to the clj-struct-holder with a new index. This can later be retrieved
+ with clj-struct->idx."
+ [fact]
+ ;; Note the values will be int type here. This shouldn't be a problem since they
+ ;; will be read later as longs and both will be compatible with the index lookup
+ ;; at read-time. This could have a cast to long here, but it would waste time
+ ;; unnecessarily.
+ (-> clj-struct-holder
+ ^Map (.get)
+ (.put fact (-> clj-struct-holder
+ ^Map (.get)
+ (.size)))))
+
+(defn clj-struct-idx->obj
+ "The reverse of clj-struct->idx. Returns an object for the given index found
+ in clj-struct-holder."
+ [id]
+ (-> clj-struct-holder
+ ^List (.get)
+ (.get id)))
+
+(defn clj-struct-holder-add-obj!
+ "The reverse of clj-struct-holder-add-fact-idx!. Adds the object to the clj-struct-holder
+ at the next available index."
+ [fact]
+ (-> clj-struct-holder
+ ^List (.get)
+ (.add fact))
+ fact)
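+
+;; A minimal sketch of the identity-based indexing above, assuming the caller
+;; binds the thread-local the same way the serializers do internally:
+(comment
+ (.set clj-struct-holder (java.util.IdentityHashMap.))
+ (let [fact {:temperature 10}]
+ (clj-struct-holder-add-fact-idx! fact) ; fact recorded at index 0
+ (clj-struct->idx fact))) ; => 0 for this same object instance only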
+
+(defn create-map-entry
+ "Helper to create map entries. This can be useful for serialization implementations
+ on clojure.lang.MapEntry types.
+ Using the ctor instead of clojure.lang.MapEntry/create since this method
+ doesn't exist prior to clj 1.8.0"
+ [k v]
+ (clojure.lang.MapEntry. k v))
+
+;;;; To deal with http://dev.clojure.org/jira/browse/CLJ-1733 we need to impl a way to serialize
+;;;; sorted sets and maps. However, this is not sufficient for arbitrary comparators. If
+;;;; arbitrary comparators are used for the sorted coll, the comparator has to be restored
+;;;; explicitly since arbitrary functions are not serializable in any stable way right now.
+
+(defn sorted-comparator-name
+ "Sorted collections are not easily serializable since they have an opaque function object instance
+ associated with them. To deal with that, the sorted collection can provide a ::comparator-name
+ in the metadata that indicates a symbolic name for the function used as the comparator. With this
+ name the function can be looked up and associated to the sorted collection again during
+ deserialization time.
+ * If the sorted collection has metadata ::comparator-name, then the value should be a name
+ symbol and is returned.
+ * If the sorted collection has the clojure.lang.RT/DEFAULT_COMPARATOR, returns nil.
+ * If neither of the above are true, an exception is thrown indicating that there is no way to provide
+ a useful name for this sorted collection, so it won't be able to be serialized."
+ [^clojure.lang.Sorted s]
+ (let [cname (-> s meta ::comparator-name)]
+
+ ;; Fail if reliable serialization of this sorted coll isn't possible.
+ (when (and (not cname)
+ (not= (.comparator s) clojure.lang.RT/DEFAULT_COMPARATOR))
+ (throw (ex-info (str "Cannot serialize sorted collection with non-default"
+ " comparator because no :clara.rules.durability/comparator-name provided in metadata.")
+ {:sorted-coll s
+ :comparator (.comparator s)})))
+
+ cname))
+
+(defn seq->sorted-set
+ "Helper to create a sorted set from a seq given an optional comparator."
+ [s ^java.util.Comparator c]
+ (if c
+ (clojure.lang.PersistentTreeSet/create c (seq s))
+ (clojure.lang.PersistentTreeSet/create (seq s))))
+
+(defn seq->sorted-map
+ "Helper to create a sorted map from a seq given an optional comparator."
+ [s ^java.util.Comparator c]
+ (if c
+ (clojure.lang.PersistentTreeMap/create c ^clojure.lang.ISeq (sequence cat s))
+ (clojure.lang.PersistentTreeMap/create ^clojure.lang.ISeq (sequence cat s))))
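+
+;; A minimal sketch of the ::comparator-name convention above; reverse-compare is
+;; a hypothetical var that must also be resolvable at deserialization time:
+(comment
+ (def reverse-compare (comparator >))
+ (with-meta (sorted-set-by reverse-compare 3 1 2)
+ {::comparator-name `reverse-compare}))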
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Memory serialization via "indexing" working memory facts.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;; A placeholder to put in working memory locations where consumer-defined, domain-specific facts
+;;; were stored at. This placeholder is used to track the position of a fact so that the fact can
+;;; be serialized externally by IWorkingMemorySerializer and later the deserialized fact can be put
+;;; back in place of this placeholder.
+;;; See the ISessionSerializer and IWorkingMemorySerializer protocols and
+;;; indexed-session-memory-state for more details.
+(defrecord MemIdx [idx])
+
+;;; Same as MemIdx but specific to internal objects, such as Token or Element.
+(defrecord InternalMemIdx [idx])
+
+(defn find-index
+ "Finds the fact in the fact->idx-map. The fact is assumed to be a key. Returns the value for
+ that key, which should just be a numeric index used to track where facts are stubbed out with
+ MemIdx's in working memory so that they can be 'put back' later."
+ [^Map fact->idx-map fact]
+ (.get fact->idx-map fact))
+
+(defn- find-index-or-add!
+ "The same as find-index, but if the fact is not found, it is added to the map (destructively)
+ and the index it was mapped to is returned.
+ This implies that the map must support the mutable map interface, namely java.util.Map.put()."
+ [^Map fact->index-map fact]
+ (or (.get fact->index-map fact)
+ (let [n (.size fact->index-map)
+ idx (->MemIdx n)]
+ (.put fact->index-map fact idx)
+ idx)))
+
+(defn- add-mem-internal-idx!
+ "Adds an element to fact->idx-map. The fact is assumed to be a key. The value is a tuple containing
+ both the InternalMemIdx and the 'indexed' form of the element. The indexed form is the element that
+ has had all of its internal facts stubbed with MemIdxs. The actual element is used as the key because
+ the act of stubbing the internal fields of the element changes the identity of the element thus making
+ every indexed-element unique. The indexed-element is stored so that it can be serialized rather than
+ the element itself. This function simply adds a new key, unlike find-index-or-add!, as such the caller
+ should first check that the key is not already present before calling this method.
+ Returns the stub used to represent an internal fact, so that it can be 'put back' later."
+ [^Map fact->idx-map
+ element
+ indexed-element]
+ (let [n (.size fact->idx-map)
+ idx (->InternalMemIdx n)]
+ (.put fact->idx-map element [idx indexed-element])
+ idx))
+
+(defn- find-mem-internal-idx
+ "Returns the InternalMemIdx for the given element."
+ [^Map fact->idx-map
+ element]
+ (nth (.get fact->idx-map element) 0))
+
+;;; Similar to what is in clara.rules.memory currently, but just copied for now to avoid dependency issues.
+(defn- update-vals [m update-fn]
+ (->> m
+ (reduce-kv (fn [m k v]
+ (assoc! m k (update-fn v)))
+ (transient {}))
+ persistent!))
+
+(defn- index-bindings
+ [seen bindings]
+ (update-vals bindings
+ #(find-index-or-add! seen %)))
+
+(defn- index-update-bindings-keys [index-update-bindings-fn
+ bindings-map]
+ (persistent!
+ (reduce-kv (fn [m k v]
+ (assoc! m
+ (index-update-bindings-fn k)
+ v))
+ (transient {})
+ bindings-map)))
+
+(defn- index-token [internal-seen seen token]
+ (if-let [idx (find-mem-internal-idx internal-seen token)]
+ idx
+ (let [indexed (-> token
+ (update :matches
+ #(mapv (fn [[fact node-id]]
+ [(find-index-or-add! seen fact)
+ node-id])
+ %))
+ (update :bindings
+ #(index-bindings seen %)))]
+ (add-mem-internal-idx! internal-seen token indexed))))
+
+(defn index-alpha-memory [internal-seen seen amem]
+ (let [index-update-bindings-fn #(index-bindings seen %)
+ index-update-fact-fn #(find-index-or-add! seen %)
+ index-update-elements (fn [elements]
+ (mapv #(if-let [idx (find-mem-internal-idx internal-seen %)]
+ idx
+ (let [indexed (-> %
+ (update :fact
+ index-update-fact-fn)
+ (update :bindings
+ index-update-bindings-fn))]
+ (add-mem-internal-idx! internal-seen % indexed)))
+ elements))]
+ (update-vals amem
+ #(-> (index-update-bindings-keys index-update-bindings-fn %)
+ (update-vals index-update-elements)))))
+
+(defn index-accum-memory [seen accum-mem]
+ (let [index-update-bindings-fn #(index-bindings seen %)
+ index-facts (fn [facts]
+ (mapv #(find-index-or-add! seen %) facts))
+ index-update-accum-reduced (fn [node-id accum-reduced]
+ (let [m (meta accum-reduced)]
+ (if (::eng/accum-node m)
+ ;; AccumulateNode
+ (let [[facts res] accum-reduced
+ facts (index-facts facts)]
+ (with-meta
+ [facts
+ (if (= ::eng/not-reduced res)
+ res
+ (find-index-or-add! seen res))]
+ m))
+
+ ;; AccumulateWithJoinFilterNode
+ (with-meta (index-facts accum-reduced)
+ m))))
+ index-update-bindings-map (fn [node-id bindings-map]
+ (-> (index-update-bindings-keys index-update-bindings-fn bindings-map)
+ (update-vals #(index-update-accum-reduced node-id %))))]
+
+ (->> accum-mem
+ (reduce-kv (fn [m node-id bindings-map]
+ (assoc! m node-id (-> (index-update-bindings-keys index-update-bindings-fn bindings-map)
+ (update-vals #(index-update-bindings-map node-id %)))))
+ (transient {}))
+ persistent!)))
+
+(defn index-beta-memory [internal-seen seen bmem]
+ (let [index-update-tokens (fn [tokens]
+ (mapv #(index-token internal-seen seen %)
+ tokens))]
+ (update-vals bmem
+ (fn [v]
+ (-> (index-update-bindings-keys #(index-bindings seen %) v)
+ (update-vals index-update-tokens))))))
+
+(defn index-production-memory [internal-seen seen pmem]
+ (let [index-update-facts (fn [facts]
+ (mapv #(or (find-index seen %)
+ (find-index-or-add! seen %))
+ facts))]
+ (update-vals pmem
+ (fn [token-map]
+ (->> token-map
+ (reduce-kv (fn [m k v]
+ (assoc! m
+ (index-token internal-seen seen k)
+ (mapv index-update-facts v)))
+ (transient {}))
+ persistent!)))))
+
+(defn index-activation-map [internal-seen seen actmap]
+ (update-vals actmap
+ #(mapv (fn [^RuleOrderedActivation act]
+ (mem/->RuleOrderedActivation (.-node-id act)
+ (index-token internal-seen seen (.-token act))
+ (.-activation act)
+ (.-rule-load-order act)
+ false))
+ %)))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Commonly useful session serialization helpers.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(def ^:internal ^ThreadLocal ^List mem-facts
+ "Useful for ISessionSerializer implementors to have a reference to the facts deserialized via
+ IWorkingMemorySerializer that are needed to restore working memory whose locations were stubbed
+ with a MemIdx during serialization."
+ (ThreadLocal.))
+
+(def ^:internal ^ThreadLocal ^List mem-internal
+ "Useful for ISessionSerializer implementors to have a reference to the facts deserialized via
+ IWorkingMemorySerializer that are needed to restore working memory whose locations were stubbed
+ with an InternalMemIdx during serialization. These objects are specific to the Clara engine,
+ and as such will be serialized and deserialized along with the memory."
+ (ThreadLocal.))
+
+(defn find-mem-idx
+ "Finds the fact from mem-facts at the given index. See docs on mem-facts for more."
+ [idx]
+ (-> mem-facts
+ (.get)
+ (get idx)))
+
+(defn find-internal-idx
+ "Finds the fact from mem-internal at the given index. See docs on mem-internal for more."
+ [idx]
+ (-> mem-internal
+ (.get)
+ (get idx)))
+
+(defn indexed-session-memory-state
+ "Takes the working memory from a session and strips it down to only the memory needed for
+ serialization. Along with this, replaces all working memory facts with MemIdx place holders.
+ The terminology being used here is to call this step 'indexing' the memory.
+
+ A map is returned with two keys:
+ * :memory - The working memory representation that is the same as the given memory's :memory,
+ however, all facts in the memory are replaced with MemIdx placeholders.
+ * :indexed-facts - the facts replaced with MemIdx placeholders. The facts are returned in a
+ sequential collection. Each fact is the n'th item of the collection if the MemIdx for that
+ fact has :idx = n. No facts returned should be identical? (i.e. multiple references to the
+ same object instance). However, it is possible for some facts returned to be aggregations
+ containing other facts that do appear elsewhere in the fact sequence. It is up to the
+ implementation of the IWorkingMemorySerializer to deal with these possible, identical? object
+ references correctly. This is generally true for most serialization mechanisms.
+
+ Note! This function should not typically be used. It is left public to assist in ISessionSerializer
+ durability implementations. Use clara.rules/mk-session typically to make rule sessions.
+
+ Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
+ of memory."
+ [memory]
+ (let [idx-fact-arr-pair-fn (fn [^java.util.Map$Entry e]
+ [(:idx (.getValue e)) (.getKey e)])
+
+ internal-fact-arr-pair-fn (fn [^java.util.Map$Entry e]
+ ;; Internal facts are stored with a key
+ ;; of the original object to a tuple of
+ ;; the InternalMemIdx and the indexed object.
+ ;; When serializing these facts, to avoid serializing
+ ;; consumer facts contained within internal facts,
+ ;; the indexed object is serialized instead of
+ ;; the original object itself. See add-mem-internal-idx!
+ ;; for more details
+ (let [v (.getValue e)]
+ [(:idx (nth v 0)) (nth v 1)]))
+
+ vec-indexed-facts (fn [^Map fact->index-map
+ map-entry->arr-idx-pair]
+ ;; It is not generally safe to reduce or seq over a mutable Java Map.
+ ;; One example is IdentityHashMap. The iterator of the IdentityHashMap
+ ;; mutates the map entry values in place and it is never safe to call a
+ ;; Iterator.hasNext() when not finished working with the previous value
+ ;; returned from Iterator.next(). This is subtle and is actually only a
+ ;; problem in JDK6 for IdentityHashMap. JDK7+ appear to have discontinued
+ ;; this mutable map entry. However, this is not something to rely on and
+ ;; JDK6 support is still expected to work for Clara. The only transducer
+ ;; in Clojure that can currently, safely consume the JDK6-style
+ ;; IdentityHashMap via its entry set iterator is Eduction. This doesn't
+ ;; appear to be due to explicit semantics though, but rather an
+ ;; implementation detail.
+ ;; For further context, this situation is related, but not exactly the
+ ;; same as http://dev.clojure.org/jira/browse/CLJ-1738.
+ (let [;; The use of a primitive array here isn't strictly necessary. However,
+ ;; it doesn't add much in terms of complexity and is faster than the
+ ;; alternatives.
+ ^"[Ljava.lang.Object;" arr (make-array Object (.size fact->index-map))
+ es (.entrySet fact->index-map)
+ it (.iterator es)]
+ (when (.hasNext it)
+ (loop [^java.util.Map$Entry e (.next it)]
+ (let [pair (map-entry->arr-idx-pair e)]
+ (aset arr (nth pair 0) (nth pair 1)))
+ (when (.hasNext it)
+ (recur (.next it)))))
+ (into [] arr)))
+
+ index-memory (fn [memory]
+ (let [internal-seen (java.util.IdentityHashMap.)
+ seen (java.util.IdentityHashMap.)
+
+ indexed (-> memory
+ (update :accum-memory #(index-accum-memory seen %))
+ (update :alpha-memory #(index-alpha-memory internal-seen seen %))
+ (update :beta-memory #(index-beta-memory internal-seen seen %))
+ (update :production-memory #(index-production-memory internal-seen seen %))
+ (update :activation-map #(index-activation-map internal-seen seen %)))]
+
+ {:memory indexed
+ :indexed-facts (vec-indexed-facts seen idx-fact-arr-pair-fn)
+ :internal-indexed-facts (vec-indexed-facts internal-seen internal-fact-arr-pair-fn)}))]
+ (-> memory
+ index-memory
+ (update :memory
+ ;; Assoc nil values rather than using dissoc in order to preserve the type of the memory.
+ assoc
+ ;; The rulebase does not need to be stored per memory. It will be restored during deserialization.
+ :rulebase nil
+ ;; Currently these do not support serialization and must be provided during deserialization via a
+ ;; base-session or they default to the standard defaults.
+ :activation-group-sort-fn nil
+ :activation-group-fn nil
+ :alphas-fn nil))))
+
+(def ^:private create-get-alphas-fn @#'com/create-get-alphas-fn)
+
+(defn opts->get-alphas-fn [rulebase opts]
+ (let [fact-type-fn (:fact-type-fn opts type)
+ ancestors-fn (:ancestors-fn opts ancestors)]
+ (create-get-alphas-fn fact-type-fn
+ ancestors-fn
+ (:alpha-roots rulebase))))
+
+(defn assemble-restored-session
+ "Builds a Clara session from the given rulebase and memory components. When no memory is given a new
+ one is created with all of the defaults of eng/local-memory.
+ Note! This function should not typically be used. It is left public to assist in ISessionSerializer
+ durability implementations. Use clara.rules/mk-session typically to make rule sessions.
+
+ If the options are not provided, they will default to the Clara session defaults. The available options
+ on the session (as opposed to the rulebase) are the transport and listeners.
+
+ Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
+ of memory."
+ ([rulebase opts]
+ (let [{:keys [listeners transport]} opts]
+
+ (eng/assemble {:rulebase rulebase
+ :memory (eng/local-memory rulebase
+ (clara.rules.engine.LocalTransport.)
+ (:activation-group-sort-fn rulebase)
+ (:activation-group-fn rulebase)
+ ;; TODO: Memory doesn't seem to ever need this or use
+ ;; it. Can we just remove it from memory?
+ (:get-alphas-fn rulebase))
+ :transport (or transport (clara.rules.engine.LocalTransport.))
+ :listeners (or listeners [])
+ :get-alphas-fn (:get-alphas-fn rulebase)})))
+
+ ([rulebase memory opts]
+ (let [{:keys [listeners transport]} opts]
+
+ (eng/assemble {:rulebase rulebase
+ :memory (assoc memory
+ :rulebase rulebase
+ :activation-group-sort-fn (:activation-group-sort-fn rulebase)
+ :activation-group-fn (:activation-group-fn rulebase)
+ :alphas-fn (:get-alphas-fn rulebase))
+ :transport (or transport (clara.rules.engine.LocalTransport.))
+ :listeners (or listeners [])
+ :get-alphas-fn (:get-alphas-fn rulebase)}))))
+
+(defn rulebase->rulebase-with-opts
+ "Intended for use in rulebase deserialization implementations where these functions were stripped
+ off the rulebase implementation; this function takes these options and wraps them in the same manner
+ as clara.rules/mk-session. This function should typically only be used when implementing ISessionSerializer."
+ [without-opts-rulebase opts]
+ (assoc without-opts-rulebase
+ :activation-group-sort-fn (eng/options->activation-group-sort-fn opts)
+ :activation-group-fn (eng/options->activation-group-fn opts)
+ :get-alphas-fn (opts->get-alphas-fn without-opts-rulebase opts)))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Serialization protocols.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(defprotocol ISessionSerializer
+ "Provides the ability to serialize and deserialize a session. Options can be given and supported
+ via the opts argument to both serialize and deserialize. Certain options are expected and
+ required to be supported by any implementation of ISessionSerializer. These are referred to as
+ the 'standard' options.
+
+ These include:
+
+ * :rulebase-only? - When true, indicates the rulebase is the only part of the session to serialize.
+ The *default* is false for the serialize-session-state function. It is defaulted to true for the
+ serialize-rulebase convenience function. This is useful when many sessions sharing a common
+ rulebase are to be serialized. Storing only the rulebase will likely save both
+ space and time in these scenarios.
+
+ * :with-rulebase? - When true the rulebase is included in the serialized state of the session.
+ The *default* behavior is false when serializing a session via the serialize-session-state function.
+
+ * :base-rulebase - A rulebase to attach to the session being deserialized. The assumption here is that
+ the session was serialized without the rulebase, i.e. :with-rulebase? = false, so it needs a rulebase
+ to be 'attached' back onto it to be usable.
+
+ * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
+ Larger batch sizes should see better performance compared to smaller batch sizes.
+ Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
+
+ Options for the rulebase semantics that are documented at clara.rules/mk-session include:
+
+ * :fact-type-fn
+ * :ancestors-fn
+ * :activation-group-sort-fn
+ * :activation-group-fn
+
+
+ Other options can be supported by specific implementors of ISessionSerializer."
+
+ (serialize [this session opts]
+ "Serialize the given session with the given options. Where the session state is stored is dependent
+ on the implementation of this instance e.g. it may store it in a known reference to an IO stream.")
+
+ (deserialize [this mem-facts opts]
+ "Deserialize the session state associated to this instance e.g. it may be coming from a known reference
+ to an IO stream. mem-facts is a sequential collection of the working memory facts that were
+ serialized and deserialized by an implementation of IWorkingMemorySerializer."))
+
+(defprotocol IWorkingMemorySerializer
+ "Provides the ability to serialize and deserialize the facts stored in the working memory of a session.
+ Facts can be serialized in whatever way makes sense for a given domain. The domain of facts can vary
+ greatly from one use-case of the rules engine to the next. So the mechanism of serializing the facts
+ in memory can vary greatly as a result of this. Clara does not yet provide any default implementations
+ for this, but may in the future. However, many of the handlers defined in clara.rules.durability.fressian
+ can be reused if the consumer wishes to serialize via Fressian. See more on this in
+ the clara.rules.durability.fressian namespace docs.
+
+ The important part of this serialization protocol is that the facts returned from deserialize-facts are in
+ the *same order* as they were given to serialize-facts."
+
+ (serialize-facts [this fact-seq]
+ "Serialize the given fact-seq, which is an order sequence of facts from working memory of a session.
+ Note, as mentioned in the protocol docs, the *order* these are given is *important* and should be preserved
+ when they are returned via deserialize-facts.")
+
+ (deserialize-facts [this]
+ "Returns the facts associated to this instance deserialized in the same order that they were given
+ to serialize-facts."))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Durability API.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(s/defn serialize-rulebase
+ "Serialize *only* the rulebase portion of the given session. The serialization is done by the
+ given session-serializer implementor of ISessionSerializer.
+
+ Options can be given as an optional argument. These are passed through to the session-serializer
+ implementation. See the description of standard options an ISessionSerializer should provide in
+ the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
+ any non-standard options supported/not supported.
+ See ISessionSerializer docs for more on that.
+
+ The rulebase is the stateless structure that controls the flow of productions, i.e. the 'rete'
+ rule network. The ability to serialize only the rulebase is supported so that the rulebase can
+ be stored and retrieved a single time for potentially many sessions containing different working
+ memory data, for the same rules. This function is only a convenience for passing the
+ :rulebase-only? true flag to the serialize-session-state function.
+ See serialize-session-state for more."
+ ([session :- (s/protocol eng/ISession)
+ session-serializer :- (s/protocol ISessionSerializer)]
+ (serialize-rulebase session
+ session-serializer
+ {}))
+
+ ([session :- (s/protocol eng/ISession)
+ session-serializer :- (s/protocol ISessionSerializer)
+ opts :- {s/Any s/Any}]
+ (serialize session-serializer
+ session
+ (assoc opts :rulebase-only? true))))
+
+(s/defn deserialize-rulebase :- Rulebase
+ "Deserializes the rulebase stored via the serialize-rulebase function. This is done via the given
+ session-serializer implementor of ISessionSerializer.
+
+ Options can be given as an optional argument. These are passed through to the session-serializer
+ implementation. See the description of standard options an ISessionSerializer should provide in
+ the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
+ any non-standard options supported/not supported.
+ See ISessionSerializer docs for more on that."
+ ([session-serializer :- (s/protocol ISessionSerializer)]
+ (deserialize-rulebase session-serializer
+ {}))
+
+ ([session-serializer :- (s/protocol ISessionSerializer)
+ opts :- {s/Any s/Any}]
+ (deserialize session-serializer
+ nil
+ (assoc opts :rulebase-only? true))))
+
+(s/defn serialize-session-state
+ "Serializes the state of the given session. By default, this *excludes* the rulebase from being
+ serialized alongside the working memory state of the session. The rulebase, if specified, and
+ the working memory of the session are serialized by the session-serializer implementor of
+ ISessionSerializer. The memory-facts-serializer implementor of IWorkingMemorySerializer is used to
+ serialize the actual facts stored within working memory.
+
+ Typically, the caller can use a pre-defined default session-serializer, such as
+ clara.rules.durability.fressian/create-session-serializer.
+ See clara.rules.durability.fressian for more specific details regarding this, including the extra
+ required dependency on Fressian notes found there.
+ The memory-facts-serializer is often a custom-provided implementation since the facts stored in
+ working memory are domain specific to the consumers' usage of the rules.
+ See the IWorkingMemorySerializer docs for more.
+
+ Options can be given as an optional argument. These are passed through to the session-serializer
+ implementation. See the description of standard options an ISessionSerializer should provide in
+ the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
+ any non-standard options supported/not supported."
+ ([session :- (s/protocol eng/ISession)
+ session-serializer :- (s/protocol ISessionSerializer)
+ memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)]
+ (serialize-session-state session
+ session-serializer
+ memory-facts-serializer
+ {:with-rulebase? false}))
+
+ ([session :- (s/protocol eng/ISession)
+ session-serializer :- (s/protocol ISessionSerializer)
+ memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)
+ opts :- {s/Any s/Any}]
+ (serialize-facts memory-facts-serializer
+ (serialize session-serializer session opts))))
+
+(s/defn deserialize-session-state :- (s/protocol eng/ISession)
+ "Deserializes the session that was stored via the serialize-session-state function. Similar to
+ what is described there, this uses the session-serializer implementor of ISessionSerializer to
+ deserialize the session and working memory state. The memory-facts-serializer implementor of
+ IWorkingMemorySerializer is used to deserialize the actual facts stored in working memory.
+
+ Options can be given as an optional argument. These are passed through to the session-serializer
+ implementation. See the description of standard options an ISessionSerializer should provide in
+ the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
+ any non-standard options supported/not supported."
+ ([session-serializer :- (s/protocol ISessionSerializer)
+ memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)]
+ (deserialize-session-state session-serializer
+ memory-facts-serializer
+ {}))
+
+ ([session-serializer :- (s/protocol ISessionSerializer)
+ memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)
+ opts :- {s/Any s/Any}]
+ (deserialize session-serializer
+ (deserialize-facts memory-facts-serializer)
+ opts)))
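+
+;; A minimal end-to-end sketch, assuming an existing session `s` built with
+;; clara.rules/mk-session and the Fressian serializer from
+;; clara.rules.durability.fressian. The fact serializer below simply keeps the
+;; facts in an atom; a real implementation would write them to durable storage
+;; while preserving their order:
+(comment
+ (require '[clara.rules.durability.fressian :as dfres])
+ (import '[java.io ByteArrayOutputStream ByteArrayInputStream])
+
+ (def fact-store (atom nil))
+ (def in-memory-fact-serializer
+ (reify IWorkingMemorySerializer
+ (serialize-facts [_ fact-seq] (reset! fact-store (vec fact-seq)))
+ (deserialize-facts [_] @fact-store)))
+
+ (let [out (ByteArrayOutputStream.)]
+ (serialize-session-state s (dfres/create-session-serializer out) in-memory-fact-serializer)
+ (deserialize-session-state (dfres/create-session-serializer (ByteArrayInputStream. (.toByteArray out)))
+ in-memory-fact-serializer
+ {:base-rulebase (:rulebase (eng/components s))})))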
diff --git a/build/clara/rules/durability/fressian.clj b/build/clara/rules/durability/fressian.clj
new file mode 100644
index 00000000..c60c9b00
--- /dev/null
+++ b/build/clara/rules/durability/fressian.clj
@@ -0,0 +1,642 @@
+(ns clara.rules.durability.fressian
+ "A default Fressian-based implementation of d/ISessionSerializer.
+
+ Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
+ of memory."
+ (:require [clara.rules.durability :as d]
+ [clara.rules.memory :as mem]
+ [clara.rules.engine :as eng]
+ [clara.rules.compiler :as com]
+ [clara.rules.platform :as pform]
+ [schema.core :as s]
+ [clojure.data.fressian :as fres]
+ [clojure.java.io :as jio]
+ [clojure.main :as cm])
+ (:import [clara.rules.durability
+ MemIdx
+ InternalMemIdx]
+ [clara.rules.memory
+ RuleOrderedActivation]
+ [clara.rules.engine
+ Token
+ Element
+ ProductionNode
+ QueryNode
+ AlphaNode
+ RootJoinNode
+ HashJoinNode
+ ExpressionJoinNode
+ NegationNode
+ NegationWithJoinFilterNode
+ TestNode
+ AccumulateNode
+ AccumulateWithJoinFilterNode]
+ [org.fressian
+ StreamingWriter
+ Writer
+ Reader
+ FressianWriter
+ FressianReader]
+ [org.fressian.handlers
+ WriteHandler
+ ReadHandler]
+ [java.util
+ ArrayList
+ IdentityHashMap
+ Map
+ WeakHashMap]
+ [java.io
+ InputStream
+ OutputStream]))
+
+;; Use this map to cache the symbol for the map->RecordNameHere
+;; factory function created for every Clojure record to improve
+;; serialization performance.
+;; See https://github.com/cerner/clara-rules/issues/245 for more extensive discussion.
+(def ^:private ^Map class->factory-fn-sym (java.util.Collections/synchronizedMap
+ (WeakHashMap.)))
+
+(defn record-map-constructor-name
+ "Return the 'map->' prefix, factory constructor function for a Clojure record."
+ [rec]
+ (let [klass (class rec)]
+ (if-let [cached-sym (.get class->factory-fn-sym klass)]
+ cached-sym
+ (let [class-name (.getName ^Class klass)
+ idx (.lastIndexOf class-name (int \.))
+ ns-nom (.substring class-name 0 idx)
+ nom (.substring class-name (inc idx))
+ factory-fn-sym (symbol (str (cm/demunge ns-nom)
+ "/map->"
+ (cm/demunge nom)))]
+ (.put class->factory-fn-sym klass factory-fn-sym)
+ factory-fn-sym))))
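+
+;; A minimal sketch: for a record type defined in clara.rules.durability this
+;; yields the map->... factory symbol used to rebuild the record on read:
+(comment
+ (record-map-constructor-name (d/->MemIdx 0)))
+;; => clara.rules.durability/map->MemIdx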
+
+(defn write-map
+ "Writes a map as Fressian with the tag 'map' and all keys cached."
+ [^Writer w m]
+ (.writeTag w "map" 1)
+ (.beginClosedList ^StreamingWriter w)
+ (reduce-kv
+ (fn [^Writer w k v]
+ (.writeObject w k true)
+ (.writeObject w v))
+ w
+ m)
+ (.endList ^StreamingWriter w))
+
+(defn write-with-meta
+ "Writes the object to the writer under the given tag. If the record has metadata, the metadata
+ will also be written. read-with-meta will associate this metadata back with the object
+ when reading."
+ ([w tag o]
+ (write-with-meta w tag o (fn [^Writer w o] (.writeList w o))))
+ ([^Writer w tag o write-fn]
+ (let [m (meta o)]
+ (do
+ (.writeTag w tag 2)
+ (write-fn w o)
+ (if m
+ (.writeObject w m)
+ (.writeNull w))))))
+
+(defn- read-meta [^Reader rdr]
+ (some->> rdr
+ .readObject
+ (into {})))
+
+(defn read-with-meta
+ "Reads an object from the reader that was written via write-with-meta. If the object was written
+ with metadata the metadata will be associated on the object returned."
+ [^Reader rdr build-fn]
+ (let [o (build-fn (.readObject rdr))
+ m (read-meta rdr)]
+ (cond-> o
+ m (with-meta m))))
+
+(defn write-record
+ "Same as write-with-meta, but with Clojure record support. The type of the record will
+ be preserved."
+ [^Writer w tag rec]
+ (let [m (meta rec)]
+ (.writeTag w tag 3)
+ (.writeObject w (record-map-constructor-name rec) true)
+ (write-map w rec)
+ (if m
+ (.writeObject w m)
+ (.writeNull w))))
+
+(defn read-record
+ "Same as read-with-meta, but with Clojure record support. The type of the record will
+ be preserved."
+ ([^Reader rdr]
+ (read-record rdr nil))
+ ([^Reader rdr add-fn]
+ (let [builder (-> (.readObject rdr) resolve deref)
+ build-map (.readObject rdr)
+ m (read-meta rdr)]
+ (cond-> (builder build-map)
+ m (with-meta m)
+ add-fn add-fn))))
+
+(defn- create-cached-node-handler
+ ([clazz
+ tag
+ tag-for-cached]
+ {:class clazz
+ :writer (reify WriteHandler
+ (write [_ w o]
+ (let [node-id (:id o)]
+ (if (@(.get d/node-id->node-cache) node-id)
+ (do
+ (.writeTag w tag-for-cached 1)
+ (.writeInt w node-id))
+ (do
+ (d/cache-node o)
+ (write-record w tag o))))))
+ :readers {tag-for-cached
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (d/node-id->node (.readObject rdr))))
+ tag
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (-> rdr
+ read-record
+ d/cache-node)))}})
+ ([clazz
+ tag
+ tag-for-cached
+ remove-node-expr-fn
+ add-node-expr-fn]
+ {:class clazz
+ :writer (reify WriteHandler
+ (write [_ w o]
+ (let [node-id (:id o)]
+ (if (@(.get d/node-id->node-cache) node-id)
+ (do
+ (.writeTag w tag-for-cached 1)
+ (.writeInt w node-id))
+ (do
+ (d/cache-node o)
+ (write-record w tag (remove-node-expr-fn o)))))))
+ :readers {tag-for-cached
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (d/node-id->node (.readObject rdr))))
+ tag
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (-> rdr
+ (read-record add-node-expr-fn)
+ d/cache-node)))}}))
+
+(defn- create-identity-based-handler
+ [clazz
+ tag
+ write-fn
+ read-fn]
+ (let [indexed-tag (str tag "-idx")]
+ ;; Write an object a single time per object reference to that object. The object is then "cached"
+ ;; with the IdentityHashMap `d/clj-struct-holder`. If another reference to this object instance
+ ;; is encountered later, only the "index" of the object in the map will be written.
+ {:class clazz
+ :writer (reify WriteHandler
+ (write [_ w o]
+ (if-let [idx (d/clj-struct->idx o)]
+ (do
+ (.writeTag w indexed-tag 1)
+ (.writeInt w idx))
+ (do
+ ;; We are writing all nested objects prior to adding the original object to the cache here, as
+ ;; this is the order that will occur on read, i.e. the reader will have to traverse to the bottom
+ ;; of the struct before rebuilding the object.
+ (write-fn w tag o)
+ (d/clj-struct-holder-add-fact-idx! o)))))
+ ;; When reading the first time a reference to an object instance is found, the entire object will
+ ;; need to be constructed. It is then put into indexed cache. If more references to this object
+ ;; instance are encountered later, they will be in the form of a numeric index into this cache.
+ ;; This is guaranteed by the semantics of the corresponding WriteHandler.
+ :readers {indexed-tag
+ (reify ReadHandler
+ (read [_ rdr _ _]
+ (d/clj-struct-idx->obj (.readInt rdr))))
+ tag
+ (reify ReadHandler
+ (read [_ rdr _ _]
+ (-> rdr
+ read-fn
+ d/clj-struct-holder-add-obj!)))}}))
+
+(def handlers
+ "A structure tying together the custom Fressian write and read handlers used
+ by FressianSessionSerializer's."
+ {"java/class"
+ {:class Class
+ :writer (reify WriteHandler
+ (write [_ w c]
+ (.writeTag w "java/class" 1)
+ (.writeObject w (symbol (.getName ^Class c)) true)))
+ :readers {"java/class"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (resolve (.readObject rdr))))}}
+
+ "clj/set"
+ (create-identity-based-handler
+ clojure.lang.APersistentSet
+ "clj/set"
+ write-with-meta
+ (fn clj-set-reader [rdr] (read-with-meta rdr set)))
+
+ "clj/vector"
+ (create-identity-based-handler
+ clojure.lang.APersistentVector
+ "clj/vector"
+ write-with-meta
+ (fn clj-vec-reader [rdr] (read-with-meta rdr vec)))
+
+ "clj/list"
+ (create-identity-based-handler
+ clojure.lang.PersistentList
+ "clj/list"
+ write-with-meta
+ (fn clj-list-reader [rdr] (read-with-meta rdr #(apply list %))))
+
+ "clj/emptylist"
+ ;; Not using the identity-based handler, since empty lists are always identical anyway;
+ ;; the metadata is added back in the reader.
+ {:class clojure.lang.PersistentList$EmptyList
+ :writer (reify WriteHandler
+ (write [_ w o]
+ (let [m (meta o)]
+ (do
+ (.writeTag w "clj/emptylist" 1)
+ (if m
+ (.writeObject w m)
+ (.writeNull w))))))
+ :readers {"clj/emptylist"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (let [m (read-meta rdr)]
+ (cond-> '()
+ m (with-meta m)))))}}
+
+ "clj/aseq"
+ (create-identity-based-handler
+ clojure.lang.ASeq
+ "clj/aseq"
+ write-with-meta
+ (fn clj-seq-reader [rdr] (read-with-meta rdr sequence)))
+
+ "clj/lazyseq"
+ (create-identity-based-handler
+ clojure.lang.LazySeq
+ "clj/lazyseq"
+ write-with-meta
+ (fn clj-lazy-seq-reader [rdr] (read-with-meta rdr sequence)))
+
+ "clj/map"
+ (create-identity-based-handler
+ clojure.lang.APersistentMap
+ "clj/map"
+ (fn clj-map-writer [wtr tag m] (write-with-meta wtr tag m write-map))
+ (fn clj-map-reader [rdr] (read-with-meta rdr #(into {} %))))
+
+ "clj/treeset"
+ (create-identity-based-handler
+ clojure.lang.PersistentTreeSet
+ "clj/treeset"
+ (fn clj-treeset-writer [^Writer wtr tag s]
+ (let [cname (d/sorted-comparator-name s)]
+ (.writeTag wtr tag 3)
+ (if cname
+ (.writeObject wtr cname true)
+ (.writeNull wtr))
+ ;; Preserve metadata.
+ (if-let [m (meta s)]
+ (.writeObject wtr m)
+ (.writeNull wtr))
+ (.writeList wtr s)))
+ (fn clj-treeset-reader [^Reader rdr]
+ (let [c (some-> rdr .readObject resolve deref)
+ m (.readObject rdr)
+ s (-> (.readObject rdr)
+ (d/seq->sorted-set c))]
+ (if m
+ (with-meta s m)
+ s))))
+
+ "clj/treemap"
+ (create-identity-based-handler
+ clojure.lang.PersistentTreeMap
+ "clj/treemap"
+ (fn clj-treemap-writer [^Writer wtr tag o]
+ (let [cname (d/sorted-comparator-name o)]
+ (.writeTag wtr tag 3)
+ (if cname
+ (.writeObject wtr cname true)
+ (.writeNull wtr))
+ ;; Preserve metadata.
+ (if-let [m (meta o)]
+ (.writeObject wtr m)
+ (.writeNull wtr))
+ (write-map wtr o)))
+ (fn clj-treemap-reader [^Reader rdr]
+ (let [c (some-> rdr .readObject resolve deref)
+ m (.readObject rdr)
+ s (d/seq->sorted-map (.readObject rdr) c)]
+ (if m
+ (with-meta s m)
+ s))))
+
+ "clj/mapentry"
+ (create-identity-based-handler
+ clojure.lang.MapEntry
+ "clj/mapentry"
+ (fn clj-mapentry-writer [^Writer wtr tag o]
+ (.writeTag wtr tag 2)
+ (.writeObject wtr (key o) true)
+ (.writeObject wtr (val o)))
+ (fn clj-mapentry-reader [^Reader rdr]
+ (d/create-map-entry (.readObject rdr)
+ (.readObject rdr))))
+
+ ;; Have to redefine both Symbol and IRecord to support metadata as well
+ ;; as identity-based caching for the IRecord case.
+
+ "clj/sym"
+ (create-identity-based-handler
+ clojure.lang.Symbol
+ "clj/sym"
+ (fn clj-sym-writer [^Writer wtr tag o]
+ ;; Mostly copied from private fres/write-named, except the metadata part.
+ (.writeTag wtr tag 3)
+ (.writeObject wtr (namespace o) true)
+ (.writeObject wtr (name o) true)
+ (if-let [m (meta o)]
+ (.writeObject wtr m)
+ (.writeNull wtr)))
+ (fn clj-sym-reader [^Reader rdr]
+ (let [s (symbol (.readObject rdr) (.readObject rdr))
+ m (read-meta rdr)]
+ (cond-> s
+ m (with-meta m)))))
+
+ "clj/record"
+ (create-identity-based-handler
+ clojure.lang.IRecord
+ "clj/record"
+ write-record
+ read-record)
+
+ "clara/productionnode"
+ (create-cached-node-handler ProductionNode
+ "clara/productionnode"
+ "clara/productionnodeid"
+ #(assoc % :rhs nil)
+ d/add-rhs-fn)
+
+ "clara/querynode"
+ (create-cached-node-handler QueryNode
+ "clara/querynode"
+ "clara/querynodeid")
+
+ "clara/alphanode"
+ (create-cached-node-handler AlphaNode
+ "clara/alphanodeid"
+ "clara/alphanode"
+ #(assoc % :activation nil)
+ d/add-alpha-fn)
+
+ "clara/rootjoinnode"
+ (create-cached-node-handler RootJoinNode
+ "clara/rootjoinnode"
+ "clara/rootjoinnodeid")
+
+ "clara/hashjoinnode"
+ (create-cached-node-handler HashJoinNode
+ "clara/hashjoinnode"
+ "clara/hashjoinnodeid")
+
+ "clara/exprjoinnode"
+ (create-cached-node-handler ExpressionJoinNode
+ "clara/exprjoinnode"
+ "clara/exprjoinnodeid"
+ #(assoc % :join-filter-fn nil)
+ d/add-join-filter-fn)
+
+ "clara/negationnode"
+ (create-cached-node-handler NegationNode
+ "clara/negationnode"
+ "clara/negationnodeid")
+
+ "clara/negationwjoinnode"
+ (create-cached-node-handler NegationWithJoinFilterNode
+ "clara/negationwjoinnode"
+ "clara/negationwjoinnodeid"
+ #(assoc % :join-filter-fn nil)
+ d/add-join-filter-fn)
+
+ "clara/testnode"
+ (create-cached-node-handler TestNode
+ "clara/testnode"
+ "clara/testnodeid"
+ #(assoc % :test nil)
+ d/add-test-fn)
+
+ "clara/accumnode"
+ (create-cached-node-handler AccumulateNode
+ "clara/accumnode"
+ "clara/accumnodeid"
+ #(assoc % :accumulator nil)
+ d/add-accumulator)
+
+ "clara/accumwjoinnode"
+ (create-cached-node-handler AccumulateWithJoinFilterNode
+ "clara/accumwjoinnode"
+ "clara/accumwjoinnodeid"
+ #(assoc % :accumulator nil :join-filter-fn nil)
+ (comp d/add-accumulator d/add-join-filter-fn))
+
+ "clara/ruleorderactivation"
+ {:class RuleOrderedActivation
+ :writer (reify WriteHandler
+ (write [_ w c]
+ (.writeTag w "clara/ruleorderactivation" 4)
+ (.writeObject w (.-node-id ^RuleOrderedActivation c) true)
+ (.writeObject w (.-token ^RuleOrderedActivation c))
+ (.writeObject w (.-activation ^RuleOrderedActivation c))
+ (.writeInt w (.-rule-load-order ^RuleOrderedActivation c))))
+ :readers {"clara/ruleorderactivation"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (mem/->RuleOrderedActivation (.readObject rdr)
+ (.readObject rdr)
+ (.readObject rdr)
+ (.readObject rdr)
+ false)))}}
+
+ "clara/memidx"
+ {:class MemIdx
+ :writer (reify WriteHandler
+ (write [_ w c]
+ (.writeTag w "clara/memidx" 1)
+ (.writeInt w (:idx c))))
+ :readers {"clara/memidx"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (d/find-mem-idx (.readObject rdr))))}}
+
+ "clara/internalmemidx"
+ {:class InternalMemIdx
+ :writer (reify WriteHandler
+ (write [_ w c]
+ (.writeTag w "clara/internalmemidx" 1)
+ (.writeInt w (:idx c))))
+ :readers {"clara/internalmemidx"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (d/find-internal-idx (.readObject rdr))))}}})
+
+(def write-handlers
+ "All Fressian write handlers used by FressianSessionSerializer's."
+ (into fres/clojure-write-handlers
+ (map (fn [[tag {clazz :class wtr :writer}]]
+ [clazz {tag wtr}]))
+ handlers))
+
+(def read-handlers
+ "All Fressian read handlers used by FressianSessionSerializer's."
+ (->> handlers
+ vals
+ (into fres/clojure-read-handlers
+ (mapcat :readers))))
+
+(def write-handler-lookup
+ (-> write-handlers
+ fres/associative-lookup
+ fres/inheritance-lookup))
+
+(def read-handler-lookup
+ (fres/associative-lookup read-handlers))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;; Session serializer.
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(defrecord FressianSessionSerializer [in-stream out-stream]
+ d/ISessionSerializer
+ (serialize [_ session opts]
+ (let [{:keys [rulebase memory]} (eng/components session)
+ node-expr-fn-lookup (:node-expr-fn-lookup rulebase)
+ remove-node-fns (fn [expr-lookup]
+ (zipmap (keys expr-lookup)
+ (mapv second (vals expr-lookup))))
+ rulebase (assoc rulebase
+ :activation-group-sort-fn nil
+ :activation-group-fn nil
+ :get-alphas-fn nil
+ :node-expr-fn-lookup nil)
+ record-holder (IdentityHashMap.)
+ do-serialize
+ (fn [sources]
+ (with-open [^FressianWriter wtr
+ (fres/create-writer out-stream :handlers write-handler-lookup)]
+ (pform/thread-local-binding [d/node-id->node-cache (volatile! {})
+ d/clj-struct-holder record-holder]
+ (doseq [s sources] (fres/write-object wtr s)))))]
+
+ ;; In this case there is nothing to do with memory, so just serialize immediately.
+ (if (:rulebase-only? opts)
+ ;; node-expr-fn-lookup is a map with a structure of:
+ ;; {[Int Keyword] [IFn {Keyword Any}]}
+ ;; as fns are not serializable, we must remove them and alter the structure of the map to be
+ ;; {[Int Keyword] {Keyword Any}}
+ ;; during deserialization the compilation-context({Keyword Any}), which contains the unevaluated form,
+ ;; can be used to reconstruct the original map.
+ (do-serialize [(remove-node-fns node-expr-fn-lookup) rulebase])
+
+ ;; Otherwise memory needs to have facts extracted to return.
+ (let [{:keys [memory indexed-facts internal-indexed-facts]} (d/indexed-session-memory-state memory)
+ sources (if (:with-rulebase? opts)
+ [(remove-node-fns node-expr-fn-lookup) rulebase internal-indexed-facts memory]
+ [internal-indexed-facts memory])]
+
+ (do-serialize sources)
+
+ ;; Return the facts needing to be serialized still.
+ indexed-facts))))
+
+ (deserialize [_ mem-facts opts]
+
+ (with-open [^FressianReader rdr (fres/create-reader in-stream :handlers read-handler-lookup)]
+ (let [{:keys [rulebase-only? base-rulebase forms-per-eval]} opts
+
+ record-holder (ArrayList.)
+ ;; The rulebase should either be given from the base-session or found in
+ ;; the restored session-state.
+ maybe-base-rulebase (when (and (not rulebase-only?) base-rulebase)
+ base-rulebase)
+
+ forms-per-eval (or forms-per-eval com/forms-per-eval-default)
+
+ reconstruct-expressions (fn [expr-lookup]
+ ;; Rebuilding the expr-lookup map from the serialized map:
+ ;; {[Int Keyword] {Keyword Any}} -> {[Int Keyword] [SExpr {Keyword Any}]}
+ (into {}
+ (for [[node-key compilation-ctx] expr-lookup]
+ [node-key [(-> compilation-ctx (get (nth node-key 1)))
+ compilation-ctx]])))
+
+ rulebase (if maybe-base-rulebase
+ maybe-base-rulebase
+ (let [without-opts-rulebase
+ (pform/thread-local-binding [d/node-id->node-cache (volatile! {})
+ d/clj-struct-holder record-holder]
+ (pform/thread-local-binding [d/node-fn-cache (-> (fres/read-object rdr)
+ reconstruct-expressions
+ (com/compile-exprs forms-per-eval))]
+ (assoc (fres/read-object rdr)
+ :node-expr-fn-lookup
+ (.get d/node-fn-cache))))]
+ (d/rulebase->rulebase-with-opts without-opts-rulebase opts)))]
+
+ (if rulebase-only?
+ rulebase
+ (d/assemble-restored-session rulebase
+ (pform/thread-local-binding [d/clj-struct-holder record-holder
+ d/mem-facts mem-facts]
+ ;; internal memory contains facts provided by mem-facts
+ ;; thus mem-facts must be bound before the call to read
+ ;; the internal memory
+ (pform/thread-local-binding [d/mem-internal (fres/read-object rdr)]
+ (fres/read-object rdr)))
+ opts))))))
+
+(s/defn create-session-serializer
+ "Creates an instance of FressianSessionSerializer which implements d/ISessionSerializer by using
+ Fressian serialization for the session structures.
+
+ In the one arity case, takes either an input stream or an output stream. This arity is intended for
+ creating a Fressian serializer instance that will only be used for serialization or deserialization,
+ but not both. e.g. This is often convenient if serialization and deserialization are not done from
+ the same process. If it is to be used for serialization, then the stream given should be an output
+ stream. If it is to be used for deserialization, then the stream to be given should be an
+ input stream.
+
+ In the two arity case, takes an input stream and an output stream. These will be used for
+ deserialization and serialization within the created Fressian serializer instance, respectively.
+
+ Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
+ of memory."
+ ([in-or-out-stream :- (s/pred (some-fn #(instance? InputStream %)
+ #(instance? OutputStream %))
+ "java.io.InputStream or java.io.OutputStream")]
+ (if (instance? InputStream in-or-out-stream)
+ (create-session-serializer in-or-out-stream nil)
+ (create-session-serializer nil in-or-out-stream)))
+
+ ([in-stream :- (s/maybe InputStream)
+ out-stream :- (s/maybe OutputStream)]
+ (->FressianSessionSerializer in-stream out-stream)))
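+
+;; A minimal sketch of the arities above; the file paths are hypothetical:
+(comment
+ ;; Output stream only: suitable for serialize.
+ (create-session-serializer (java.io.FileOutputStream. "session.fressian"))
+ ;; Input stream only: suitable for deserialize.
+ (create-session-serializer (java.io.FileInputStream. "session.fressian"))
+ ;; Both streams: can perform a full round trip.
+ (create-session-serializer (java.io.FileInputStream. "session.fressian")
+ (java.io.FileOutputStream. "session-copy.fressian")))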
diff --git a/src/main/clojure/clara/rules/engine.cljc b/build/clara/rules/engine.clj
similarity index 93%
rename from src/main/clojure/clara/rules/engine.cljc
rename to build/clara/rules/engine.clj
index 2468407b..4bbfa1b1 100644
--- a/src/main/clojure/clara/rules/engine.cljc
+++ b/build/clara/rules/engine.clj
@@ -1,15 +1,12 @@
(ns clara.rules.engine
"This namespace is for internal use and may move in the future. Most users should use only the clara.rules namespace."
- (:require [clojure.reflect :as reflect]
- [clojure.core.reducers :as r]
- [schema.core :as s]
+ (:require [clojure.core.reducers :as r]
[clojure.string :as string]
[clara.rules.memory :as mem]
[clara.rules.listener :as l]
- #?(:clj [clara.rules.platform :as platform]
- :cljs [clara.rules.platform :as platform :include-macros true])
+ [clara.rules.platform :as platform]
[clara.rules.update-cache.core :as uc]
- #?(:clj [clara.rules.update-cache.cancelling :as ca])))
+ [clara.rules.update-cache.cancelling :as ca]))
;; The accumulator is a Rete extension to run an accumulation (such as sum, average, or similar operation)
;; over a collection of values passing through the Rete network. This object defines the behavior
@@ -39,24 +36,12 @@
;; if that object descends from a particular object through
;; Clojure's hierarchy as determined by the isa? function.
;; See Issue 239 for more details.
-#?(:clj
- (do
- ;; A marker interface to identify internal facts.
- (definterface ISystemFact)
- (defrecord NegationResult [gen-rule-name ancestor-bindings]
- ISystemFact))
-
- :cljs
- (do
- (defrecord NegationResult [gen-rule-name ancestor-bindings])
- ;; Make NegationResult a "system type" so that NegationResult
- ;; facts are special-cased when matching productions. This serves
- ;; the same purpose as implementing the ISystemFact Java interface
- ;; on the Clojure version of NegationResult.
- ;; ClojureScript does not have definterface; if we experience performance
- ;; problems in ClojureScript similar to those on the JVM that are
- ;; described in issue 239 we can investigate a similar strategy in JavaScript.
- (derive NegationResult ::system-type)))
+;; A marker interface to identify internal facts.
+(definterface ISystemFact)
+
+(defrecord NegationResult
+ [gen-rule-name ancestor-bindings]
+ ISystemFact)
;; Schema for the structure returned by the components
;; function on the session protocol.
@@ -64,7 +49,6 @@
;; for now since it's unused for validation and created
;; undesired warnings as described at https://groups.google.com/forum/#!topic/prismatic-plumbing/o65PfJ4CUkI
(comment
-
(def session-components-schema
{:rulebase s/Any
:memory s/Any
@@ -130,10 +114,10 @@
accumulators only propagate bindings created and the result binding
downstream rather than all facts that were accumulated over, but there
are use-cases in session inspection where we want to retrieve the individual facts.
-
+
Example: [?min-temp <- (acc/min :temperature) :from [Temperature (= temperature ?loc)]]
[?windspeed <- [WindSpeed (= location ?loc)]]
-
+
Given a token propagated from the node for the WindSpeed condition
we could retrieve the Temperature facts from the matching location."))
@@ -210,12 +194,12 @@
"Returns a map from conditions to sets of rules."
([node]
(if-let [condition (when (satisfies? IConditionNode node)
- (get-condition-description node))]
+ (get-condition-description node))]
{condition (get-terminal-node-types node)}
(->> node
- :children
- (map get-conditions-and-rule-names)
- (reduce (partial merge-with into) {})))))
+ :children
+ (map get-conditions-and-rule-names)
+ (reduce (partial merge-with into) {})))))
;; Active session during rule execution.
(def ^:dynamic *current-session* nil)
@@ -446,7 +430,6 @@
ITerminalNode
(terminal-node-type [this] [:query (:name query)]))
-
(defn node-rule-names
[child-type node]
(->> node
@@ -470,7 +453,6 @@
(if (pos? (count names))
(str prefix plural ":\n" names-string "\n"))))
-
(defn- single-condition-message
[condition-number [condition-definition terminals]]
(let [productions (->> terminals
@@ -514,12 +496,13 @@
(defn- alpha-node-matches
[facts env activation node]
(platform/eager-for [fact facts
- :let [bindings (try (activation fact env)
- (catch #?(:clj Exception :cljs :default) e
- (throw-condition-exception {:cause e
- :node node
- :fact fact
- :env env})))]
+ :let [bindings (try
+ (activation fact env)
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :fact fact
+ :env env})))]
:when bindings] ; FIXME: add env.
[fact bindings]))
@@ -544,24 +527,22 @@
(let [fact-binding-pairs (alpha-node-matches facts env activation node)]
(l/alpha-retract! listener node (map first fact-binding-pairs))
(retract-elements
- transport
- memory
- listener
- children
- (platform/eager-for [[fact bindings] fact-binding-pairs]
- (->Element fact bindings))))))
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [[fact bindings] fact-binding-pairs]
+ (->Element fact bindings))))))
(defrecord RootJoinNode [id condition children binding-keys]
ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
+ (left-activate [node join-bindings tokens memory transport listener])
;; This specialized root node doesn't need to deal with the
;; empty token, so do nothing.
- )
- (left-retract [node join-bindings tokens memory transport listener]
+ (left-retract [node join-bindings tokens memory transport listener])
;; The empty token can't be retracted from the root node,
;; so do nothing.
- )
(get-join-keys [node] binding-keys)
@@ -572,8 +553,7 @@
(l/right-activate! listener node elements)
-
- ;; Add elements to the working memory to support analysis tools.
+;; Add elements to the working memory to support analysis tools.
(mem/add-elements! memory node join-bindings elements)
;; Simply create tokens and send it downstream.
(send-tokens
@@ -671,13 +651,13 @@
(defn- join-node-matches
[node join-filter-fn token fact fact-bindings env]
(let [beta-bindings (try (join-filter-fn token fact fact-bindings {})
- (catch #?(:clj Exception :cljs :default) e
- (throw-condition-exception {:cause e
- :node node
- :fact fact
- :env env
- :bindings (merge (:bindings token)
- fact-bindings)})))]
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :fact fact
+ :env env
+ :bindings (merge (:bindings token)
+ fact-bindings)})))]
beta-bindings))
(defrecord ExpressionJoinNode [id condition join-filter-fn children binding-keys]
@@ -847,7 +827,7 @@
;; Retract only if it previously had no matches in the negation node,
;; and therefore had an activation.
(let [elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token tokens
+ (platform/eager-for [token tokens
:when (not (matches-some-facts? node
token
elements
@@ -930,7 +910,7 @@
[node test-handler env token]
(let [test-result (try
(test-handler token env)
- (catch #?(:clj Exception :cljs :default) e
+ (catch Exception e
(throw-condition-exception {:cause e
:node node
:env env
@@ -980,8 +960,8 @@
new-bindings (merge (:bindings token)
fact-bindings
(when result-binding
- { result-binding
- converted-result}))]
+ {result-binding
+ converted-result}))]
(retract-tokens transport memory listener (:children node)
[(->Token new-facts new-bindings)])))
@@ -992,7 +972,7 @@
(let [new-bindings (merge (:bindings token)
fact-bindings
(when result-binding
- { result-binding
+ {result-binding
converted-result}))
;; This is to check that the produced accumulator result is
@@ -1211,8 +1191,7 @@
(send-accumulated node accum-condition accumulator result-binding token converted bindings
transport memory listener))
-
- ;; If there are previous results, then propagate downstream if the new result differs from
+;; If there are previous results, then propagate downstream if the new result differs from
;; the previous result. If the new result is equal to the previous result don't do
;; anything. Note that the memory has already been updated with the new combined value,
;; which may be needed if elements in memory changes later.
@@ -1819,31 +1798,29 @@
(flush-insertions! batched false))
(when-let [batched (seq retrieved-rhs-retractions)]
(flush-rhs-retractions! batched)))
- (catch #?(:clj Exception :cljs :default) e
-
+ (catch Exception e
;; If the rule fired an exception, help debugging by attaching
;; details about the rule itself, cached insertions, and any listeners
;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch #?(:clj Exception :cljs :default)
- listener-exception
- listener-exception))}
- e)))))
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e)))))
;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
;; will be in context for child rules.
@@ -1949,54 +1926,51 @@
get-alphas-fn
(uc/get-ordered-update-cache)))
- #?(:cljs (throw (ex-info "The :cancelling option is not supported in ClojureScript"
- {:session session :opts opts}))
-
- :clj (let [insertions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :insertion)))
- (mapcat :facts))
- pending-operations)
-
- retractions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :retraction)))
- (mapcat :facts))
- pending-operations)
-
- update-cache (ca/get-cancelling-update-cache)]
-
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener transient-listener}]
-
- ;; Insertions should come before retractions so that if we insert and then retract the same
- ;; fact that is not already in the session the end result will be that the session won't have that fact.
- ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
- ;; and then later we would insert the fact.
- (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
-
- (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
- root alpha-roots]
- (alpha-retract root fact-group transient-memory transport transient-listener))
-
- (fire-rules* rulebase
- (:production-nodes rulebase)
- transient-memory
- transport
- transient-listener
- get-alphas-fn
- ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
- ;; If this is suboptimal for some workflows we can revisit this.
- update-cache)))))
+ (let [insertions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :insertion)))
+ (mapcat :facts))
+ pending-operations)
+
+ retractions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :retraction)))
+ (mapcat :facts))
+ pending-operations)
+
+ update-cache (ca/get-cancelling-update-cache)]
+
+ (binding [*current-session* {:rulebase rulebase
+ :transient-memory transient-memory
+ :transport transport
+ :insertions (atom 0)
+ :get-alphas-fn get-alphas-fn
+ :pending-updates update-cache
+ :listener transient-listener}]
+
+ ;; Insertions should come before retractions so that if we insert and then retract the same
+ ;; fact that is not already in the session the end result will be that the session won't have that fact.
+ ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
+ ;; and then later we would insert the fact.
+ (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
+ root alpha-roots]
+ (alpha-activate root fact-group transient-memory transport transient-listener))
+
+ (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
+ root alpha-roots]
+ (alpha-retract root fact-group transient-memory transport transient-listener))
+
+ (fire-rules* rulebase
+ (:production-nodes rulebase)
+ transient-memory
+ transport
+ transient-listener
+ get-alphas-fn
+ ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
+ ;; If this is suboptimal for some workflows we can revisit this.
+ update-cache))))
(LocalSession. rulebase
(mem/to-persistent! transient-memory)
diff --git a/build/clara/rules/java.clj b/build/clara/rules/java.clj
new file mode 100644
index 00000000..533409cd
--- /dev/null
+++ b/build/clara/rules/java.clj
@@ -0,0 +1,50 @@
+(ns clara.rules.java
+ "This namespace is for internal use and may move in the future.
+ Java support. Users should use the Java API, or the clara.rules namespace from Clojure."
+ (:require [clara.rules :as clara]
+ [clara.rules.engine :as eng]
+ [clara.rules.compiler :as com])
+ (:refer-clojure :exclude [==])
+ (:import [clara.rules WorkingMemory QueryResult]))
+
+(deftype JavaQueryResult [result]
+ QueryResult
+ (getResult [_ fieldName]
+ (get result (keyword fieldName)))
+ Object
+ (toString [_]
+ (.toString result)))
+
+(defn- run-query [session name args]
+ (let [query-var (or (resolve (symbol name))
+ (throw (IllegalArgumentException.
+ (str "Unable to resolve symbol to query: " name))))
+
+ ;; Keywordize string keys from Java.
+ keyword-args (into {}
+ (for [[k v] args]
+ [(keyword k) v]))
+ results (eng/query session (deref query-var) keyword-args)]
+ (map #(JavaQueryResult. %) results)))
+
+(deftype JavaWorkingMemory [session]
+ WorkingMemory
+
+ (insert [this facts]
+ (JavaWorkingMemory. (apply clara/insert session facts)))
+
+ (retract [this facts]
+ (JavaWorkingMemory. (apply clara/retract session facts)))
+
+ (fireRules [this]
+ (JavaWorkingMemory. (clara/fire-rules session)))
+
+ (query [this name args]
+ (run-query session name args))
+
+ (query [this name]
+ (run-query session name {})))
+
+(defn mk-java-session [rulesets]
+ (JavaWorkingMemory.
+ (com/mk-session (map symbol rulesets))))
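+
+;; A minimal usage sketch; the ruleset namespace and query name below are
+;; hypothetical placeholders for a namespace containing clara rules and queries.
+(comment
+  (let [session (.fireRules (mk-java-session ["example.shopping-rules"]))]
+    ;; query returns JavaQueryResult instances whose fields are read by string key.
+    (map #(.getResult ^QueryResult % "total")
+         (.query session "example.shopping-rules/get-total"))))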
diff --git a/src/main/clojure/clara/rules/listener.cljc b/build/clara/rules/listener.clj
similarity index 99%
rename from src/main/clojure/clara/rules/listener.cljc
rename to build/clara/rules/listener.clj
index 0161e58a..a5b1368d 100644
--- a/src/main/clojure/clara/rules/listener.cljc
+++ b/build/clara/rules/listener.clj
@@ -94,7 +94,7 @@
(insert-facts! [listener node token facts]
(doseq [child children]
(insert-facts! child node token facts)))
-
+
(alpha-activate! [listener node facts]
(doseq [child children]
(alpha-activate! child node facts)))
@@ -106,7 +106,7 @@
(retract-facts! [listener node token facts]
(doseq [child children]
(retract-facts! child node token facts)))
-
+
(alpha-retract! [listener node facts]
(doseq [child children]
(alpha-retract! child node facts)))
diff --git a/build/clara/rules/memory.clj b/build/clara/rules/memory.clj
new file mode 100644
index 00000000..958234ba
--- /dev/null
+++ b/build/clara/rules/memory.clj
@@ -0,0 +1,909 @@
+(ns clara.rules.memory
+ "This namespace is for internal use and may move in the future.
+ Specification and default implementation of working memory"
+ (:import [java.util
+ Collections
+ LinkedList
+ NavigableMap
+ PriorityQueue
+ TreeMap]))
+
+(defprotocol IPersistentMemory
+ (to-transient [memory]))
+
+(defprotocol IMemoryReader
+ ;; Returns the rulebase associated with the given memory.
+ (get-rulebase [memory])
+
+ ;; Returns a function that produces a map of alpha nodes to
+ ;; facts that match the type of the node
+ (get-alphas-fn [memory])
+
+  ;; Returns the elements associated with the given node.
+ (get-elements [memory node bindings])
+
+ ;; Returns all elements associated with the given node, regardless of bindings.
+ (get-elements-all [memory node])
+
+ ;; Returns the tokens associated with the given node.
+ (get-tokens [memory node bindings])
+
+ ;; Returns all tokens associated with the given node, regardless of bindings
+ (get-tokens-all [memory node])
+
+ ;; Returns the reduced form of objects processed by an accumulator node
+ ;; for facts that match the given bindings.
+ (get-accum-reduced [memory node join-bindings fact-bindings])
+
+ ;; Returns all reduced results for the given node that matches
+ ;; the given join bindings, independent of the individual fact-bindings
+ ;; created by the accumulator's condition.
+ (get-accum-reduced-all [memory node join-bindings])
+
+ ;; Returns a tuple of [join-bindings fact-bindings result] for all
+ ;; accumulated items on this node.
+ (get-accum-reduced-complete [memory node])
+
+ ;; Returns insertions that occurred at the given node for the given token.
+ ;; Returns a sequence of the form
+ ;; [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]
+ (get-insertions [memory node token])
+
+ ;; Returns all insertions that occurred in the given node's RHS; this takes the form
+ ;; {token [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]}
+ (get-insertions-all [memory node])
+
+ ;; Returns a map of nodes with pending activations to the activations themselves.
+ (get-activations [memory]))
+
+(defprotocol ITransientMemory
+
+ ;; Adds working memory elements to the given working memory at the given node.
+ (add-elements! [memory node join-bindings elements])
+
+ ;; Remove working memory elements from the given working memory at the given node.
+ (remove-elements! [memory node elements join-bindings])
+
+ ;; Add tokens to the given working memory at the given node.
+ (add-tokens! [memory node join-bindings tokens])
+
+ ;; Removes tokens from the given working memory at the given node.
+ (remove-tokens! [memory node join-bindings tokens])
+
+ ;; Adds the result of a reduced accumulator execution to the given memory and node.
+ (add-accum-reduced! [memory node join-bindings accum-result fact-bindings])
+
+  ;; Removes the result of a reduced accumulator execution from the given memory and node.
+ (remove-accum-reduced! [memory node join-bindings fact-bindings])
+
+  ;; Add a record that a given fact was inserted at a given node with
+ ;; the given support. Used for truth maintenance.
+ ;; This should be called at most once per rule activation.
+ (add-insertions! [memory node token facts])
+
+ ;; Removes all records of facts that were inserted at the given node
+ ;; due to the given token. Used for truth maintenance.
+ ;; This function returns a map of each token to the associated facts
+ ;; it removed.
+ (remove-insertions! [memory node token])
+
+ ;; Add a sequence of activations.
+ (add-activations! [memory production activations])
+
+ ;; Pop an activation from the working memory. Returns nil if no
+ ;; activations are pending.
+ (pop-activation! [memory])
+
+ ;; Returns the group of the next activation, or nil if none are pending.
+ (next-activation-group [memory])
+
+ ;; Remove the given activations from the working memory. This is expected
+ ;; to return a tuple of the form [removed-activations unremoved-activations],
+ ;; where unremoved-activations is comprised of activations passed to the memory
+ ;; for removal but which were not removed because they were not present in the memory's
+ ;; store of pending activations.
+ (remove-activations! [memory production activations])
+
+ ;; Clear all activations from the working memory
+ (clear-activations! [memory])
+
+ ;; Converts the transient memory to persistent form.
+ (to-persistent! [memory]))
+
+(defn- coll-empty?
+ "Returns true if the collection is empty. Does not call seq due to avoid
+ overhead that may cause for non-persistent collection types, e.g.
+ java.util.LinkedList, etc."
+ [^java.util.Collection coll]
+ (or (nil? coll) (.isEmpty coll)))
+
+(defn- list-remove!
+ "Removes the item, to-remove, from the given list, lst. If it is found and removed,
+ returns true. Otherwise returns false. Only removes the first element in the list
+ that is equal to to-remove. Equality is done based on the given eq-pred function.
+ If it isn't given, the default is = . If others are equal, they will not be removed.
+ This is similar to java.util.List.remove(Object). lst is updated in place for performance.
+ This implies that the list must support the mutable list interface, namely via the
+ java.util.List.listIterator()."
+ ([^java.util.List lst to-remove]
+ (list-remove! lst to-remove =))
+ ([^java.util.List lst to-remove eq-pred]
+ (if-not (coll-empty? lst)
+ (let [li (.listIterator lst)]
+ (loop [x (.next li)]
+ (cond
+ (eq-pred to-remove x)
+ (do
+ (.remove li)
+ true)
+
+ (.hasNext li)
+ (recur (.next li))
+
+ :else
+ false)))
+ false)))
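+
+;; A small sanity-check sketch: only the first element equal to the item is
+;; removed, and the list is mutated in place.
+(comment
+  (let [lst (LinkedList. [1 2 3 2])]
+    ;; => [true [1 3 2]]
+    [(list-remove! lst 2) (vec lst)]))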
+
+(defn- add-all!
+ "Adds all items from source to the destination dest collection
+ destructively. Avoids using Collection.addAll() due to unnecessary
+ performance overhead of calling Collection.toArray() on the
+ incoming source. Returns dest."
+ [^java.util.Collection dest source]
+ (doseq [x source]
+ (.add dest x))
+ dest)
+
+(defn- ->linked-list
+ "Creates a new java.util.LinkedList from the coll, but avoids using
+  Collection.addAll(Collection) since that incurs the unnecessary overhead
+  of calling Collection.toArray() on coll."
+ ^java.util.List [coll]
+ (if (instance? LinkedList coll)
+ coll
+ (add-all! (LinkedList.) coll)))
+
+(defn- remove-first-of-each!
+ "Remove the first instance of each item in the given remove-seq that
+ appears in the collection coll. coll is updated in place for
+ performance. This implies that the coll must support the mutable
+ collection interface method Collection.remove(Object). Returns a tuple
+ of the form [remove-seq-items-removed remove-seq-items-not-removed].
+ An optional compare-fn can be given to specify how to compare remove-seq
+ items against items in coll. The default compare-fn is = .
+ For immutable collection removal, use the non-destructive remove-first-of-each
+ defined below."
+ ([remove-seq ^java.util.List coll]
+ (remove-first-of-each! remove-seq coll =))
+
+ ([remove-seq ^java.util.List coll compare-fn]
+ ;; Optimization for special case of one item to remove,
+ ;; which occurs frequently.
+ (if (= 1 (count remove-seq))
+ (let [to-remove (first remove-seq)]
+ (if (list-remove! coll to-remove compare-fn)
+ [remove-seq []]
+ [[] remove-seq]))
+
+ ;; Otherwise, perform a linear search for items to remove.
+ (loop [to-remove (first remove-seq)
+ remove-seq (next remove-seq)
+ removed (transient [])
+ not-removed (transient [])]
+ (if to-remove
+ (let [found? (list-remove! coll to-remove compare-fn)
+ removed (if found?
+ (conj! removed to-remove)
+ removed)
+ not-removed (if found?
+ not-removed
+ (conj! not-removed to-remove))]
+ (recur (first remove-seq)
+ (next remove-seq)
+ removed
+ not-removed))
+          ;; If this is expensive, using a mutable collection may be good to
+ ;; consider here in a future optimization.
+ [(persistent! removed) (persistent! not-removed)])))))
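+
+;; A small sketch of the destructive variant: items found in the mutable list are
+;; removed in place, and the tuple reports what was and was not removed.
+(comment
+  (let [lst (->linked-list [:a :b :c])]
+    ;; => [[[:a] [:x]] [:b :c]]
+    [(remove-first-of-each! [:a :x] lst) (vec lst)]))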
+
+(defn remove-first-of-each
+ "Remove the first instance of each item in the given remove-seq that
+ appears in the collection. This also tracks which items were found
+ and removed. Returns a tuple of the form:
+ [items-removed coll-with-items-removed items-not-removed]
+ This function does so eagerly since
+ the working memories with large numbers of insertions and retractions
+ can cause lazy sequences to become deeply nested."
+ [remove-seq coll]
+ (cond
+
+ ;; There is nothing to remove.
+ (empty? remove-seq) [[] coll]
+
+ ;; Otherwise, perform a linear search for items to remove.
+ :else (loop [f (first coll)
+ r (rest coll)
+ [remove-seq items-removed result] [remove-seq (transient []) (transient [])]]
+
+ (if f
+ (recur (first r)
+ (rest r)
+
+ ;; Determine if f matches any of the items to remove.
+ (loop [to-remove (first remove-seq)
+ remove-seq (rest remove-seq)
+ ;; Remember what is left to remove for later.
+ left-to-remove (transient [])]
+
+ ;; Try to find if f matches anything to-remove.
+ (if to-remove
+ (if (= to-remove f)
+
+ ;; Found a match, so the search is done.
+ [(persistent! (reduce conj! left-to-remove remove-seq))
+ (conj! items-removed to-remove)
+ result]
+
+ ;; Keep searching for a match.
+ (recur (first remove-seq)
+ (rest remove-seq)
+ (conj! left-to-remove to-remove)))
+
+ ;; No matches found.
+ [(persistent! left-to-remove)
+ items-removed
+ (conj! result f)])))
+
+ [(persistent! items-removed) (persistent! result) remove-seq]))))
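+
+;; A small sketch of the non-destructive variant: it returns the removed items,
+;; the remaining collection, and anything from remove-seq that was not found.
+(comment
+  ;; => [[:b] [:a :b :c] []]
+  (remove-first-of-each [:b] [:a :b :b :c]))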
+
+(defn fast-token-compare [compare-fn token other-token]
+ ;; Fastest path is if the two tokens are truly identical.
+ (or (identical? token other-token)
+ ;; Assumption is that both arguments given are tokens already.
+ (and (let [bindings (:bindings token)
+ other-bindings (:bindings other-token)]
+ ;; Calling `count` on these Clojure maps shows up as a bottleneck
+ ;; even with clojure.lang.IPersistentMap being clojure.lang.Counted unfortunately.
+ (and (= (.size ^java.util.Map bindings)
+ (.size ^java.util.Map other-bindings))
+ ;; `every?` is too slow for a performance critical place like this. It
+ ;; calls `seq` too many times on the underlying maps. Instead `seq` one
+ ;; time and keep using that same seq.
+ ;; Also avoiding Clojure destructuring since even that is not as fast
+ ;; pre-1.9.0.
+ (if-let [^clojure.lang.ISeq entries (.seq ^clojure.lang.Seqable bindings)]
+ ;; Type hint to Indexed vs MapEntry just because MapEntry seems to be a
+ ;; less stable impl detail to rely on.
+ (loop [^clojure.lang.Indexed entry (.first entries)
+ entries (.next entries)]
+ (let [k (some-> entry (.nth 0))
+ v (some-> entry (.nth 1))]
+ (if (and k
+ ;; other-bindings will always be persistent map so invoke
+ ;; it directly. It is faster than `get`.
+ (compare-fn v (other-bindings k)))
+ (recur (some-> entries .first)
+ (some-> entries .next))
+ ;; If there is no k left, then every entry matched. If there is a k,
+ ;; that means the comparison failed, so the maps aren't equal.
+ (not k))))
+ ;; Empty bindings on both sides.
+ true)))
+
+ ;; Check the :matches on each token. :matches need to be in the same order on both
+ ;; tokens to be considered the same.
+ (let [^clojure.lang.Indexed matches (:matches token)
+ ^clojure.lang.Indexed other-matches (:matches other-token)
+ count-matches (.size ^java.util.List matches)]
+ (and (= count-matches
+ (.size ^java.util.List other-matches))
+ (loop [i 0]
+ (cond
+ (= i count-matches)
+ true
+
+ ;; Compare node-id's first. Fallback to comparing facts. This will
+ ;; most very likely be the most expensive part to execute.
+ (let [^clojure.lang.Indexed m (.nth matches i)
+ ^clojure.lang.Indexed om (.nth other-matches i)]
+ ;; A token :matches tuple is of the form [fact node-id].
+ (and (= (.nth m 1) (.nth om 1))
+ (compare-fn (.nth m 0) (.nth om 0))))
+ (recur (inc i))
+
+ :else
+ false)))))))
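+
+;; A minimal sketch using plain maps as stand-ins for engine tokens: the bindings
+;; must match under compare-fn and the :matches must align pairwise, in order.
+(comment
+  (let [t1 {:bindings {:?t 10} :matches [[{:temperature 10} 1]]}
+        t2 {:bindings {:?t 10} :matches [[{:temperature 10} 1]]}]
+    ;; => true
+    (fast-token-compare = t1 t2)))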
+
+(defprotocol IdentityComparable
+ (using-token-identity! [this bool]))
+
+(deftype RuleOrderedActivation [node-id
+ token
+ activation
+ rule-load-order
+ ^:unsynchronized-mutable use-token-identity?]
+ IdentityComparable
+ ;; NOTE! This should never be called on a RuleOrderedActivation instance that has been stored
+ ;; somewhere in local memory because it could cause interference across threads that have
+ ;; multiple versions of local memories that are sharing some of their state. This is only intended
+ ;; to be called by ephemeral, only-local references to RuleOrderedActivation instances used to
+ ;; search for activations to remove from memory when performing `remove-activations!` operations.
+ ;; The reason this mutable state exists at all is to "flip" a single instance of a RuleOrderedActivation
+ ;; from identity to value equality based comparable when doing the "two-pass" removal search operation
+ ;; of `remove-activations!`. This avoids having to create different instances for each pass.
+ (using-token-identity! [this bool]
+ (set! use-token-identity? bool)
+ this)
+ Object
+ ;; Two RuleOrderedActivation instances should be equal if and only if their
+ ;; activation is equal. Note that if the node of two activations is the same,
+ ;; their rule-load-order must be the same. Using a deftype wrapper allows us to
+ ;; use Clojure equality to determine this while placing the wrapper in a Java data
+ ;; structure that uses Java equality; the Java equality check will simply end up calling
+ ;; Clojure equality checks.
+ (equals [this other]
+ ;; Note that the .equals method is only called by PriorityQueue.remove, and the object provided
+ ;; to the .remove method will never be identical to any object in the queue. A short-circuiting
+ ;; check for reference equality would therefore be pointless here because it would never be true.
+ (boolean
+ (when (instance? RuleOrderedActivation other)
+ (let [^RuleOrderedActivation other other]
+ (and
+ ;; If the node-id of two nodes is equal then we can assume that the nodes are equal.
+ (= node-id
+ (.node-id other))
+
+ ;; We check with identity based semantics on the other when the use-token-identity? field
+ ;; indicates to do so.
+ (if (or use-token-identity? (.-use-token-identity? other))
+ (fast-token-compare identical? token (.-token other))
+ (fast-token-compare = token (.-token other)))))))))
+
+(defn- ->rule-ordered-activation
+ "Take an activation from the engine and wrap it in a map that includes information
+ on the rule load order. In Clojure, as opposed to ClojureScript, each activation should
+ be wrapped in this way exactly once (that is, the value of the :activation key should
+ be an activation from the engine.)"
+ ([activation]
+ (->rule-ordered-activation activation false))
+ ([activation use-token-identity?]
+ (let [node (:node activation)]
+ (RuleOrderedActivation. (:id node)
+ (:token activation)
+ activation
+ (or (-> node
+ :production
+ meta
+ :clara.rules.compiler/rule-load-order)
+ 0)
+ use-token-identity?))))
+
+(defn- queue-activations!
+ "Add activations to a queue. The wrap-rule-order? option should be true
+ unless the activations in question have previously been wrapped."
+ ([^java.util.Queue pq activations]
+ (queue-activations! pq activations true))
+ ([^java.util.Queue pq activations wrap-rule-order?]
+ (if wrap-rule-order?
+ (doseq [act activations]
+ (.add pq (->rule-ordered-activation act)))
+ (doseq [act activations]
+ (.add pq act)))
+ pq))
+
+(defn- ->activation-priority-queue
+ "Given activations, create a priority queue based on rule ordering.
+ The activations should be wrapped by using the wrap-rule-order? option
+ if they have not been wrapped already."
+ ([activations]
+ (->activation-priority-queue activations true))
+ ([activations wrap-rule-order?]
+ (let [init-cnt (count activations)
+ ;; Note that 11 is the default initial value; there is no constructor
+ ;; for PriorityQueue that takes a custom comparator and does not require
+ ;; an initial size to be passed.
+ pq (PriorityQueue. (if (pos? init-cnt) init-cnt 11)
+ (fn [^RuleOrderedActivation x
+ ^RuleOrderedActivation y]
+ (compare (.rule-load-order x)
+ (.rule-load-order y))))]
+ (queue-activations! pq activations wrap-rule-order?))))
+
+(declare ->PersistentLocalMemory)
+
+;;; Transient local memory implementation. Typically only persistent memory will be visible externally.
+
+(deftype TransientLocalMemory [rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ ^:unsynchronized-mutable alpha-memory
+ ^:unsynchronized-mutable beta-memory
+ ^:unsynchronized-mutable accum-memory
+ ^:unsynchronized-mutable production-memory
+ ^:unsynchronized-mutable ^NavigableMap activation-map]
+
+ IMemoryReader
+ (get-rulebase [memory] rulebase)
+
+ (get-alphas-fn [memory] alphas-fn)
+
+ (get-elements [memory node bindings]
+ (get (get alpha-memory (:id node) {})
+ bindings
+ []))
+
+ (get-elements-all [memory node]
+ (sequence
+ cat
+ (vals
+ (get alpha-memory (:id node) {}))))
+
+ (get-tokens [memory node bindings]
+ (get (get beta-memory (:id node) {})
+ bindings
+ []))
+
+ (get-tokens-all [memory node]
+ (sequence
+ cat
+ (vals (get beta-memory (:id node) {}))))
+
+ (get-accum-reduced [memory node join-bindings fact-bindings]
+ (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
+
+ (get-accum-reduced-all [memory node join-bindings]
+ (get
+ (get accum-memory (:id node) {})
+ join-bindings))
+
+ ;; TODO: rename existing get-accum-reduced-all and use something better here.
+ (get-accum-reduced-complete [memory node]
+ (for [[join-binding joins] (get accum-memory (:id node) {})
+ [fact-binding reduced] joins]
+ {:join-bindings join-binding
+ :fact-bindings fact-binding
+ :result reduced}))
+
+ (get-insertions [memory node token]
+ (get
+ (get production-memory (:id node) {})
+ token
+ []))
+
+ (get-insertions-all [memory node]
+ (get production-memory (:id node) {}))
+
+ (get-activations [memory]
+ (into []
+ (comp cat
+ (map (fn [^RuleOrderedActivation a]
+ (.activation a))))
+ (vals activation-map)))
+
+ ITransientMemory
+ (add-elements! [memory node join-bindings elements]
+ (let [binding-element-map (get alpha-memory (:id node) {})
+ previous-elements (get binding-element-map join-bindings)]
+
+ (cond
+ ;; When changing existing persistent collections, just add on
+ ;; the new elements.
+ (coll? previous-elements)
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (assoc binding-element-map
+ join-bindings
+ (into previous-elements elements))))
+
+ ;; Already mutable, so update-in-place.
+ previous-elements
+ (add-all! previous-elements elements)
+
+ ;; No previous. We can just leave it persistent if it is
+ ;; until we actually need to modify anything. This avoids
+ ;; unnecessary copying.
+ elements
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (assoc binding-element-map
+ join-bindings
+ elements))))))
+
+ (remove-elements! [memory node join-bindings elements]
+ ;; Do nothing when no elements to remove.
+ (when-not (coll-empty? elements)
+ (let [binding-element-map (get alpha-memory (:id node) {})
+ previous-elements (get binding-element-map join-bindings)]
+ (cond
+ ;; Do nothing when no previous elements to remove from.
+ (coll-empty? previous-elements)
+ []
+
+ ;; Convert persistent collection to a mutable one prior to calling remove-first-of-each!
+ ;; alpha-memory needs to be updated this time since there is now going to be a mutable
+ ;; collection associated in this memory location instead.
+ (coll? previous-elements)
+ (let [remaining-elements (->linked-list previous-elements)
+ removed-elements (first (remove-first-of-each! elements remaining-elements))]
+ ;; If there are no remaining elements under a binding group for the node remove the binding group.
+ ;; This allows these binding values to be garbage collected.
+ (let [new-bindings-map (if (.isEmpty ^java.util.List remaining-elements)
+ (dissoc binding-element-map join-bindings)
+ (assoc binding-element-map
+ join-bindings
+ remaining-elements))]
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ new-bindings-map))
+ removed-elements))
+
+ ;; Already mutable, so we do not need to re-associate to alpha-memory.
+ previous-elements
+ (let [removed-elements (first (remove-first-of-each! elements previous-elements))]
+ (when (.isEmpty ^java.util.List previous-elements)
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (dissoc binding-element-map join-bindings))))
+ removed-elements)))))
+
+ (add-tokens! [memory node join-bindings tokens]
+ (let [binding-token-map (get beta-memory (:id node) {})
+ previous-tokens (get binding-token-map join-bindings)]
+ ;; The reasoning here is the same as in add-elements! impl above.
+ (cond
+ (coll? previous-tokens)
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (assoc binding-token-map
+ join-bindings
+ (into previous-tokens tokens))))
+
+ previous-tokens
+ (add-all! previous-tokens tokens)
+
+ tokens
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (assoc binding-token-map
+ join-bindings
+ tokens))))))
+
+ (remove-tokens! [memory node join-bindings tokens]
+ ;; The reasoning here is the same as remove-elements!
+ (when-not (coll-empty? tokens)
+ (let [binding-token-map (get beta-memory (:id node) {})
+ previous-tokens (get binding-token-map join-bindings)]
+ (if
+ (coll-empty? previous-tokens)
+ []
+
+        (let [;; Attempt to remove tokens using the faster identity-based equality first since
+ ;; most of the time this is all we need and it can be much faster. Any token that
+ ;; wasn't removed via identity, has to be "retried" with normal value-based
+ ;; equality though since those semantics are supported within the engine. This
+ ;; slower path should be rare for any heavy retraction flows - such as those that come
+ ;; via truth maintenance.
+ two-pass-remove! (fn [remaining-tokens tokens]
+ (let [[removed-tokens not-removed-tokens]
+ (remove-first-of-each! tokens
+ remaining-tokens
+ (fn [t1 t2]
+ (fast-token-compare identical? t1 t2)))]
+
+ (if-let [other-removed (and (seq not-removed-tokens)
+ (-> not-removed-tokens
+ (remove-first-of-each! remaining-tokens
+ (fn [t1 t2]
+ (fast-token-compare = t1 t2)))
+ first
+ seq))]
+ (into removed-tokens other-removed)
+ removed-tokens)))]
+ (cond
+ (coll? previous-tokens)
+ (let [remaining-tokens (->linked-list previous-tokens)
+ removed-tokens (two-pass-remove! remaining-tokens tokens)
+ new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
+ (dissoc binding-token-map join-bindings)
+ (assoc binding-token-map join-bindings remaining-tokens))]
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ new-tokens-map))
+ removed-tokens)
+
+ previous-tokens
+ (let [removed-tokens (two-pass-remove! previous-tokens tokens)]
+ (when (.isEmpty ^java.util.List previous-tokens)
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (dissoc binding-token-map join-bindings))))
+
+ removed-tokens)))))))
+
+ (add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
+ (set! accum-memory
+ (assoc! accum-memory
+ (:id node)
+ (assoc-in (get accum-memory (:id node) {})
+ [join-bindings fact-bindings]
+ accum-result))))
+
+ (remove-accum-reduced! [memory node join-bindings fact-bindings]
+ (let [node-id (:id node)
+ node-id-mem (get accum-memory node-id {})
+ join-mem (dissoc (get node-id-mem join-bindings) fact-bindings)
+ node-id-mem (if (empty? join-mem)
+ (dissoc node-id-mem join-bindings)
+ (assoc node-id-mem join-bindings join-mem))]
+ (set! accum-memory
+ (if (empty? node-id-mem)
+ (dissoc! accum-memory
+ node-id)
+ (assoc! accum-memory
+ node-id
+ node-id-mem)))))
+
+ ;; The value under each token in the map should be a sequence
+ ;; of sequences of facts, with each inner sequence coming from a single
+ ;; rule activation.
+ (add-insertions! [memory node token facts]
+ (let [token-facts-map (get production-memory (:id node) {})]
+ (set! production-memory
+ (assoc! production-memory
+ (:id node)
+ (update token-facts-map token conj facts)))))
+
+ (remove-insertions! [memory node tokens]
+
+ ;; Remove the facts inserted from the given token.
+ (let [token-facts-map (get production-memory (:id node) {})
+ ;; Get removed tokens for the caller.
+ [results
+ new-token-facts-map]
+
+ (loop [results (transient {})
+ token-map (transient token-facts-map)
+ to-remove tokens]
+ (if-let [head-token (first to-remove)]
+ ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
+ (if-let [token-insertions (get token-map head-token)]
+ (let [;; There is no particular significance in removing the
+ ;; first group; we just need to remove exactly one.
+ [removed-facts & remaining-facts] token-insertions
+ removed-insertion-map (if (not-empty remaining-facts)
+ (assoc! token-map head-token remaining-facts)
+ (dissoc! token-map head-token))
+ prev-token-result (get results head-token [])]
+ (recur (assoc! results head-token (into prev-token-result removed-facts))
+ removed-insertion-map
+ (rest to-remove)))
+ ;; If the token isn't present in the insertions just try the next one.
+ (recur results token-map (rest to-remove)))
+ [(persistent! results)
+ (persistent! token-map)]))]
+
+ ;; Clear the tokens and update the memory.
+ (set! production-memory
+ (if (not-empty new-token-facts-map)
+ (assoc! production-memory
+ (:id node)
+ new-token-facts-map)
+ (dissoc! production-memory (:id node))))
+ results))
+
+ (add-activations!
+ [memory production new-activations]
+ (let [activation-group (activation-group-fn production)
+ previous (.get activation-map activation-group)]
+ ;; The reasoning here is the same as in add-elements! impl above.
+ (cond
+ previous
+ (queue-activations! previous new-activations)
+
+ (not (coll-empty? new-activations))
+ (.put activation-map
+ activation-group
+ (->activation-priority-queue new-activations)))))
+
+ (pop-activation!
+ [memory]
+ (when (not (.isEmpty activation-map))
+ (let [entry (.firstEntry activation-map)
+ key (.getKey entry)
+ ^java.util.Queue value (.getValue entry)
+
+ ;; An empty value is illegal and should be removed by an action
+ ;; that creates one (e.g. a remove-activations! call).
+ ^RuleOrderedActivation activation (.remove value)]
+
+ ;; This activation group is empty now, so remove it from
+ ;; the map entirely.
+ (when (.isEmpty value)
+ (.remove activation-map key))
+
+ ;; Return the selected activation.
+ (.activation activation))))
+
+ (next-activation-group
+ [memory]
+ (when (not (.isEmpty activation-map))
+ (let [entry (.firstEntry activation-map)]
+ (.getKey entry))))
+
+ (remove-activations! [memory production to-remove]
+ (when-not (coll-empty? to-remove)
+ (let [activation-group (activation-group-fn production)
+ ^java.util.Queue activations (.get activation-map activation-group)
+ removed-activations (LinkedList.)
+ unremoved-activations (LinkedList.)]
+
+ (if (coll-empty? activations)
+ ;; If there are no activations present under the group
+ ;; then we can't remove any.
+ [[] to-remove]
+ ;; Remove as many activations by identity as possible first.
+ (let [not-removed (loop [to-remove-item (first to-remove)
+ to-remove (next to-remove)
+ not-removed (transient [])]
+ (if to-remove-item
+ (let [^RuleOrderedActivation act (->rule-ordered-activation to-remove-item true)]
+ (if (.remove activations act)
+ (do
+ (.add removed-activations (.activation act))
+ ;; The activations that are not removed in the identity checking sweep
+ ;; are a superset of the activations that are removed after the equality
+ ;; sweep finishes so we can just create the list of unremoved activations
+ ;; during that sweep.
+ (recur (first to-remove) (next to-remove) not-removed))
+ (recur (first to-remove) (next to-remove) (conj! not-removed act))))
+ (persistent! not-removed)))]
+
+ ;; There may still be activations not removed since the removal may be based on value-based
+ ;; equality semantics. Retractions in the engine do not require that identical object references
+ ;; are given to remove object values that are equal.
+ (doseq [^RuleOrderedActivation act not-removed]
+ (if (.remove activations (using-token-identity! act false))
+ (.add removed-activations (.activation act))
+ (.add unremoved-activations (.activation act))))
+
+ (when (coll-empty? activations)
+ (.remove activation-map activation-group))
+
+ [(Collections/unmodifiableList removed-activations)
+ (Collections/unmodifiableList unremoved-activations)])))))
+
+ (clear-activations!
+ [memory]
+ (.clear activation-map))
+
+ (to-persistent! [memory]
+ (let [->persistent-coll #(if (coll? %)
+ %
+ (seq %))
+ update-vals (fn [m update-fn]
+ (->> m
+ (reduce-kv (fn [m k v]
+ (assoc! m k (update-fn v)))
+ (transient m))
+ persistent!))
+ persistent-vals #(update-vals % ->persistent-coll)]
+ (->PersistentLocalMemory rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ (update-vals (persistent! alpha-memory) persistent-vals)
+ (update-vals (persistent! beta-memory) persistent-vals)
+ (persistent! accum-memory)
+ (persistent! production-memory)
+ (into {}
+ (map (juxt key (comp ->persistent-coll val)))
+ activation-map)))))
+
+(defrecord PersistentLocalMemory [rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ alpha-memory
+ beta-memory
+ accum-memory
+ production-memory
+ activation-map]
+ IMemoryReader
+ (get-rulebase [memory] rulebase)
+
+ (get-alphas-fn [memory] alphas-fn)
+
+ (get-elements [memory node bindings]
+ (get (get alpha-memory (:id node) {})
+ bindings
+ []))
+
+ (get-elements-all [memory node]
+ (sequence
+ cat
+ (vals
+ (get alpha-memory (:id node) {}))))
+
+ (get-tokens [memory node bindings]
+ (get (get beta-memory (:id node) {})
+ bindings
+ []))
+
+ (get-tokens-all [memory node]
+ (sequence
+ cat
+ (vals (get beta-memory (:id node) {}))))
+
+ (get-accum-reduced [memory node join-bindings fact-bindings]
+ ;; nil is a valid previously reduced value that can be found in the map.
+ ;; Return ::no-accum-reduced instead of nil when there is no previously
+ ;; reduced value in memory.
+ (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
+
+ (get-accum-reduced-all [memory node join-bindings]
+ (get
+ (get accum-memory (:id node) {})
+ join-bindings))
+
+ (get-accum-reduced-complete [memory node]
+ (for [[join-binding joins] (get accum-memory (:id node) {})
+ [fact-binding reduced] joins]
+ {:join-bindings join-binding
+ :fact-bindings fact-binding
+ :result reduced}))
+
+ (get-insertions [memory node token]
+ (get
+ (get production-memory (:id node) {})
+ token
+ []))
+
+ (get-insertions-all [memory node]
+ (get production-memory (:id node) {}))
+
+ (get-activations [memory]
+ (into []
+ (comp cat
+ (map (fn [^RuleOrderedActivation a]
+ (.activation a))))
+ (vals activation-map)))
+
+ IPersistentMemory
+ (to-transient [memory]
+ (TransientLocalMemory. rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ (transient alpha-memory)
+ (transient beta-memory)
+ (transient accum-memory)
+ (transient production-memory)
+ (let [treemap (TreeMap. ^java.util.Comparator activation-group-sort-fn)]
+ (doseq [[activation-group activations] activation-map]
+ (.put treemap
+ activation-group
+ (->activation-priority-queue activations false)))
+ treemap))))
+
+(defn local-memory
+ "Creates an persistent local memory for the given rule base."
+ [rulebase activation-group-sort-fn activation-group-fn alphas-fn]
+
+ (->PersistentLocalMemory rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ {}
+ {}
+ {}
+ {}
+ {}))
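+
+;; A minimal construction sketch; a real rulebase, grouping functions and alpha
+;; function are produced by the compiler, so empty placeholders are used here.
+(comment
+  (-> (local-memory {} compare :salience (constantly {}))
+      to-transient
+      to-persistent!))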
diff --git a/build/clara/rules/platform.clj b/build/clara/rules/platform.clj
new file mode 100644
index 00000000..36a88b55
--- /dev/null
+++ b/build/clara/rules/platform.clj
@@ -0,0 +1,93 @@
+(ns clara.rules.platform
+ "This namespace is for internal use and may move in the future.
+ Platform unified code Clojure/ClojureScript."
+ (:import [java.lang IllegalArgumentException]
+ [java.util LinkedHashMap]))
+
+(defn throw-error
+ "Throw an error with the given description string."
+ [^String description]
+ (throw (IllegalArgumentException. description)))
+
+(defn query-param
+ "Coerces a query param to a parameter keyword such as :?param, if an unsupported type is
+ supplied then an exception will be thrown"
+ [p]
+ (cond
+ (keyword? p) p
+ (symbol? p) (keyword p)
+ :else
+ (throw-error (str "Query bindings must be specified as a keyword or symbol: " p))))
+
+;; This class wraps Clojure objects to ensure Clojure's equality and hash
+;; semantics are visible to Java code. This allows these Clojure objects
+;; to be safely used in things like Java Sets or Maps.
+;; This class also accepts and stores the hash code, since it almost always
+;; will be used once and generally more than once.
+(deftype JavaEqualityWrapper [wrapped ^int hash-code]
+
+ Object
+ (equals [this other]
+ (and (instance? JavaEqualityWrapper other)
+ (= wrapped (.wrapped ^JavaEqualityWrapper other))))
+
+ (hashCode [this]
+ hash-code))
+
+(defn group-by-seq
+ "Groups the items of the given coll by f to each item. Returns a seq of tuples of the form
+ [f-val xs] where xs are items from the coll and f-val is the result of applying f to any of
+ those xs. Each x in xs has the same value (f x). xs will be in the same order as they were
+ found in coll.
+   The behavior is similar to calling `(seq (group-by f coll))`. However, the returned seq will
+   always have consistent ordering from process to process. The ordering is insertion order
+   as new (f x) values are found while traversing the given coll in its seq order. The
+ returned order is made consistent to ensure that relevant places within the rules engine that
+ use this grouping logic have deterministic behavior across different processes."
+ [f coll]
+ (let [^java.util.Map m (reduce (fn [^java.util.Map m x]
+ (let [k (f x)
+ ;; Use Java's hashcode for performance reasons as
+ ;; discussed at https://github.com/cerner/clara-rules/issues/393
+ wrapper (JavaEqualityWrapper. k
+ (if (nil? k)
+ (int 0)
+ (int (.hashCode ^Object k))))
+ xs (or (.get m wrapper)
+ (transient []))]
+ (.put m wrapper (conj! xs x)))
+ m)
+ (LinkedHashMap.)
+ coll)
+ it (.iterator (.entrySet m))]
+ ;; Explicitly iterate over a Java iterator in order to avoid running into issues as
+ ;; discussed in http://dev.clojure.org/jira/browse/CLJ-1738
+ (loop [coll (transient [])]
+ (if (.hasNext it)
+ (let [^java.util.Map$Entry e (.next it)]
+ (recur (conj! coll [(.wrapped ^JavaEqualityWrapper (.getKey e)) (persistent! (.getValue e))])))
+ (persistent! coll)))))
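+
+;; For example, grouping preserves the order in which each (f x) value is first seen:
+(comment
+  ;; => [[true [1 3]] [false [2 4]]]
+  (group-by-seq odd? [1 2 3 4]))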
+
+(defmacro thread-local-binding
+ "Wraps given body in a try block, where it sets each given ThreadLocal binding
+ and removes it in finally block."
+ [bindings & body]
+ (when-not (vector? bindings)
+ (throw (ex-info "Binding needs to be a vector."
+ {:bindings bindings})))
+ (when-not (even? (count bindings))
+ (throw (ex-info "Needs an even number of forms in binding vector"
+ {:bindings bindings})))
+ (let [binding-pairs (partition 2 bindings)]
+ `(try
+ ~@(for [[tl v] binding-pairs]
+ `(.set ~tl ~v))
+ ~@body
+ (finally
+ ~@(for [[tl] binding-pairs]
+ `(.remove ~tl))))))
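+
+;; A minimal usage sketch; the ThreadLocal below is a hypothetical example value.
+(comment
+  (let [tl (ThreadLocal.)]
+    ;; => 42, and the value is removed from tl once the body completes.
+    (thread-local-binding [tl 42]
+      (.get tl))))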
+
+(defmacro eager-for
+ "A for wrapped with a doall to force realisation. Usage is the same as regular for."
+ [& body]
+ `(doall (for ~@body)))
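+
+;; For example:
+(comment
+  ;; => (1 4 9), fully realized rather than lazy
+  (eager-for [x [1 2 3]] (* x x)))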
diff --git a/src/main/clojure/clara/rules/schema.cljc b/build/clara/rules/schema.clj
similarity index 97%
rename from src/main/clojure/clara/rules/schema.cljc
rename to build/clara/rules/schema.clj
index c9e646ff..50669daf 100644
--- a/src/main/clojure/clara/rules/schema.cljc
+++ b/build/clara/rules/schema.clj
@@ -3,21 +3,19 @@
for the underlying Rete network itself. This can be used by tools or other libraries working with rules."
(:require [schema.core :as s]))
-
(s/defn condition-type :- (s/enum :or :not :and :exists :fact :accumulator :test)
"Returns the type of node in a LHS condition expression."
[condition]
(if (map? condition) ; Leaf nodes are maps, per the schema
(cond
- (:type condition) :fact
- (:accumulator condition) :accumulator
- :else :test)
+ (:type condition) :fact
+ (:accumulator condition) :accumulator
+ :else :test)
     ;; Otherwise the node must be a sequential that starts with the boolean operator.
(first condition)))
-
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Rule and query structure schema.
@@ -30,8 +28,7 @@
;; Original constraints preserved for tooling in case a transformation was applied to the condition.
(s/optional-key :original-constraints) [SExpr]
(s/optional-key :fact-binding) s/Keyword
- (s/optional-key :args) s/Any
- })
+ (s/optional-key :args) s/Any})
(def AccumulatorCondition
{:accumulator s/Any
@@ -148,7 +145,7 @@
;; Map of identifier to condition nodes.
:id-to-condition-node {s/Int (s/cond-pre (s/eq :clara.rules.compiler/root-condition)
- ConditionNode)}
+ ConditionNode)}
;; Map of identifier to query or rule nodes.
:id-to-production-node {s/Int ProductionNode}
diff --git a/build/clara/rules/test_rules_data.clj b/build/clara/rules/test_rules_data.clj
new file mode 100644
index 00000000..8239fb4a
--- /dev/null
+++ b/build/clara/rules/test_rules_data.clj
@@ -0,0 +1,37 @@
+;;; This namespace exists for testing purposes only, and is temporarily placed under src/main/clojure/clara
+;;; due to issues with the CLJS test environment. Move to test/common/clara when this issue is resolved
+;;; and tests can be compiled and run with this file in that location.
+;;; See issue #288 for further info (https://github.com/cerner/clara-rules/issues/388).
+
+(ns clara.rules.test-rules-data
+ (:require [clara.rules]
+ [clara.rules.testfacts]))
+
+(def the-rules
+ [{:doc "Rule to determine whether it is indeed cold and windy."
+ :name "clara.rules.test-rules-data/is-cold-and-windy-data"
+ :lhs [{:type clara.rules.testfacts.Temperature
+ :constraints '[(< temperature 20)
+ (== ?t temperature)]}
+ {:type clara.rules.testfacts.WindSpeed
+ :constraints '[(> windspeed 30)
+ (== ?w windspeed)]}]
+ :rhs '(clara.rules/insert! (clara.rules.testfacts/->ColdAndWindy ?t ?w))}
+
+ {:name "clara.rules.test-rules-data/find-cold-and-windy-data"
+ :lhs [{:fact-binding :?fact
+ :type clara.rules.testfacts.ColdAndWindy
+ :constraints []}]
+ :params #{}}])
+
+(defn weather-rules
+ "Return some weather rules"
+ []
+ the-rules)
+
+(def the-rules-with-keyword-names (mapv #(update % :name keyword) the-rules))
+
+(defn weather-rules-with-keyword-names
+ "Return some weather rules using keyword names"
+ []
+ the-rules-with-keyword-names)
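+
+;; A usage sketch, assuming these data-defined rules are passed directly to
+;; mk-session as a rule source and that the query is referenced by its name;
+;; the fact constructor argument order is assumed from the test facts.
+(comment
+  (-> (clara.rules/mk-session (weather-rules))
+      (clara.rules/insert (clara.rules.testfacts/->Temperature 10 "MCI")
+                          (clara.rules.testfacts/->WindSpeed 40 "MCI"))
+      (clara.rules/fire-rules)
+      (clara.rules/query "clara.rules.test-rules-data/find-cold-and-windy-data")))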
diff --git a/src/main/clojure/clara/rules/testfacts.cljc b/build/clara/rules/testfacts.clj
similarity index 99%
rename from src/main/clojure/clara/rules/testfacts.cljc
rename to build/clara/rules/testfacts.clj
index 41fff1d7..f11b2ac9 100644
--- a/src/main/clojure/clara/rules/testfacts.cljc
+++ b/build/clara/rules/testfacts.clj
@@ -11,7 +11,6 @@
(defrecord LousyWeather [])
(defrecord TemperatureHistory [temperatures])
-
;; Test facts for chained rules.
(defrecord First [])
(defrecord Second [])
diff --git a/build/clara/rules/update_cache/cancelling.clj b/build/clara/rules/update_cache/cancelling.clj
new file mode 100644
index 00000000..de506c3d
--- /dev/null
+++ b/build/clara/rules/update_cache/cancelling.clj
@@ -0,0 +1,146 @@
+(ns clara.rules.update-cache.cancelling
+ (:require [clara.rules.update-cache.core :as uc])
+ (:import [java.util
+ List
+ Map
+ LinkedList
+ LinkedHashMap
+ Collections]))
+
+;;; We need a wrapper to use Clojure equality semantics inside
+;;; a Java collection. Furthermore, since we know we will need to do
+;;; a hash operation for each such wrapper created anyway, we can ensure
+;;; that if the hashes of two facts are not equal, the equals implementation
+;;; here will quickly return false.
+(deftype FactWrapper [fact ^int fact-hash]
+
+ Object
+ (equals [this other]
+
+ (cond
+
+ ;; There are some cases where the inserted and retracted facts could be identical, particularly
+ ;; if user code in the RHS has caches, so we go ahead and check for identity as a first-pass check,
+    ;; but there probably aren't enough cases where the facts are identical to make doing a full sweep
+ ;; on identity first worthwhile, particularly since in practice the hash check will make the vast majority
+ ;; of .equals calls that return false quite fast.
+ (identical? fact (.fact ^FactWrapper other))
+ true
+
+ (not (== fact-hash (.fact_hash ^FactWrapper other)))
+ false
+
+ :else (= fact (.fact ^FactWrapper other))))
+
+ (hashCode [this] fact-hash))
+
+;;; These functions essentially allow us to use a Java map to create a set that stores
+;;; the frequency of its items. Note that when multiple instances of a fact are added
+;;; we keep both instances as distinct objects. We don't strictly speaking need to do this
+;;; but we expect it to perform better. The memory will retain both distinct references
+;;; and future updates are expected to be faster if these references are maintained since
+;;; memory operations look for matches on identity first in tokens before falling back to matching
+;;; on equality.
+(defn inc-fact-count! [^Map m fact]
+ (let [wrapper (FactWrapper. fact (hash fact))
+ ^List current-val (.get m wrapper)]
+ (if current-val
+ (.add current-val fact)
+ (.put m wrapper (LinkedList. [fact])))))
+
+(defn dec-fact-count! [^Map m fact]
+ (let [wrapper (FactWrapper. fact (hash fact))
+ ;; Note that we specifically use a LinkedList type hint here since we
+ ;; use methods from multiple interfaces here, namely List and Deque.
+ ^LinkedList current-val (.get m wrapper)]
+ (if current-val
+ (do
+ (if (= (.size current-val) 1)
+ (.remove m wrapper)
+ ;;; Since as noted above, the facts are equal, we don't actually care which one we remove.
+ ;;; We remove the first here to avoid any work checking equality and since this is a constant-time
+ ;;; operation on LinkedList. Since the insertions will be newly inserted facts we probably won't
+ ;;; have many identical retractions, so doing a sweep for identical facts first probably wouldn't
+ ;;; have enough hits to be worth the cost.
+ (.removeFirst current-val))
+ true)
+ false)))
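+
+;; A small sketch of the counting behavior: equal facts share one wrapper key,
+;; and dec-fact-count! reports whether a matching fact was present to cancel.
+(comment
+  (let [m (LinkedHashMap.)]
+    (inc-fact-count! m :cold)
+    (inc-fact-count! m :cold)
+    [(dec-fact-count! m :cold)      ; => true, one :cold remains in m
+     (dec-fact-count! m :windy)]))  ; => false, nothing to cancel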
+
+(defn map->vals-concated
+ [^Map m]
+ (let [val-list (java.util.LinkedList.)
+ it (.iterator (.entrySet m))]
+ (loop []
+ (when (.hasNext it)
+ (do (let [^java.util.Map$Entry e (.next it)
+ fact (.fact ^FactWrapper (.getKey e))
+ ^Iterable facts-in-val (.getValue e)
+ fact-iter (.iterator facts-in-val)]
+ (loop []
+ (when (.hasNext fact-iter)
+ (do
+ (.add val-list (.next fact-iter))
+ (recur)))))
+ (recur))))
+ ;; This list will never be exposed to the user; it is simply iterated over
+ ;; by the engine and then discarded. This being the case there is no
+ ;; need to return a persistent data structure rather than an unmodifiable one.
+ (Collections/unmodifiableList val-list)))
+
+;;; This is a pending updates cache that allows
+;;; retractions and insertions of equal facts
+;;; to cancel each other out.
+;;; More formally, for i insertions and r retractions
+;;; of a fact f, it will:
+;;; - If i = r, no operations will be performed.
+;;; - If i > r, f will be returned for insertion i - r times.
+;;; - If r > i, f will be returned for retraction r - i times.
+(deftype CancellingUpdateCache [^Map ^:unsynchronized-mutable insertions
+ ^Map ^:unsynchronized-mutable retractions]
+
+ uc/UpdateCache
+
+ (add-insertions! [this facts]
+ (let [fact-iter (.iterator ^Iterable facts)]
+ (loop []
+ (when (.hasNext fact-iter)
+ (let [fact (.next fact-iter)]
+ (when-not (dec-fact-count! retractions fact)
+ (inc-fact-count! insertions fact))
+ (recur))))))
+
+ (add-retractions! [this facts]
+ (let [fact-iter (.iterator ^Iterable facts)]
+ (loop []
+ (when (.hasNext fact-iter)
+ (let [fact (.next fact-iter)]
+ (when-not (dec-fact-count! insertions fact)
+ (inc-fact-count! retractions fact))
+ (recur))))))
+
+ (get-updates-and-reset! [this]
+ (let [retractions-update (when (-> retractions .size pos?)
+ (uc/->PendingUpdate :retract (map->vals-concated retractions)))
+ insertions-update (when (-> insertions .size pos?)
+ (uc/->PendingUpdate :insert (map->vals-concated insertions)))]
+ (set! insertions (LinkedHashMap.))
+ (set! retractions (LinkedHashMap.))
+
+ (cond
+
+ (and insertions-update retractions-update)
+ ;; This could be ordered to have insertions before retractions if we ever
+ ;; found that that performs better on average. Either ordering should
+ ;; be functionally correct.
+ [[retractions-update] [insertions-update]]
+
+ insertions-update
+ [[insertions-update]]
+
+ retractions-update
+ [[retractions-update]]))))
+
+;; We use LinkedHashMap so that the ordering of the pending updates will be deterministic.
+(defn get-cancelling-update-cache
+ []
+ (CancellingUpdateCache. (LinkedHashMap.) (LinkedHashMap.)))
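+
+;; An end-to-end sketch of the cancelling behavior described above: equal
+;; insertions and retractions cancel, and only the surplus is returned.
+(comment
+  (let [cache (get-cancelling-update-cache)]
+    (uc/add-insertions! cache [:a :a :b])
+    (uc/add-retractions! cache [:a])
+    ;; => a single :insert PendingUpdate containing :a and :b; no retractions.
+    (uc/get-updates-and-reset! cache)))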
diff --git a/src/main/clojure/clara/rules/update_cache/core.cljc b/build/clara/rules/update_cache/core.clj
similarity index 100%
rename from src/main/clojure/clara/rules/update_cache/core.cljc
rename to build/clara/rules/update_cache/core.clj
diff --git a/src/main/clojure/clara/tools/fact_graph.cljc b/build/clara/tools/fact_graph.clj
similarity index 96%
rename from src/main/clojure/clara/tools/fact_graph.cljc
rename to build/clara/tools/fact_graph.clj
index 6fb04398..dacc7455 100644
--- a/src/main/clojure/clara/tools/fact_graph.cljc
+++ b/build/clara/tools/fact_graph.clj
@@ -33,12 +33,12 @@
(comp (remove :facts-accumulated)
(map :fact))
(:matches explanation))
-
+
activation-node (map->RuleActivationNode {:rule-name rule-name
:id (swap! id-counter inc)})
accum-matches (filter :facts-accumulated (:matches explanation))]
-
+
(as-> original-graph graph
(if (seq accum-matches)
(reduce (fn [reduce-graph accum-match]
@@ -64,8 +64,8 @@
(defn session->fact-graph
"Given a session, return a graph structure connecting all facts to the facts
- that they caused to be logically inserted. Note that such connections will not
- be made for unconditionally inserted facts."
+ that they caused to be logically inserted. Note that such connections will not
+ be made for unconditionally inserted facts."
[session]
(let [id-counter (atom 0)
         ;; Use a counter, whose value will be added to internal nodes, to ensure that
@@ -78,7 +78,7 @@
;; here without the performance penalty of generating all of the inspection data for the session. Therefore, for now
;; we break the privacy of the function here. Once issue 286 is completed we should remove this private Var access.
fact->explanations (@#'i/gen-fact->explanations session)
-
+
;; Produce tuples of the form [inserted-fact {:rule rule :explanation clara.tools.inspect.Explanation}]
insertion-tuples (into []
(comp
@@ -89,7 +89,7 @@
v)))
cat)
fact->explanations)]
-
+
(reduce (fn [graph tuple]
(apply add-insertion-to-graph graph id-counter tuple))
empty-fact-graph
diff --git a/src/main/clojure/clara/tools/inspect.cljc b/build/clara/tools/inspect.clj
similarity index 58%
rename from src/main/clojure/clara/tools/inspect.cljc
rename to build/clara/tools/inspect.clj
index 94b40180..b2831863 100644
--- a/src/main/clojure/clara/tools/inspect.cljc
+++ b/build/clara/tools/inspect.clj
@@ -1,57 +1,48 @@
(ns clara.tools.inspect
"Tooling to inspect a rule session. The two major methods here are:
- * inspect, which returns a data structure describing the session that can be used by tooling.
- * explain-activations, which uses inspect and prints a human-readable description covering
- why each rule activation or query match occurred."
- (:require #?(:clj [clara.rules.engine :as eng])
- #?(:cljs [clara.rules.engine :as eng :refer [RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode
- ProductionNode]])
- [clara.rules.schema :as schema]
- [clara.rules.memory :as mem]
- [clara.tools.internal.inspect :as i]
- #?(:cljs [goog.string :as gstr])
- #?(:clj
- [clojure.main :refer [demunge]])
- [schema.core :as s]
- [clojure.string :as str])
- #?(:clj
- (:import [clara.rules.engine
- ProductionNode
- RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode])))
+ * inspect, which returns a data structure describing the session that can be used by tooling.
+ * explain-activations, which uses inspect and prints a human-readable description covering
+ why each rule activation or query match occurred."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.schema :as schema]
+ [clara.rules.memory :as mem]
+ [clara.tools.internal.inspect :as i]
+ [clojure.main :refer [demunge]]
+ [schema.core :as s]
+ [clojure.string :as str])
+ (:import [clara.rules.engine
+ ProductionNode
+ RootJoinNode
+ HashJoinNode
+ ExpressionJoinNode
+ NegationNode
+ NegationWithJoinFilterNode]))
(s/defschema ConditionMatch
"A structure associating a condition with the facts that matched them. The fields are:
- :fact - A fact propagated from this condition in a rule or query. For non-accumulator conditions,
- this will be the fact matched by the condition. For accumulator conditions, it will be the result
- of the accumulation. So, for example, if we have a condition like
+ :fact - A fact propagated from this condition in a rule or query. For non-accumulator conditions,
+ this will be the fact matched by the condition. For accumulator conditions, it will be the result
+ of the accumulation. So, for example, if we have a condition like
- [?cold <- Cold]
+ [?cold <- Cold]
- a ConditionMatch for this condition will have a Cold fact in its :fact field. If we have a condition like
+ a ConditionMatch for this condition will have a Cold fact in its :fact field. If we have a condition like
- [?min-cold <- (acc/min :temperature) :from [Cold]]
-
- the value of :fact will be the minimum temperature returned by the accumulator.
+ [?min-cold <- (acc/min :temperature) :from [Cold]]
- :condition - A structure representing this condition. This is the same structure used inside the structures defining
- rules and queries.
+ the value of :fact will be the minimum temperature returned by the accumulator.
- :facts-accumulated (nullable) : When the condition is an accumulator condition, this will contain the individual facts over
- which the accumulator ran. For example, in the case above with the condition
+ :condition - A structure representing this condition. This is the same structure used inside the structures defining
+ rules and queries.
- [?min-cold <- (acc/min :temperature) :from [Cold]]
-
- this will contain the individual Cold facts over which we accumulated, while the :fact field
- will contain the result of the accumulation."
+ :facts-accumulated (nullable) : When the condition is an accumulator condition, this will contain the individual facts over
+ which the accumulator ran. For example, in the case above with the condition
+
+ [?min-cold <- (acc/min :temperature) :from [Cold]]
+
+ this will contain the individual Cold facts over which we accumulated, while the :fact field
+ will contain the result of the accumulation."
{:fact s/Any
:condition schema/Condition
(s/optional-key :facts-accumulated) [s/Any]})
@@ -96,11 +87,11 @@
:join
[condition]
- ;; Negation nodes store the fact that they are a negation
- ;; in their :node-type and strip the information out of the
- ;; :condition field. We reconstruct the negation boolean condition
- ;; that is contained in rule and query data structures created by defrule
- ;; and that conforms to the Condition schema.
+ ;; Negation nodes store the fact that they are a negation
+ ;; in their :node-type and strip the information out of the
+ ;; :condition field. We reconstruct the negation boolean condition
+ ;; that is contained in rule and query data structures created by defrule
+ ;; and that conforms to the Condition schema.
:negation
[[:not condition]])
concat (map :fact (mem/get-elements-all memory {:id node-id}))))
@@ -115,7 +106,7 @@
(for [{:keys [matches bindings] :as token} tokens]
(->Explanation
- ;; Convert matches to explanation structure.
+ ;; Convert matches to explanation structure.
(for [[fact node-id] matches
:let [node (id-to-node node-id)
condition (if (:accum-condition node)
@@ -135,10 +126,9 @@
{:fact fact
:condition condition}))
- ;; Remove generated bindings from user-facing explanation.
+ ;; Remove generated bindings from user-facing explanation.
(into {} (remove (fn [[k v]]
- #?(:clj (.startsWith (name k) "?__gen__"))
- #?(:cljs (gstr/startsWith (name k) "?__gen__")))
+ (.startsWith (name k) "?__gen__"))
bindings))))))
(defn ^:private gen-all-rule-matches
@@ -150,14 +140,14 @@
[(:production k)
(to-explanations session (map #(-> % :activation :token) v))]))
grouped-info))))
-
+
(defn ^:private gen-fact->explanations
[session]
(let [{:keys [memory rulebase]} (eng/components session)
{:keys [productions production-nodes query-nodes]} rulebase
rule-to-rule-node (into {} (for [rule-node production-nodes]
- [(:production rule-node) rule-node]))]
+ [(:production rule-node) rule-node]))]
(apply merge-with into
(for [[rule rule-node] rule-to-rule-node
token (keys (mem/get-insertions-all memory rule-node))
@@ -167,54 +157,54 @@
:explanation (first (to-explanations session [token]))}]}))))
(def ^{:doc "Return a new session on which information will be gathered for optional inspection keys.
- This can significantly increase memory consumption since retracted facts
- cannot be garbage collected as normally."}
+ This can significantly increase memory consumption since retracted facts
+ cannot be garbage collected as normally."}
with-full-logging i/with-activation-listening)
(def ^{:doc "Return a new session without information gathering on this session for optional inspection keys.
- This new session will not retain references to any such information previously gathered."}
+ This new session will not retain references to any such information previously gathered."}
without-full-logging i/without-activation-listening)
-
+
(s/defn inspect
" Returns a representation of the given rule session useful to understand the
- state of the underlying rules.
+ state of the underlying rules.
+
+ The returned structure always includes the following keys:
- The returned structure always includes the following keys:
+ * :rule-matches -- a map of rule structures to their matching explanations.
+ Note that this only includes rule matches with corresponding logical
+ insertions after the rules finished firing.
+ * :query-matches -- a map of query structures to their matching explanations.
+ * :condition-matches -- a map of conditions pulled from each rule to facts they match.
+ * :insertions -- a map of rules to a sequence of {:explanation E, :fact F} records
+ to allow inspection of why a given fact was inserted.
+ * :fact->explanations -- a map of facts inserted to a sequence
+ of maps of the form {:rule rule-structure :explanation explanation},
+ where each such map justifies a single insertion of the fact.
- * :rule-matches -- a map of rule structures to their matching explanations.
- Note that this only includes rule matches with corresponding logical
- insertions after the rules finished firing.
- * :query-matches -- a map of query structures to their matching explanations.
- * :condition-matches -- a map of conditions pulled from each rule to facts they match.
- * :insertions -- a map of rules to a sequence of {:explanation E, :fact F} records
- to allow inspection of why a given fact was inserted.
- * :fact->explanations -- a map of facts inserted to a sequence
- of maps of the form {:rule rule-structure :explanation explanation},
- where each such map justifies a single insertion of the fact.
+ And additionally includes the following keys for operations
+ performed after a with-full-logging call on the session:
- And additionally includes the following keys for operations
- performed after a with-full-logging call on the session:
-
- * :unfiltered-rule-matches: A map of rule structures to their matching explanations.
- This includes all rule activations, regardless of whether they led to insertions or if
- they were ultimately retracted. This should be considered low-level information primarily
- useful for debugging purposes rather than application control logic, although legitimate use-cases
- for the latter do exist if care is taken. Patterns of insertion and retraction prior to returning to
- the caller are internal implementation details of Clara unless explicitly controlled by the user.
+ * :unfiltered-rule-matches: A map of rule structures to their matching explanations.
+ This includes all rule activations, regardless of whether they led to insertions or if
+ they were ultimately retracted. This should be considered low-level information primarily
+ useful for debugging purposes rather than application control logic, although legitimate use-cases
+ for the latter do exist if care is taken. Patterns of insertion and retraction prior to returning to
+ the caller are internal implementation details of Clara unless explicitly controlled by the user.
- Users may inspect the entire structure for troubleshooting or explore it
- for specific cases. For instance, the following code snippet could look
- at all matches for some example rule:
+ Users may inspect the entire structure for troubleshooting or explore it
+ for specific cases. For instance, the following code snippet could look
+ at all matches for some example rule:
- (defrule example-rule ... )
+ (defrule example-rule ... )
- ...
+ ...
- (get-in (inspect example-session) [:rule-matches example-rule])
+ (get-in (inspect example-session) [:rule-matches example-rule])
- ...
+ ...
- The above segment will return matches for the rule in question."
+ The above segment will return matches for the rule in question."
[session] :- InspectionSchema
(let [{:keys [memory rulebase]} (eng/components session)
{:keys [productions production-nodes query-nodes id-to-node]} rulebase
@@ -257,27 +247,27 @@
"Prints a human-readable explanation of the facts and conditions that created the Rete token."
([explanation] (explain-activation explanation ""))
([explanation prefix]
- (doseq [{:keys [fact condition]} (:matches explanation)]
- (if (:from condition)
- ;; Explain why the accumulator matched.
- (let [{:keys [accumulator from]} condition]
- (println prefix fact)
- (println prefix " accumulated with" accumulator)
- (println prefix " from" (:type from))
- (println prefix " where" (:constraints from)))
-
- ;; Explain why a condition matched.
- (let [{:keys [type constraints]} condition]
- (println prefix fact)
- (println prefix " is a" type)
- (println prefix " where" constraints))))))
+ (doseq [{:keys [fact condition]} (:matches explanation)]
+ (if (:from condition)
+ ;; Explain why the accumulator matched.
+ (let [{:keys [accumulator from]} condition]
+ (println prefix fact)
+ (println prefix " accumulated with" accumulator)
+ (println prefix " from" (:type from))
+ (println prefix " where" (:constraints from)))
+
+ ;; Explain why a condition matched.
+ (let [{:keys [type constraints]} condition]
+ (println prefix fact)
+ (println prefix " is a" type)
+ (println prefix " where" constraints))))))
(defn explain-activations
"Prints a human-friendly explanation of why rules and queries matched in the given session.
   A caller may optionally pass a :rule-filter-fn, which is a predicate
(clara.tools.inspect/explain-activations session
- :rule-filter-fn (fn [rule] (re-find my-rule-regex (:name rule))))"
+ :rule-filter-fn (fn [rule] (re-find my-rule-regex (:name rule))))"
[session & {:keys [rule-filter-fn] :as options}]
(let [filter-fn (or rule-filter-fn (constantly true))]
@@ -311,24 +301,23 @@
(defn node-fn-name->production-name
"A helper function for retrieving the name or names of rules that a generated function belongs to.
- 'session' - a LocalSession from which a function was retrieved
- 'node-fn' - supports the following types:
- 1. String - expected to be in the format '/__'.
- Expected use-case for string would be in the event that a user copy pasted this function identifier
- from an external tool, ex. a jvm profiler
- 2. Symbol - expected to be in the format '/__.
- Has the same use-case as string, just adds flexibility to the type.
- 3. Function - expected to be the actual function from the Session
- This covers a use-case where the user can capture the function being used and programmatically
- trace it back to the rules being executed."
+ 'session' - a LocalSession from which a function was retrieved
+ 'node-fn' - supports the following types:
+ 1. String - expected to be in the format '/__'.
+ Expected use-case for string would be in the event that a user copy pasted this function identifier
+ from an external tool, ex. a jvm profiler
+ 2. Symbol - expected to be in the format '/__.
+ Has the same use-case as string, just adds flexibility to the type.
+ 3. Function - expected to be the actual function from the Session
+ This covers a use-case where the user can capture the function being used and programmatically
+ trace it back to the rules being executed."
[session node-fn]
(let [fn-name-str (cond
(string? node-fn)
node-fn
(fn? node-fn)
- #?(:clj (str node-fn)
- :cljs (.-name node-fn) )
+ (str node-fn)
(symbol? node-fn)
(str node-fn)
@@ -339,12 +328,9 @@
:supported-types ["string" "symbol" "fn"]})))
fn-name-str (-> fn-name-str demunge (str/split #"/") last)
- simple-fn-name #?(:clj
- (-> (or (re-find #"(.+)--\d+" fn-name-str) ;; anonymous function
- (re-find #"(.+)" fn-name-str)) ;; regular function
- last)
- :cljs
- fn-name-str)
+ simple-fn-name (-> (or (re-find #"(.+)--\d+" fn-name-str) ;; anonymous function
+ (re-find #"(.+)" fn-name-str)) ;; regular function
+ last)
[node-abr node-id _] (str/split simple-fn-name #"-")]
;; used as a sanity check that the fn provided is in the form expected, ie. --
@@ -352,8 +338,7 @@
(if-let [node (-> (eng/components session)
:rulebase
:id-to-node
- (get #?(:clj (Long/valueOf ^String node-id)
- :cljs (js/parseInt node-id))))]
+ (get (Long/valueOf ^String node-id)))]
(if (= ProductionNode (type node))
[(-> node :production :name)]
(if-let [production-names (seq (eng/node-rule-names (some-fn :production :query) node))]
diff --git a/src/main/clojure/clara/tools/internal/inspect.cljc b/build/clara/tools/internal/inspect.clj
similarity index 99%
rename from src/main/clojure/clara/tools/internal/inspect.cljc
rename to build/clara/tools/internal/inspect.clj
index 35695fd9..a6b33498 100644
--- a/src/main/clojure/clara/tools/internal/inspect.cljc
+++ b/build/clara/tools/internal/inspect.clj
@@ -77,4 +77,4 @@
(throw (ex-info "Found more than one PersistentActivationListener on session"
{:session session})))))
-
+
diff --git a/src/main/clojure/clara/tools/loop_detector.cljc b/build/clara/tools/loop_detector.clj
similarity index 81%
rename from src/main/clojure/clara/tools/loop_detector.cljc
rename to build/clara/tools/loop_detector.clj
index dfe71476..bbe14fe4 100644
--- a/src/main/clojure/clara/tools/loop_detector.cljc
+++ b/build/clara/tools/loop_detector.clj
@@ -6,7 +6,7 @@
;; Although we use a single type here note that the cycles-count and the on-limit-delay fields
;; will be nil during the persistent state of the listener.
(deftype CyclicalRuleListener [cycles-count max-cycles on-limit-fn on-limit-delay]
- l/ITransientEventListener
+ l/ITransientEventListener
(left-activate! [listener node tokens]
listener)
(left-retract! [listener node tokens]
@@ -74,22 +74,21 @@
(ifn? fn-or-keyword) fn-or-keyword
:else (throw (ex-info "The :on-error-fn must be a non-nil function value" {:clara-rules/max-cycles-exceeded-fn fn-or-keyword}))))
-
(defn with-loop-detection
- "Detect suspected infinite loops in the session.
+ "Detect suspected infinite loops in the session.
- Max-cycles is the maximum
- number of transitions permitted between different activation groups (salience levels)
- plus the number of times all rules are evaluated and their facts inserted, thus leading
- to another cycle of rules activations in the same activation group.
+ Max-cycles is the maximum
+ number of transitions permitted between different activation groups (salience levels)
+ plus the number of times all rules are evaluated and their facts inserted, thus leading
+ to another cycle of rules activations in the same activation group.
- on-limit-fn is a 0-arg function that is invoked exactly once when this limit is exceeded. It can either be
- a user-provided function or a keyword that indicates a built-in function to use. Currently supported keywords are:
+ on-limit-fn is a 0-arg function that is invoked exactly once when this limit is exceeded. It can either be
+ a user-provided function or a keyword that indicates a built-in function to use. Currently supported keywords are:
- :throw-exception - This throws an exception when the limit is reached. If tracing is enabled, the exception will include
- the trace.
+ :throw-exception - This throws an exception when the limit is reached. If tracing is enabled, the exception will include
+ the trace.
- :standard-out-warning - This prints a warning to standard out."
+ :standard-out-warning - This prints a warning to standard out."
[session max-cycles on-limit-fn]
diff --git a/build/clara/tools/testing_utils.clj b/build/clara/tools/testing_utils.clj
new file mode 100644
index 00000000..1c62dd25
--- /dev/null
+++ b/build/clara/tools/testing_utils.clj
@@ -0,0 +1,204 @@
+(ns clara.tools.testing-utils
+ "Internal utilities for testing clara-rules and derivative projects. These should be considered experimental
+ right now from the perspective of consumers of clara-rules, although it is possible that this namespace
+ will be made part of the public API once its functionality has proven robust and reliable. The focus, however,
+ is functionality needed to test the rules engine itself."
+ (:require [clara.rules.update-cache.core :as uc]
+ [clara.rules.update-cache.cancelling :as ca]
+ [clara.rules.compiler :as com]
+ [clara.rules.dsl :as dsl]
+ [clojure.test :refer [is]]))
+
+(defmacro def-rules-test
+ "This macro allows creation of rules, queries, and sessions from arbitrary combinations of rules
+ and queries in a setup map without the necessity of creating a namespace or defining a session
+ using defsession in both Clojure and ClojureScript. The first argument is the name of the test,
+ and the second argument is a map with entries :rules, :queries, and :sessions. For example usage see
+ clara.test-testing-utils. Note that sessions currently can only contain rules and queries defined
+ in the setup map; supporting other rule sources such as namespaces and defrule/defquery may be possible
+ in the future.
+
+ Namespaces consuming this macro are expected to require clara.rules and either clojure.test or cljs.test.
+  Unfortunately, at this time we can't add inline requires for these namespaces with the macroexpanded code in
+ ClojureScript; see https://anmonteiro.com/2016/10/clojurescript-require-outside-ns/ for some discussion on the
+ subject. However, the test namespaces consuming this will in all likelihood have these dependencies anyway
+ so this probably isn't a significant shortcoming of this macro."
+ [name params & forms]
+ (let [sym->rule (->> params
+ :rules
+ (partition 2)
+ (into {}
+ (map (fn [[rule-name [lhs rhs props]]]
+ [rule-name (assoc (dsl/parse-rule* lhs rhs props {}) :name (str rule-name))]))))
+
+ sym->query (->> params
+ :queries
+ (partition 2)
+ (into {}
+ (map (fn [[query-name [params lhs]]]
+ [query-name (assoc (dsl/parse-query* params lhs {}) :name (str query-name))]))))
+
+ production-syms->productions (fn [p-syms]
+ (map (fn [s]
+ (or (get sym->rule s)
+ (get sym->query s)))
+ p-syms))
+
+ session-syms->session-forms (->> params
+ :sessions
+ (partition 3)
+ (into []
+ (comp (map (fn [[session-name production-syms session-opts]]
+ [session-name (production-syms->productions production-syms) session-opts]))
+ (map (fn [[session-name productions session-opts]]
+ [session-name `(com/mk-session ~(into [(vec productions)]
+ cat
+ session-opts))]))
+ cat)))
+
+ test-form `(clojure.test/deftest
+ ~name
+ (let [~@session-syms->session-forms
+ ~@(sequence cat sym->query)
+ ~@(sequence cat sym->rule)]
+ ~@forms))]
+ test-form))
+
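+;; Illustrative sketch, not part of the original patch; the Temperature/Cold fact types and the
+;; rule and query bodies below are hypothetical, and clara.rules and clojure.test are assumed to
+;; be required by the consuming namespace.
+(comment
+  (def-rules-test test-cold-inserted
+    {:rules [cold-rule [[[Temperature (< temperature 0) (= ?t temperature)]]
+                        (insert! (->Cold ?t))]]
+     :queries [cold-query [[] [[Cold (= ?c temperature)]]]]
+     :sessions [empty-session [cold-rule cold-query] {}]}
+    (let [session (-> empty-session
+                      (insert (->Temperature -10 "MCI"))
+                      fire-rules)]
+      (is (= [{:?c -10}] (query session cold-query))))))
+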
+(defn opts-fixture
+ ;; For operations other than replace-facts uc/get-ordered-update-cache is currently
+ ;; always used. This fixture ensures that CancellingUpdateCache is tested for a wide
+  ;; variety of different cases rather than a few cases specific to it.
+ [f]
+ (f)
+ (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
+ (f)))
+
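+;; Illustrative usage, not part of the original patch: registering the fixture once per test
+;; namespace runs the whole suite against both update cache implementations, e.g.
+;; (clojure.test/use-fixtures :once opts-fixture)
+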
+(defn join-filter-equals
+ "Intended to be a test function that is the same as equals, but is not visible to Clara as such
+ and thus forces usage of join filters instead of hash joins"
+ [& args]
+ (apply = args))
+
+(def side-effect-holder (atom nil))
+
+(defn side-effect-holder-fixture
+ "Fixture to reset the side effect holder to nil both before and after tests.
+ This should be used as a :each fixture."
+ [t]
+ (reset! side-effect-holder nil)
+ (t)
+ (reset! side-effect-holder nil))
+
+(defn time-execution
+ [func]
+ (let [start (System/currentTimeMillis)
+ _ (func)
+ stop (System/currentTimeMillis)]
+ (- stop start)))
+
+(defn execute-tests
+ [func iterations]
+ (let [execution-times (for [_ (range iterations)]
+ (time-execution func))
+ sum #(reduce + %)
+ mean (/ (sum execution-times) iterations)
+ std (->
+ (into []
+ (comp
+ (map #(- % mean))
+ (map #(Math/pow (double %) 2.0)))
+ execution-times)
+ sum
+ (/ iterations)
+ Math/sqrt)]
+ {:std (double std)
+ :mean (double mean)}))
+
+(defn run-performance-test
+ "Created as a rudimentary alternative to criterium, due to assumptions made during benchmarking. Specifically, that
+ criterium attempts to reach a steady state of compiled and loaded classes. This fundamentally doesn't work when the
+ metrics needed rely on compilation or evaluation."
+ [form]
+ (let [{:keys [description func iterations mean-assertion verbose]} form
+ {:keys [std mean]} (execute-tests func iterations)]
+ (when verbose
+ (println (str \newline "Running Performance tests for:"))
+ (println description)
+ (println "==========================================")
+ (println (str "Mean: " mean "ms"))
+ (println (str "Standard Deviation: " std "ms" \newline)))
+ (is (mean-assertion mean)
+ (str "Actual mean value: " mean))
+ {:mean mean
+ :std std}))
+
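+;; Illustrative sketch, not part of the original patch: the keys below mirror the destructuring
+;; in run-performance-test; the workload and the mean threshold are made up.
+(comment
+  (run-performance-test {:description "summing a large range"
+                         :func #(reduce + (range 1000000))
+                         :iterations 10
+                         :mean-assertion #(< % 500)
+                         :verbose true}))
+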
+(defn ex-data-search
+ ([^Exception e edata]
+ (ex-data-search e nil edata))
+ ([^Exception e emsg edata]
+ (loop [non-matches []
+ e e]
+ (cond
+ ;; Found match.
+ (and (= edata
+ (select-keys (ex-data e)
+ (keys edata)))
+ (or (= emsg
+ (.getMessage e))
+ (nil? emsg)))
+ :success
+
+ ;; Keep searching, record any non-matching ex-data.
+ (.getCause e)
+ (recur (if-let [ed (ex-data e)]
+ (conj non-matches {(.getMessage e) ed})
+ non-matches)
+ (.getCause e))
+
+ ;; Can't find a match.
+ :else
+ non-matches))))
+
+(defn get-all-ex-data
+ "Walk a Throwable chain and return a sequence of all data maps
+ from any ExceptionInfo instances in that chain."
+ [e]
+ (let [get-ex-chain (fn get-ex-chain [e]
+ (if-let [cause (.getCause e)]
+ (conj (get-ex-chain cause) e)
+ [e]))]
+
+ (map ex-data
+ (filter (partial instance? clojure.lang.IExceptionInfo)
+ (get-ex-chain e)))))
+
+(defmacro assert-ex-data
+ ([expected-ex-data form]
+ `(assert-ex-data nil ~expected-ex-data ~form))
+ ([expected-ex-message expected-ex-data form]
+ `(try
+ ~form
+ (is false
+ (str "Exception expected to be thrown when evaluating: " \newline
+ '~form))
+ (catch Exception e#
+ (let [res# (ex-data-search e# ~expected-ex-message ~expected-ex-data)]
+ (is (= :success res#)
+ (str "Exception msg found: " \newline
+ e# \newline
+ "Non matches found: " \newline
+ res#)))))))
+
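+;; Illustrative sketch, not part of the original patch: the expected ex-data only needs to be a
+;; subset of the data attached to some exception in the cause chain.
+(comment
+  (assert-ex-data {:invalid :fact}
+                  (throw (ex-info "invalid fact" {:invalid :fact :other 1}))))
+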
+(defn ex-data-maps
+ "Given a throwable/exception/error `t`, return all `ex-data` maps from the stack trace cause chain in
+ the order they occur traversing the chain from this `t` through the rest of the call stack."
+ [t]
+ (let [append-self (fn append-self
+ [prior t1]
+ (if t1
+ (append-self (conj prior t1) (.getCause ^Throwable t1))
+ prior))
+ throwables (append-self [] t)]
+ (into []
+ (comp (map ex-data))
+ throwables)))
diff --git a/src/main/clojure/clara/tools/tracing.cljc b/build/clara/tools/tracing.clj
similarity index 99%
rename from src/main/clojure/clara/tools/tracing.cljc
rename to build/clara/tools/tracing.clj
index a9d5ffe7..4cc6d955 100644
--- a/src/main/clojure/clara/tools/tracing.cljc
+++ b/build/clara/tools/tracing.clj
@@ -28,7 +28,7 @@
(insert-facts! [listener node token facts]
(append-trace listener {:type :add-facts :node node :token token :facts facts}))
-
+
(alpha-activate! [listener node facts]
(append-trace listener {:type :alpha-activate :facts facts}))
@@ -37,7 +37,7 @@
(retract-facts! [listener node token facts]
(append-trace listener {:type :retract-facts :node node :token token :facts facts}))
-
+
(alpha-retract! [listener node facts]
(append-trace listener {:type :alpha-retract :facts facts}))
@@ -129,7 +129,6 @@
(when tracing-listener
(.-trace ^PersistentTracingListener tracing-listener))))
-
(defn ^:private node-id->productions
"Given a session and a node ID return a list of the rule and query names associated
with the node."
diff --git a/build/clj-kondo.exports/clara/rules/config.edn b/build/clj-kondo.exports/clara/rules/config.edn
new file mode 100644
index 00000000..6b946f6a
--- /dev/null
+++ b/build/clj-kondo.exports/clara/rules/config.edn
@@ -0,0 +1,7 @@
+{:lint-as {clara.rules/defsession clojure.core/def
+ clara.rules.platform/eager-for clojure.core/for}
+ :hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
+ clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
+ clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
+ clara.rules.dsl/parse-rule hooks.clara-rules/analyze-parse-rule-macro
+ clara.tools.testing-utils/def-rules-test hooks.clara-rules/analyze-def-rules-test-macro}}}
diff --git a/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
new file mode 100644
index 00000000..0d5abbcc
--- /dev/null
+++ b/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -0,0 +1,444 @@
+(ns hooks.clara-rules
+ (:require [clj-kondo.hooks-api :as api]
+ [clojure.string :as str]
+ [clojure.set :as set]))
+
+(defn node-value
+ [node]
+ (when node
+ (api/sexpr node)))
+
+(defn node-type
+ [node]
+ (when node
+ (api/tag node)))
+
+(defn- binding-node?
+ "determine if a symbol is a clara-rules binding symbol in the form `?`"
+ [node]
+ (let [node-name (node-value node)]
+ (and (symbol? node-name)
+ (str/starts-with? (str node-name) "?"))))
+
+(defn- fact-result-node?
+ [node]
+ (some-> node meta ::fact-result))
+
+(defn- special-binding-node?
+ "determine if a symbol is a clara-rules special binding symbol in the form `?____`"
+ [node]
+ (let [node-name (node-value node)
+ node-name-str (str node-name)]
+ (and (symbol? node-name)
+ (str/starts-with? node-name-str "?__")
+ (str/ends-with? node-name-str "__"))))
+
+(defn- extract-special-tokens
+ [node-seq]
+ (->> (reduce (fn [token-seq node]
+ (cond
+ (and (= :token (node-type node))
+ (special-binding-node? node)
+ (nil? (namespace (node-value node))))
+ (cons node token-seq)
+
+ (seq (:children node))
+ (concat token-seq (extract-special-tokens (:children node)))
+
+ :else token-seq)) [] node-seq)
+ (set)
+ (sort-by node-value)))
+
+(defn extract-arg-tokens
+ [node-seq]
+ (->> (reduce (fn [token-seq node]
+ (cond
+ (and (= :token (node-type node))
+ (symbol? (node-value node))
+ (not (binding-node? node))
+ (nil? (namespace (node-value node))))
+ (cons node token-seq)
+
+ (seq (:children node))
+ (concat token-seq (extract-arg-tokens (:children node)))
+
+ :else token-seq)) [] node-seq)
+ (set)
+ (sort-by node-value)))
+
+(defn analyze-constraints
+ "sequentially analyzes constraint expressions of clara rules and queries
+ defined via defrule or defquery by sequentially analyzing its children lhs
+ expressions and bindings."
+ [fact-node condition allow-bindings? prev-bindings input-token production-args]
+ (let [[condition-args constraint-seq]
+ (cond
+ (= :vector (node-type (first condition)))
+ [(first condition) (rest condition)]
+
+ (symbol? (node-value fact-node))
+ [(api/vector-node (vec (extract-arg-tokens condition))) condition]
+
+ :else [(api/vector-node []) condition])
+ cond-binding-set (set (map node-value (:children condition-args)))
+ ;;; if `this` bindings are not explicit, then add them anyways
+ [this-input-bindings
+ this-output-bindings] (when-not (contains? cond-binding-set 'this)
+ [[[(api/token-node 'this) input-token]]
+ [[(api/token-node '_) (api/token-node 'this)]]])
+ args-binding-set (set (map node-value (:children production-args)))
+ prev-bindings-set (->> (mapcat (comp :children first) prev-bindings)
+ (filter binding-node?)
+ (map node-value)
+ (set))
+ constraint-bindings
+ (loop [[constraint-expr & more] constraint-seq
+ bindings []
+ bindings-set (set/union prev-bindings-set args-binding-set)]
+ (if (nil? constraint-expr)
+ bindings
+ (let [constraint (:children constraint-expr)
+ binding-nodes (let [binding-tokens (seq (filter binding-node? (rest constraint)))
+ match-bindings-set (set/intersection (set (map node-value binding-tokens)) bindings-set)]
+ (when (and allow-bindings?
+ (contains? #{'= '==} (node-value (first constraint)))
+ (or (seq (filter (complement binding-node?) (rest constraint)))
+ (not-empty match-bindings-set)))
+ binding-tokens))
+ next-bindings-set (-> (set (map node-value binding-nodes))
+ (set/difference bindings-set))
+ binding-expr-nodes (seq (filter (comp next-bindings-set node-value) binding-nodes))
+ [next-bindings-set next-bindings]
+ (if binding-nodes
+ [next-bindings-set
+ (cond->> [[(api/vector-node
+ (vec binding-nodes))
+ constraint-expr]]
+ binding-expr-nodes
+ (concat [[(api/vector-node
+ (vec binding-expr-nodes))
+ input-token]]))]
+ [#{}
+ [[(api/vector-node
+ [(api/token-node '_)])
+ constraint-expr]]])]
+ (recur more
+ (concat bindings next-bindings)
+ (set/union bindings-set next-bindings-set)))))
+
+ input-bindings (when-not (empty? (node-value condition-args))
+ [[condition-args input-token]])]
+ (concat input-bindings this-input-bindings constraint-bindings this-output-bindings)))
+
+(defn analyze-conditions
+ "sequentially analyzes condition expressions of clara rules and queries
+ defined via defrule and defquery by taking into account the optional
+ result binding, optional args bindings and sequentially analyzing
+ its children constraint expressions."
+ [condition-seq allow-bindings? prev-bindings input-token production-args]
+ (loop [[condition-expr & more] condition-seq
+ bindings []]
+ (if (nil? condition-expr)
+ bindings
+ (let [condition (:children condition-expr)
+ [result-token fact-node & condition] (if (= '<- (-> condition second node-value))
+ (cons (api/vector-node
+ [(vary-meta
+ (first condition)
+ assoc ::fact-result true)]) (nnext condition))
+ (cons (api/vector-node
+ [(api/token-node '_)]) condition))
+ condition-bindings (cond
+ (nil? condition)
+ []
+
+ (contains? #{:not} (node-value fact-node))
+ (analyze-conditions condition false (concat prev-bindings bindings) input-token production-args)
+
+ (contains? #{:or :and :exists} (node-value fact-node))
+ (analyze-conditions condition allow-bindings? (concat prev-bindings bindings) input-token production-args)
+
+ (and (= :list (node-type fact-node))
+ (= :from (-> condition first node-value)))
+ (analyze-conditions (rest condition) allow-bindings? (concat prev-bindings bindings) input-token production-args)
+
+ :else
+ (analyze-constraints fact-node condition allow-bindings? (concat prev-bindings bindings) input-token production-args))
+ condition-tokens (->> (mapcat first condition-bindings)
+ (filter binding-node?))
+ result-vector (api/vector-node (vec (list* fact-node condition-tokens)))
+ result-bindings [[result-token result-vector]]
+ output-bindings (concat condition-bindings result-bindings)
+ condition-output (->> (mapcat (comp :children first) output-bindings)
+ (filter binding-node?)
+ (set)
+ (sort-by node-value))
+ output-node (api/vector-node
+ (if (empty? condition-output)
+ [(api/token-node '_)]
+ (vec condition-output)))
+ output-result-node (api/vector-node
+ (if (empty? condition-output)
+ [(api/token-node nil)]
+ (vec condition-output)))
+ next-bindings [output-node
+ (api/list-node
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec (apply concat output-bindings)))
+ output-result-node))]]
+ (recur more (concat bindings [next-bindings]))))))
+
+(defn analyze-parse-query-macro
+ "analyze clara-rules parse-query macro"
+ [{:keys [:node]}]
+ (let [input-token (api/token-node (gensym 'input))
+ input-args (api/vector-node
+ [input-token])
+ [args conditions-node] (rest (:children node))
+ condition-seq (:children conditions-node)
+ special-tokens (extract-special-tokens condition-seq)
+ special-args (when (seq special-tokens)
+ (api/vector-node
+ (vec special-tokens)))
+ transformed-args (for [arg (:children args)]
+ (let [v (node-value arg)]
+ (if (keyword? v)
+ (api/token-node (symbol v))
+ arg)))
+ production-args (api/vector-node
+ (if (empty? transformed-args)
+ [(api/token-node '_)]
+ (vec transformed-args)))
+ condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
+ production-bindings (apply concat
+ (when special-args
+ [special-args input-token
+ (api/token-node '_) special-args])
+ [production-args input-token]
+ condition-bindings)
+ production-output (->> (mapcat (comp :children first) condition-bindings)
+ (filter binding-node?)
+ (set)
+ (sort-by node-value))
+ production-result (api/list-node
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))))
+ fn-node (api/list-node
+ (list
+ (api/token-node 'fn)
+ input-args
+ production-result))
+ new-node (api/map-node
+ [(api/keyword-node :production) fn-node])]
+ {:node new-node}))
+
+(defn analyze-defquery-macro
+ "analyze clara-rules defquery macro"
+ [{:keys [:node]}]
+ (let [[production-name & children] (rest (:children node))
+ production-docs (when (= :token (node-type (first children)))
+ (first children))
+ children (if production-docs (rest children) children)
+ production-opts (when (= :map (node-type (first children)))
+ (first children))
+ input-token (api/token-node (gensym 'input))
+ input-args (api/vector-node
+ [input-token])
+ [args & condition-seq] (if production-opts (rest children) children)
+ special-tokens (extract-special-tokens condition-seq)
+ special-args (when (seq special-tokens)
+ (api/vector-node
+ (vec special-tokens)))
+ transformed-args (for [arg (:children args)]
+ (let [v (node-value arg)
+ m (meta arg)]
+ (if (keyword? v)
+ (cond-> (api/token-node (symbol v))
+ (not-empty m)
+ (vary-meta merge m))
+ arg)))
+ production-args (api/vector-node
+ (if (empty? transformed-args)
+ [(api/token-node '_)]
+ (vec transformed-args)))
+ condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
+ production-bindings (apply concat
+ (when special-args
+ [special-args input-token
+ (api/token-node '_) special-args])
+ [production-args input-token]
+ condition-bindings)
+ production-output (->> (mapcat (comp :children first) condition-bindings)
+ (filter binding-node?)
+ (set)
+ (sort-by node-value))
+ production-result (api/list-node
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))))
+ fn-node (api/list-node
+ (cond-> (list (api/token-node 'fn))
+ production-docs (concat [production-docs])
+ :always (concat [input-args])
+ production-opts (concat [production-opts])
+ :always (concat [production-result])))
+ new-node (vary-meta
+ (api/list-node
+ (list
+ (api/token-node 'def) production-name
+ (api/map-node
+ [(api/keyword-node :production) fn-node])))
+ merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
+ {:node new-node}))
+
+(defn analyze-parse-rule-macro
+ "analyze clara-rules parse-rule macro"
+ [{:keys [:node]}]
+ (let [input-token (api/token-node (gensym 'input))
+ input-args (api/vector-node
+ [input-token])
+ empty-args (api/vector-node [])
+ production-seq (rest (:children node))
+ special-tokens (extract-special-tokens production-seq)
+ special-args (when (seq special-tokens)
+ (api/vector-node
+ (vec special-tokens)))
+ [conditions-node body-node] production-seq
+ condition-seq (:children conditions-node)
+ condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
+ production-bindings (apply concat
+ (when special-args
+ [special-args input-token
+ (api/token-node '_) special-args])
+ [(api/token-node '_) input-token]
+ condition-bindings)
+ production-output (->> (mapcat (comp :children first) condition-bindings)
+ (filter binding-node?)
+ (remove fact-result-node?)
+ (set)
+ (sort-by node-value))
+ production-result (api/list-node
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))
+ body-node))
+ fn-node (api/list-node
+ (list
+ (api/token-node 'fn)
+ input-args
+ production-result))
+ new-node (api/map-node
+ [(api/keyword-node :production) fn-node])]
+ {:node new-node}))
+
+(defn analyze-defrule-macro
+ "analyze clara-rules defrule macro"
+ [{:keys [:node]}]
+ (let [[production-name & children] (rest (:children node))
+ production-docs (when (= :token (node-type (first children)))
+ (first children))
+ children (if production-docs (rest children) children)
+ production-opts (when (= :map (node-type (first children)))
+ (first children))
+ input-token (api/token-node (gensym 'input))
+ input-args (api/vector-node
+ [input-token])
+ empty-args (api/vector-node [])
+ production-seq (if production-opts (rest children) children)
+ special-tokens (extract-special-tokens production-seq)
+ special-args (when (seq special-tokens)
+ (api/vector-node
+ (vec special-tokens)))
+ [body-seq _ condition-seq] (->> (partition-by (comp #{'=>} node-value) production-seq)
+ (reverse))
+ condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
+ production-bindings (apply concat
+ (when special-args
+ [special-args input-token
+ (api/token-node '_) special-args])
+ [(api/token-node '_) input-token]
+ condition-bindings)
+ production-output (->> (mapcat (comp :children first) condition-bindings)
+ (filter binding-node?)
+ (remove fact-result-node?)
+ (set)
+ (sort-by node-value))
+ production-result (api/list-node
+ (list*
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))
+ body-seq))
+ fn-node (api/list-node
+ (cond-> (list (api/token-node 'fn))
+ production-docs (concat [production-docs])
+ :always (concat [input-args])
+ production-opts (concat [production-opts])
+ :always (concat [production-result])))
+ new-node (vary-meta
+ (api/list-node
+ (list
+ (api/token-node 'def) production-name
+ (api/map-node
+ [(api/keyword-node :production) fn-node])))
+ merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
+ {:node new-node}))
+
+
+(defn analyze-def-rules-test-macro
+ [{:keys [:node]}]
+ (let [[test-name test-params & test-body] (rest (:children node))
+ {:keys [rules
+ queries
+ sessions]} (->> (:children test-params)
+ (partition 2)
+ (map (juxt (comp api/sexpr first) last))
+ (into {}))
+ rules-seq (for [[name-node rule-node] (partition 2 (:children rules))]
+ [name-node (analyze-parse-rule-macro
+ {:node (api/list-node
+ (list*
+ (api/token-node 'clara.rules.dsl/parse-rule)
+ (:children rule-node)))})])
+ query-seq (for [[name-node query-node] (partition 2 (:children queries))]
+ [name-node (analyze-parse-query-macro
+ {:node (api/list-node
+ (list*
+ (api/token-node 'clara.rules.dsl/parse-query)
+ (:children query-node)))})])
+ session-seq (for [[name-node productions-node options-node] (partition 3 (:children sessions))
+ :let [args-seq (concat (:children productions-node)
+ (:children options-node))]]
+ [name-node (api/list-node
+ (list
+ (api/token-node 'clara.rules.compiler/mk-session)
+ (api/vector-node
+ (vec args-seq))))])
+ args-seq (->> (concat rules-seq query-seq session-seq)
+ (apply concat))
+ wrap-body (api/list-node
+ (list*
+ (api/token-node 'let)
+ (api/vector-node
+ (vec args-seq))
+ test-body))
+ new-node (api/list-node
+ (list
+ (api/token-node 'clojure.test/deftest)
+ test-name
+ wrap-body))]
+ {:node new-node}))
diff --git a/deps.edn b/deps.edn
new file mode 100644
index 00000000..cdab20a1
--- /dev/null
+++ b/deps.edn
@@ -0,0 +1,59 @@
+{:paths ["src/main/clojure" "resources" "clj-kondo" "target/main/classes"]
+ :deps/prep-lib {:alias :build
+ :fn compile-main-java
+ :ensure "target/main/classes"}
+ :deps {org.clojure/clojure {:mvn/version "1.10.3"}
+ com.cnuernber/ham-fisted {:mvn/version "2.014"}
+ prismatic/schema {:mvn/version "1.4.1"}
+ org.clojure/data.fressian {:mvn/version "1.0.0"}}
+
+ ;for more examples of aliases see: https://github.com/seancorfield/dot-clojure
+ :aliases
+ {:build {:paths ["tool"]
+ :deps {io.github.clojure/tools.build {:mvn/version "0.9.6"}}
+ :ns-default build}
+ :dev {:extra-paths ["dev"]
+ :extra-deps {reloaded.repl/reloaded.repl {:mvn/version "0.2.4"}
+ org.clojure/math.combinatorics {:mvn/version "0.1.3"}
+ clj-kondo/clj-kondo {:mvn/version "2023.04.14"}
+ criterium/criterium {:mvn/version "0.4.6"}}}
+
+ :test {:extra-paths ["src/test/clojure" "target/test/classes"]
+ :extra-deps {lambdaisland/kaocha {:mvn/version "1.80.1274"}
+ org.clojure/test.check {:mvn/version "1.1.1"}
+ pjstadig/humane-test-output {:mvn/version "0.10.0"}}}
+
+ :runner {:main-opts ["-m" "kaocha.runner"]
+ :exec-fn kaocha.runner/exec-fn}
+
+
+ :repl {:extra-deps {nrepl/nrepl {:mvn/version "1.1.0"}
+ cider/cider-nrepl {:mvn/version "0.44.0"}}
+ :main-opts ["-m" "nrepl.cmdline" "--interactive" "--middleware" "[\"cider.nrepl/cider-middleware\"]"]}
+
+ :format-fix {:extra-deps {cljfmt/cljfmt {:mvn/version "0.9.2"}}
+ :main-opts ["-m" "cljfmt.main" "fix" "src" "dev"]}
+
+ :format-check {:extra-deps {cljfmt/cljfmt {:mvn/version "0.9.2"}}
+ :main-opts ["-m" "cljfmt.main" "check" "src" "dev"]}
+
+ :outdated {:extra-deps {olical/depot {:mvn/version "2.3.0"}
+ rewrite-clj/rewrite-clj {:mvn/version "0.6.1"}}
+ :main-opts ["-m" "depot.outdated.main"]}
+
+ :jar {:replace-deps {com.github.seancorfield/depstar {:mvn/version "2.0.216"}}
+ :exec-fn hf.depstar/jar
+ :exec-args {:jar "build/clara-rules.jar"}}
+
+ :install-maven {:extra-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :exec-fn deps-deploy.deps-deploy/deploy
+ :exec-args {:installer :local
+ :artifact "build/clara-rules.jar"}}
+
+ :deploy-maven {:replace-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :exec-fn deps-deploy.deps-deploy/deploy
+ :exec-args {:installer :remote
+ :artifact "build/clara-rules.jar"}}}
+ :mvn/repos
+ {"central" {:url "https://repo1.maven.org/maven2/"}
+ "clojars" {:url "https://repo.clojars.org/"}}}
diff --git a/dev/user.clj b/dev/user.clj
new file mode 100644
index 00000000..7a96237a
--- /dev/null
+++ b/dev/user.clj
@@ -0,0 +1 @@
+(ns user)
diff --git a/project.clj b/project.clj
index cd6adc62..4309d8b8 100644
--- a/project.clj
+++ b/project.clj
@@ -1,6 +1,6 @@
-(defproject com.cerner/clara-rules "0.24.0-SNAPSHOT"
+(defproject k13labs/clara-rules "0.24.0-SNAPSHOT"
:description "Clara Rules Engine"
- :url "https://github.com/cerner/clara-rules"
+ :url "https://github.com/k13labs/clara-rules"
:license {:name "Apache License Version 2.0"
:url "https://www.apache.org/licenses/LICENSE-2.0"}
:dependencies [[org.clojure/clojure "1.7.0"]
@@ -86,13 +86,13 @@
(some->> x :ns ns-name str (re-matches (re-pattern (apply str patterns)))))))
:generative (fn [x] (some->> x :ns ns-name str (re-matches #"^clara\.generative.*")))
:performance (fn [x] (some->> x :ns ns-name str (re-matches #"^clara\.performance.*")))}
-
+
:scm {:name "git"
- :url "https://github.com/cerner/clara-rules"}
+ :url "https://github.com/k13labs/clara-rules"}
:pom-addition [:developers [:developer
- [:id "rbrush"]
- [:name "Ryan Brush"]
- [:url "http://www.clara-rules.org"]]]
+ [:id "k13gomez"]
+ [:name "Jose Gomez"]
+ [:url "http://www.k13labs.com/clara-rules"]]]
:deploy-repositories [["snapshots" {:url "https://oss.sonatype.org/content/repositories/snapshots/"
:creds :gpg}]
["clojars" {:url "https://repo.clojars.org"
diff --git a/resources/.gitkeep b/resources/.gitkeep
new file mode 100644
index 00000000..e69de29b
diff --git a/resources/public/index.html b/resources/public/index.html
deleted file mode 100644
index d290bc04..00000000
--- a/resources/public/index.html
+++ /dev/null
@@ -1,15 +0,0 @@
-
-
-
-
-
-
-
-
-
-
Figwheel template
-
Checkout your developer console.
-
-
-
-
diff --git a/src/main/clojure/clara/macros.clj b/src/main/clojure/clara/macros.clj
deleted file mode 100644
index 65da673a..00000000
--- a/src/main/clojure/clara/macros.clj
+++ /dev/null
@@ -1,278 +0,0 @@
-(ns clara.macros
- "Direct use of this namespace is deprecated. Users can now
- simply use the defrule, defquery, and defsession macros
- in the clara.rules namespace. Users can simply pull those macros
- in like any other, for instance:
-
- (:require [clara.rules :refer [insert fire-rules query insert!]
- :refer-macros [defrule defsession defquery]])
-"
- (:require [clara.rules.engine :as eng]
- [clara.rules.memory :as mem]
- [clara.rules.compiler :as com]
- [clara.rules.dsl :as dsl]
- [cljs.analyzer :as ana]
- [cljs.env :as env]
- [schema.core :as sc]
- [clara.rules.schema :as schema]
- [clojure.set :as s]))
-
-;;; Clear productions stored in cljs.env/*compiler* for current namespace.
-;;; Only exists for its side-effect, hence returns nil.
-(defmacro clear-ns-productions!
- []
- (swap! env/*compiler* assoc-in [::productions (com/cljs-ns)] {})
- nil)
-
-;; Store production in cljs.env/*compiler* under ::productions seq?
-(defn- add-production [name production]
- (swap! env/*compiler* assoc-in [::productions (com/cljs-ns) name] production))
-
-(defn- get-productions-from-namespace
- "Returns a map of names to productions in the given namespace."
- [namespace]
- ;; TODO: remove need for ugly eval by changing our quoting strategy.
- (let [productions (get-in @env/*compiler* [::productions namespace])]
- (map eval (vals productions))))
-
-(defn- get-productions
- "Return the productions from the source"
- [source]
- (cond
- (symbol? source) (get-productions-from-namespace source)
- (coll? source) (seq source)
- :else (throw (IllegalArgumentException. "Unknown source value type passed to defsession"))))
-
-(defn defrule!
- [name production]
- (add-production name production)
- `(def ~name
- ~production))
-
-(defmacro defrule
- [name & body]
- (defrule! name (dsl/build-rule name body)))
-
-(defn defquery!
- [name query]
- (add-production name query)
- `(def ~name
- ~query))
-
-(defmacro defquery
- [name & body]
- (defquery! name (dsl/build-query name body)))
-
-(sc/defn gen-beta-network :- [sc/Any] ; Returns a sequence of compiled nodes.
- "Generates the beta network from the beta tree. "
- ([node-ids :- #{sc/Int} ; Nodes to compile.
- {:keys [id-to-production-node id-to-condition-node id-to-new-bindings forward-edges] :as beta-graph} :- schema/BetaGraph
- parent-bindings :- #{sc/Keyword}]
- (vec
- (for [id node-ids
- :let [beta-node (or (get id-to-condition-node id)
- (get id-to-production-node id))
-
- {:keys [condition production query join-bindings]} beta-node
-
- child-ids (get forward-edges id)
-
- constraint-bindings (com/variables-as-keywords (:constraints condition))
-
- ;; Get all bindings from the parent, condition, and returned fact.
- all-bindings (cond-> (s/union parent-bindings constraint-bindings)
- ;; Optional fact binding from a condition.
- (:fact-binding condition) (conj (:fact-binding condition))
- ;; Optional accumulator result.
- (:result-binding beta-node) (conj (:result-binding beta-node)))
-
- new-bindings (get id-to-new-bindings id)]]
-
- (case (:node-type beta-node)
-
- :join
- (if (:join-filter-expressions beta-node)
- `(eng/->ExpressionJoinNode
- ~id
- '~condition
- ~(com/compile-join-filter id
- "ExpressionJoinNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- {})
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings)
- `(eng/->HashJoinNode
- ~id
- '~condition
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings))
-
- :negation
- (if (:join-filter-expressions beta-node)
- `(eng/->NegationWithJoinFilterNode
- ~id
- '~condition
- ~(com/compile-join-filter id
- "NegationWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- {})
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings)
- `(eng/->NegationNode
- ~id
- '~condition
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings))
-
- :test
- `(eng/->TestNode
- ~id
- ~(:env beta-node)
- ~(com/compile-test id (:constraints condition) (:env beta-node))
- ~(gen-beta-network child-ids beta-graph all-bindings))
-
- :accumulator
- (if (:join-filter-expressions beta-node)
- `(eng/->AccumulateWithJoinFilterNode
- ~id
- {:accumulator '~(:accumulator beta-node)
- :from '~condition}
- ~(:accumulator beta-node)
- ~(com/compile-join-filter id
- "AccumulateWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- {})
- ~(:result-binding beta-node)
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings
- ~new-bindings)
-
- `(eng/->AccumulateNode
- ~id
- {:accumulator '~(:accumulator beta-node)
- :from '~condition}
- ~(:accumulator beta-node)
- ~(:result-binding beta-node)
- ~(gen-beta-network child-ids beta-graph all-bindings)
- ~join-bindings
- ~new-bindings))
-
- :production
- `(eng/->ProductionNode
- ~id
- '~production
- ;; NOTE: This is a workaround around allowing the compiler to eval this
- ;; form in an unknown ns. It will suffer from var shadowing problems described
- ;; @ https://github.com/cerner/clara-rules/issues/178.
- ;; A better solution may be to enhance dsl/resolve-vars to deal with shadowing
- ;; correctly in cljs. This may be easier to do there since the compiler's
- ;; analyzer may be more exposed than on the clj side.
- ~(let [resolve-vars #(@#'dsl/resolve-vars (:rhs production)
- ;; It is unlikely that passing the bindings matters,
- ;; but all:fact-binding from the LHS conditions were
- ;; made available here in the past. However, all
- ;; bindings begin with "?" which typically means
- ;; that a rule wouldn't, or at least shouldn't for
- ;; clarity, start the names of other locals or vars
- ;; with "?".
- (mapv (comp symbol name) all-bindings))]
- (com/compile-action id
- all-bindings
- ;; Using private function for now as a workaround.
- (if (:ns-name production)
- (if (com/compiling-cljs?)
- (binding [cljs.analyzer/*cljs-ns* (:ns-name production)]
- (resolve-vars))
- (binding [*ns* (the-ns (:ns-name production))]
- (resolve-vars)))
- (resolve-vars))
- (:env production))))
-
- :query
- `(eng/->QueryNode
- ~id
- '~query
- ~(:params query))
- (throw (ex-info (str "Unknown node type " (:node-type beta-node)) {:node beta-node})))))))
-
-(sc/defn ^:always-validate compile-alpha-nodes
- [alpha-nodes :- [schema/AlphaNode]]
- (vec
- (for [{:keys [id condition beta-children env]} alpha-nodes
- :let [{:keys [type constraints fact-binding args]} condition]]
-
- {:id id
- :type (com/effective-type type)
- :alpha-fn (com/compile-condition type id (first args) constraints fact-binding env)
- :children (vec beta-children)})))
-
-(defn productions->session-assembly-form
- [productions options]
- ;;; Calling com/names-unique here rather than in sources-and-options->session-assembly-form
- ;;; as a ClojureScript DSL may call productions->session-assembly-form if that DSL
- ;;; has its own mechanism for grouping rules which is different than the clara DSL.
- (com/validate-names-unique productions)
- (let [id-counter (atom 0)
- create-id-fn (fn [] (swap! id-counter inc))
-
- beta-graph (com/to-beta-graph productions create-id-fn)
- ;; Compile the children of the logical root condition.
- beta-network (gen-beta-network (get-in beta-graph [:forward-edges 0]) beta-graph #{})
-
- alpha-graph (com/to-alpha-graph beta-graph create-id-fn)
- alpha-nodes (compile-alpha-nodes alpha-graph)]
-
- `(let [beta-network# ~beta-network
- alpha-nodes# ~alpha-nodes
- productions# '~productions
- options# ~options]
- (clara.rules/assemble-session beta-network# alpha-nodes# productions# options#))))
-
-(defn sources-and-options->session-assembly-form
- [sources-and-options]
- (let [sources (take-while #(not (keyword? %)) sources-and-options)
- options (apply hash-map (drop-while #(not (keyword? %)) sources-and-options))
- ;; Eval to unquote ns symbols, and to eval exprs to look up
- ;; explicit rule sources
- sources (eval (vec sources))
- productions (vec (for [source sources
- production (get-productions source)]
- production))]
- (productions->session-assembly-form productions options)))
-
-(defmacro defsession
- "Creates a session given a list of sources and keyword-style options, which are typically ClojureScript namespaces.
-
- Each source is eval'ed at compile time, in Clojure (not ClojureScript.)
-
- If the eval result is a symbol, it is presumed to be a ClojureScript
- namespace, and all rules and queries defined in that namespace will
- be found and used.
-
- If the eval result is a collection, it is presumed to be a
- collection of productions. Note that although the collection must
- exist in the compiling Clojure runtime (since the eval happens at
- macro-expansion time), any expressions in the rule or query
- definitions will be executed in ClojureScript.
-
- Typical usage would be like this, with a session defined as a var:
-
- (defsession my-session 'example.namespace)
-
- That var contains an immutable session that then can be used as a starting point to create sessions with
- caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
- and will not be modified by callers. So a user might grab it, insert facts, and otherwise
- use it as follows:
-
- (-> my-session
- (insert (->Temperature 23))
- (fire-rules))
- "
- [name & sources-and-options]
- `(def ~name ~(sources-and-options->session-assembly-form sources-and-options)))
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
new file mode 100644
index 00000000..c26815b1
--- /dev/null
+++ b/src/main/clojure/clara/rules.clj
@@ -0,0 +1,257 @@
+(ns clara.rules
+ "Forward-chaining rules for Clojure. The primary API is in this namespace."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.platform :as platform]
+ [clara.rules.compiler :as com]
+ [clara.rules.dsl :as dsl]))
+
+(defn insert
+ "Inserts one or more facts into a working session. It does not modify the given
+ session, but returns a new session with the facts added."
+ [session & facts]
+ (eng/insert session facts))
+
+(defn insert-all
+ "Inserts a sequence of facts into a working session. It does not modify the given
+ session, but returns a new session with the facts added."
+ [session fact-seq]
+ (eng/insert session fact-seq))
+
+(defn retract
+ "Retracts a fact from a working session. It does not modify the given session,
+ but returns a new session with the facts retracted."
+ [session & facts]
+ (eng/retract session facts))
+
+(defn fire-rules
+  "Fires all rules in the given session. Once a rule is fired, it is labeled in a fired
+  state and will not be re-fired unless facts affecting the rule are added or retracted.
+
+  This function does not modify the given session to mark rules as fired. Instead, it returns
+  a new session in which the rules are marked as fired.
+
+  This takes an additional map of options as a second argument. Current options:
+
+  :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
+  Simultaneously propagate insertions and retractions through the rules network, at every step using the insertions and retractions of equal facts to cancel each
+  other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
+  is undefined when this option is enabled, so this option should not be used when calling fire-rules could result in these operations.
+  Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
+  When this option is used, rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
+ ([session] (eng/fire-rules session {}))
+ ([session opts] (eng/fire-rules session opts)))
+
+(defn query
+ "Runs the given query with the optional given parameters against the session.
+ The optional parameters should be in map form. For example, a query call might be:
+
+ (query session get-by-last-name :?last-name \"Jones\")
+
+ The query itself may be either the var created by a defquery statement,
+ or the actual name of the query.
+ "
+ [session query & params]
+ (let [params-map (->> (for [[param value] (apply hash-map params)]
+ [(platform/query-param param) value])
+ (into {}))]
+ (eng/query session query params-map)))
+
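+;; Illustrative usage sketch only (wrapped in `comment`, so it is never evaluated):
+;; a typical flow through the functions above. The `->Temperature` fact constructor
+;; and the `cold-facts` query are hypothetical and assumed to be defined elsewhere.
+(comment
+  (-> (mk-session 'example.rules)
+      (insert (->Temperature 15 "MCI") (->Temperature 30 "SFO"))
+      (fire-rules)
+      (query cold-facts :?location "MCI")))
+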
+(defn insert!
+  "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
+
+  Inserted facts are logical, in that if the support for the insertion is removed, the fact
+  will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
+  if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
+  the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
+  truth maintenance facility.
+
+ This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
+ criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
+ retract their conclusions. This way we can ensure that information inferred by rules is always
+ in a consistent state."
+ [& facts]
+ (eng/insert-facts! facts false))
+
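+;; Illustrative sketch only (never evaluated): the Cold/Temperature scenario described
+;; in the insert! docstring above, expressed as a rule. `Temperature`, `->Cold`, and the
+;; freezing threshold are hypothetical. If the matching Temperature fact is later
+;; retracted, truth maintenance also retracts the logically inserted Cold fact.
+(comment
+  (defrule cold-when-below-freezing
+    [Temperature (= ?temp temperature) (< temperature 0)]
+    =>
+    (insert! (->Cold ?temp))))
+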
+(defn insert-all!
+  "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
+  rules needing to insert multiple facts.
+
+  See the doc in insert! for details on insert behavior."
+ [facts]
+ (eng/insert-facts! facts false))
+
+(defn insert-unconditional!
+ "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
+
+ This differs from insert! in that it is unconditional. The facts inserted will not be retracted
+ even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
+ function as described above, but this function is available for use cases that don't wish to use
+ Clara's truth maintenance."
+ [& facts]
+ (eng/insert-facts! facts true))
+
+(defn insert-all-unconditional!
+  "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
+
+  See the doc in insert-unconditional! for details on unconditional insert behavior."
+ [facts]
+ (eng/insert-facts! facts true))
+
+(defn retract!
+ "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
+
+ Retracting facts from the right-hand side has slightly different semantics than insertion. As described
+ in the insert! documentation, inserts are logical and will automatically be retracted if the rule
+ that inserted them becomes false. This retract! function does not follow the inverse; retracted items
+ are simply removed, and not re-added if the rule that retracted them becomes false.
+
+ The reason for this is that retractions remove information from the knowledge base, and doing truth
+ maintenance over retractions would require holding onto all retracted items, which would be an issue
+ in some use cases. This retract! method is included to help with certain use cases, but unless you
+ have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
+ Clara's underlying truth maintenance retract inserted items if their support becomes false."
+ [& facts]
+ (eng/rhs-retract-facts! facts))
+
+(extend-type clojure.lang.Symbol
+ com/IRuleSource
+ (load-rules [sym]
+ ;; Find the rules and queries in the namespace, shred them,
+ ;; and compile them into a rule base.
+ (if (namespace sym)
+ ;; The symbol is qualified, so load rules in the qualified symbol.
+ (let [resolved (resolve sym)]
+ (when (nil? resolved)
+ (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
+
+ (cond
+ ;; The symbol references a rule or query, so just return it
+ (or (:query (meta resolved))
+ (:rule (meta resolved))) [@resolved]
+
+        ;; The symbol references a sequence, so return it.
+ (sequential? @resolved) @resolved
+
+ :else
+ (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym}))))
+
+ ;; The symbol is not qualified, so treat it as a namespace.
+ (->> (ns-interns sym)
+ (vals) ; Get the references in the namespace.
+ (filter var?)
+ (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
+ ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
+ ;; it is possible that they won't have :line metadata, so we have a default of 0.
+ (sort (fn [v1 v2]
+ (compare (or (:line (meta v1)) 0)
+ (or (:line (meta v2)) 0))))
+ (mapcat #(if (:production-seq (meta %))
+ (deref %)
+ [(deref %)]))))))
+
+(defmacro mk-session
+ "Creates a new session using the given rule sources. The resulting session
+ is immutable, and can be used with insert, retract, fire-rules, and query functions.
+
+ If no sources are provided, it will attempt to load rules from the caller's namespace,
+ which is determined by reading Clojure's *ns* var.
+
+ This will use rules defined with defrule, queries defined with defquery, and sequences
+ of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
+
+  The caller may also specify keyword-style options at the end of the parameters. Several
+  options are supported, although most users will either not need these or just the first two:
+
+  * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
+    fact. Defaults to Clojure's type function.
+  * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
+    Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
+  * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
+    fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
+    this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
+  * :activation-group-fn, a function applied to production structures that returns the group they should be activated with.
+    It defaults to checking the :salience property, or 0 if none exists.
+  * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
+    Defaults to >, so rules with a higher salience are executed first.
+  * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
+    Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
+    Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
+  * :omit-compile-ctx - When false, Clara retains additional information to improve error messages during
+    session deserialization at the cost of additional memory use.
+    By default this information is retained until the session is initially compiled and then discarded. This
+    information might prove useful for debugging compilation errors within the rulebase, e.g. during rulebase serialization
+    (i.e. via Clara's durability support).
+    Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
+
+ This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
+ users must use pre-defined rule sessions using defsession."
+ [& args]
+ (if (and (seq args) (not (keyword? (first args))))
+ `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
+ `(com/mk-session (concat [(ns-name *ns*)] ~(vec args))))) ; No namespace given, so use the current one.
+
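+;; Illustrative sketch only (never evaluated): calling mk-session with keyword options,
+;; alongside a var tagged ^:production-seq that load-rules above would pick up when this
+;; namespace is used as a rule source. All names here are hypothetical.
+(comment
+  (def ^:production-seq generated-productions
+    (build-generated-productions))
+
+  (mk-session 'example.rules
+              :cache false
+              :fact-type-fn :request-type))
+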
+(defmacro defsession
+  "Creates a session given a list of sources and keyword-style options, which are typically Clojure namespaces.
+
+ Typical usage would be like this, with a session defined as a var:
+
+ (defsession my-session 'example.namespace)
+
+ That var contains an immutable session that then can be used as a starting point to create sessions with
+ caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
+ and will not be modified by callers. So a user might grab it, insert facts, and otherwise
+ use it as follows:
+
+ (-> my-session
+ (insert (->Temperature 23))
+ (fire-rules))"
+ [name & sources-and-options]
+ `(def ~name (com/mk-session ~(vec sources-and-options))))
+
+(defmacro defrule
+ "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
+
+ (defrule hvac-approval
+ \"HVAC repairs need the appropriate paperwork, so insert
+ a validation error if approval is not present.\"
+ [WorkOrder (= type :hvac)]
+ [:not [ApprovalForm (= formname \"27B-6\")]]
+ =>
+ (insert! (->ValidationError
+ :approval
+ \"HVAC repairs must include a 27B-6 form.\")))
+
+ See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :rule true :doc doc)
+ ~(dsl/build-rule name body (meta &form)))))
+
+(defmacro defquery
+  "Defines a query and stores it in the given var. For instance, a simple query that accepts no
+ parameters would look like this:
+
+ (defquery check-job
+ \"Checks the job for validation errors.\"
+ []
+ [?issue <- ValidationError])
+
+ See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :query true :doc doc)
+ ~(dsl/build-query name body (meta &form)))))
+
+(defmacro clear-ns-productions!
+  "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
+  Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
+  name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
+  reload via the REPL or a mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
+ defining rules/queries to ensure the cache is cleared properly."
+ []
+ (let [production-syms (->> (ns-interns *ns*)
+ (filter (comp var? second))
+ (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
+ (map first))] ; Take the symbols for the rule/query vars
+ (doseq [psym production-syms]
+ (ns-unmap *ns* psym))))
diff --git a/src/main/clojure/clara/rules.cljc b/src/main/clojure/clara/rules.cljc
deleted file mode 100644
index ea0e404a..00000000
--- a/src/main/clojure/clara/rules.cljc
+++ /dev/null
@@ -1,431 +0,0 @@
-(ns clara.rules
- "Forward-chaining rules for Clojure. The primary API is in this namespace."
- (:require [clara.rules.engine :as eng]
- [schema.core :as s]
- [clara.rules.platform :as platform]
- #?(:cljs [clara.rules.listener :as l])
- #?(:clj [clara.rules.compiler :as com])
- #?(:clj [clara.rules.dsl :as dsl]))
- #?(:cljs (:require-macros clara.rules)))
-
-(defn insert
- "Inserts one or more facts into a working session. It does not modify the given
- session, but returns a new session with the facts added."
- [session & facts]
- (eng/insert session facts))
-
-(defn insert-all
- "Inserts a sequence of facts into a working session. It does not modify the given
- session, but returns a new session with the facts added."
- [session fact-seq]
- (eng/insert session fact-seq))
-
-(defn retract
- "Retracts a fact from a working session. It does not modify the given session,
- but returns a new session with the facts retracted."
- [session & facts]
- (eng/retract session facts))
-
-(defn fire-rules
- "Fires are rules in the given session. Once a rule is fired, it is labeled in a fired
- state and will not be re-fired unless facts affecting the rule are added or retracted.
-
- This function does not modify the given session to mark rules as fired. Instead, it returns
- a new session in which the rules are marked as fired.
-
- This take an additional map of options as a second argument. Current options:
-
- :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
- Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equals facts to cancel each
- other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
- is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
- Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
- When this option is used rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
- ([session] (eng/fire-rules session {}))
- ([session opts] (eng/fire-rules session opts)))
-
-(defn query
- "Runs the given query with the optional given parameters against the session.
- The optional parameters should be in map form. For example, a query call might be:
-
- (query session get-by-last-name :?last-name \"Jones\")
-
- The query itself may be either the var created by a defquery statement,
- or the actual name of the query.
- "
- [session query & params]
- (let [params-map (->> (for [[param value] (apply hash-map params)]
- [(platform/query-param param) value])
- (into {}))]
- (eng/query session query params-map)))
-
-(defn insert!
- "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
-
- Inserted facts are logical, in that if the support for the insertion is removed, the fact
- will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
- if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
- the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
- truth maintenance facillity.
-
- This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
- criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
- retract their conclusions. This way we can ensure that information inferred by rules is always
- in a consistent state."
- [& facts]
- (eng/insert-facts! facts false))
-
-(defn insert-all!
- "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
- rules needing to insert multiple facts.
-
- See the doc in insert! for details on insert behavior.."
- [facts]
- (eng/insert-facts! facts false))
-
-(defn insert-unconditional!
- "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
-
- This differs from insert! in that it is unconditional. The facts inserted will not be retracted
- even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
- function as described above, but this function is available for use cases that don't wish to use
- Clara's truth maintenance."
- [& facts]
- (eng/insert-facts! facts true))
-
-(defn insert-all-unconditional!
- "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
-
- See the doc in insert-unconditional! for details on uncondotional insert behavior."
- [facts]
- (eng/insert-facts! facts true))
-
-(defn retract!
- "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
-
- Retracting facts from the right-hand side has slightly different semantics than insertion. As described
- in the insert! documentation, inserts are logical and will automatically be retracted if the rule
- that inserted them becomes false. This retract! function does not follow the inverse; retracted items
- are simply removed, and not re-added if the rule that retracted them becomes false.
-
- The reason for this is that retractions remove information from the knowledge base, and doing truth
- maintenance over retractions would require holding onto all retracted items, which would be an issue
- in some use cases. This retract! method is included to help with certain use cases, but unless you
- have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
- Clara's underlying truth maintenance retract inserted items if their support becomes false."
- [& facts]
- (eng/rhs-retract-facts! facts))
-
-(defn accumulate
- "DEPRECATED. Use clara.rules.accumulators/accum instead.
-
- Creates a new accumulator based on the given properties:
-
- * An initial-value to be used with the reduced operations.
- * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
- * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
- * An optional retract-fn that can remove a retracted fact from a previously reduced computation
- * An optional convert-return-fn that converts the reduced data into something useful to the caller.
- Simply uses identity by default.
- "
- [& {:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as args}]
- (eng/map->Accumulator
- (merge {;; Default conversion does nothing, so use identity.
- :convert-return-fn identity}
- args)))
-
-#?(:cljs
- (defrecord Rulebase [alpha-roots beta-roots productions queries production-nodes query-nodes id-to-node]))
-
-#?(:cljs
- (defn- create-get-alphas-fn
- "Returns a function that given a sequence of facts,
- returns a map associating alpha nodes with the facts they accept."
- [fact-type-fn ancestors-fn merged-rules]
-
- ;; We preserve a map of fact types to alpha nodes for efficiency,
- ;; effectively memoizing this operation.
- (let [alpha-map (atom {})
- wrapped-fact-type-fn (if (= fact-type-fn type)
- type
- (fn [fact]
- (if (isa? (type fact) :clara.rules.engine/system-type)
- ;; Internal system types always use ClojureScript's type mechanism.
- (type fact)
- ;; All other types defer to the provided function.
- (fact-type-fn fact))))
- wrapped-ancestors-fn (fn [fact-type]
- (if (isa? fact-type :clara.rules.engine/system-type)
- ;; Exclude system types from having ancestors for now
- ;; since none of our use-cases require them. If this changes
- ;; we may need to define a custom hierarchy for them.
- #{}
- (ancestors-fn fact-type)))]
- (fn [facts]
- (for [[fact-type facts] (platform/tuned-group-by wrapped-fact-type-fn facts)]
-
- (if-let [alpha-nodes (get @alpha-map fact-type)]
-
- ;; If the matching alpha nodes are cached, simply return them.
- [alpha-nodes facts]
-
- ;; The alpha nodes weren't cached for the type, so get them now.
- (let [ancestors (conj (wrapped-ancestors-fn fact-type) fact-type)
-
- ;; Get all alpha nodes for all ancestors.
- new-nodes (distinct
- (reduce
- (fn [coll ancestor]
- (concat
- coll
- (get-in merged-rules [:alpha-roots ancestor])))
- []
- ancestors))]
-
- (swap! alpha-map assoc fact-type new-nodes)
- [new-nodes facts])))))))
-
-#?(:cljs
- (defn- mk-rulebase
- [beta-roots alpha-fns productions]
-
- (let [beta-nodes (for [root beta-roots
- node (tree-seq :children :children root)]
- node)
-
- production-nodes (for [node beta-nodes
- :when (= eng/ProductionNode (type node))]
- node)
-
- query-nodes (for [node beta-nodes
- :when (= eng/QueryNode (type node))]
- node)
-
- query-map (into {} (for [query-node query-nodes
-
- ;; Queries can be looked up by reference or by name;
- entry [[(:query query-node) query-node]
- [(:name (:query query-node)) query-node]]]
- entry))
-
- ;; Map of node ids to beta nodes.
- id-to-node (into {} (for [node beta-nodes]
- [(:id node) node]))
-
- ;; type, alpha node tuples.
- alpha-nodes (for [{:keys [id type alpha-fn children env]} alpha-fns
- :let [beta-children (map id-to-node children)]]
- [type (eng/->AlphaNode id env beta-children alpha-fn type)])
-
- ;; Merge the alpha nodes into a multi-map
- alpha-map (reduce
- (fn [alpha-map [type alpha-node]]
- (update-in alpha-map [type] conj alpha-node))
- {}
- alpha-nodes)]
-
- (map->Rulebase
- {:alpha-roots alpha-map
- :beta-roots beta-roots
- :productions (filter :rhs productions)
- :queries (remove :rhs productions)
- :production-nodes production-nodes
- :query-nodes query-map
- :id-to-node id-to-node}))))
-
-
-#?(:cljs
- (defn assemble-session
- "This is used by tools to create a session; most users won't use this function."
- [beta-roots alpha-fns productions options]
- (let [rulebase (mk-rulebase beta-roots alpha-fns productions)
- transport (eng/LocalTransport.)
-
- ;; The fact-type uses Clojure's type function unless overridden.
- fact-type-fn (or (get options :fact-type-fn)
- type)
-
- ;; The ancestors for a logical type uses Clojurescript's ancestors function unless overridden.
- ancestors-fn (or (get options :ancestors-fn)
- ancestors)
-
- ;; Create a function that groups a sequence of facts by the collection
- ;; of alpha nodes they target.
- ;; We cache an alpha-map for facts of a given type to avoid computing
- ;; them for every fact entered.
- get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn rulebase)
-
- activation-group-sort-fn (eng/options->activation-group-sort-fn options)
-
- activation-group-fn (eng/options->activation-group-fn options)
-
- listener (if-let [listeners (:listeners options)]
- (l/delegating-listener listeners)
- l/default-listener)]
-
- (eng/LocalSession. rulebase
- (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
- transport
- listener
- get-alphas-fn
- []))))
-
-#?(:clj
- (extend-type clojure.lang.Symbol
- com/IRuleSource
- (load-rules [sym]
- ;; Find the rules and queries in the namespace, shred them,
- ;; and compile them into a rule base.
- (if (namespace sym)
- ;; The symbol is qualified, so load rules in the qualified symbol.
- (let [resolved (resolve sym)]
- (when (nil? resolved)
- (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
-
- (cond
- ;; The symbol references a rule or query, so just return it
- (or (:query (meta resolved))
- (:rule (meta resolved))) [@resolved]
-
- ;; The symbol refernces a sequence, so return it.
- (sequential? @resolved) @resolved
-
- :default
- (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym} ))))
-
- ;; The symbol is not qualified, so treat it as a namespace.
- (->> (ns-interns sym)
- (vals) ; Get the references in the namespace.
- (filter var?)
- (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
- ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
- ;; it is possible that they won't have :line metadata, so we have a default of 0.
- (sort (fn [v1 v2]
- (compare (or (:line (meta v1)) 0)
- (or (:line (meta v2)) 0))))
- (mapcat #(if (:production-seq (meta %))
- (deref %)
- [(deref %)])))))))
-
-#?(:clj
- (defmacro mk-session
- "Creates a new session using the given rule sources. The resulting session
- is immutable, and can be used with insert, retract, fire-rules, and query functions.
-
- If no sources are provided, it will attempt to load rules from the caller's namespace,
- which is determined by reading Clojure's *ns* var.
-
- This will use rules defined with defrule, queries defined with defquery, and sequences
- of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
-
- The caller may also specify keyword-style options at the end of the parameters. Currently five
- options are supported, although most users will either not need these or just the first two:
-
- * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
- fact. Defaults to Clojure's type function.
- * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
- Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
- * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
- fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
- this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
- * :activation-group-fn, a function applied to production structures and returns the group they should be activated with.
- It defaults to checking the :salience property, or 0 if none exists.
- * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
- Defaults to >, so rules with a higher salience are executed first.
- * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
- Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
- Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
- * :omit-compile-ctx - When false Clara, in Clojure, retains additional information to improve error messages during
- session deserialization at the cost of additional memory use.
- By default this information is retained until the session is initially compiled and then will be discarded. This
- information might prove useful for debugging compilation errors within the rulebase, eg. rulebase serialization
- (ie. via Clara's durability support).
- Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
-
- This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
- users must use pre-defined rule sessions using defsession."
- [& args]
- (if (and (seq args) (not (keyword? (first args))))
- `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
- `(com/mk-session (concat [(ns-name *ns*)] ~(vec args)))))) ; No namespace given, so use the current one.
-
-#?(:clj
- (defmacro defsession
- "Creates a sesson given a list of sources and keyword-style options, which are typically Clojure namespaces.
-
- Typical usage would be like this, with a session defined as a var:
-
- (defsession my-session 'example.namespace)
-
- That var contains an immutable session that then can be used as a starting point to create sessions with
- caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
- and will not be modified by callers. So a user might grab it, insert facts, and otherwise
- use it as follows:
-
- (-> my-session
- (insert (->Temperature 23))
- (fire-rules))"
- [name & sources-and-options]
- (if (com/compiling-cljs?)
- `(clara.macros/defsession ~name ~@sources-and-options)
- `(def ~name (com/mk-session ~(vec sources-and-options))))))
-
-#?(:clj
- (defmacro defrule
- "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
-
- (defrule hvac-approval
- \"HVAC repairs need the appropriate paperwork, so insert
- a validation error if approval is not present.\"
- [WorkOrder (= type :hvac)]
- [:not [ApprovalForm (= formname \"27B-6\")]]
- =>
- (insert! (->ValidationError
- :approval
- \"HVAC repairs must include a 27B-6 form.\")))
-
-See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
- [name & body]
- (if (com/compiling-cljs?)
- `(clara.macros/defrule ~name ~@body)
- (let [doc (if (string? (first body)) (first body) nil)]
- `(def ~(vary-meta name assoc :rule true :doc doc)
- ~(dsl/build-rule name body (meta &form)))))))
-
-#?(:clj
- (defmacro defquery
- "Defines a query and stored it in the given var. For instance, a simple query that accepts no
-parameters would look like this:
-
- (defquery check-job
- \"Checks the job for validation errors.\"
- []
- [?issue <- ValidationError])
-
-See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
- [name & body]
- (if (com/compiling-cljs?)
- `(clara.macros/defquery ~name ~@body)
- (let [doc (if (string? (first body)) (first body) nil)
- binding (if doc (second body) (first body))
- definition (if doc (drop 2 body) (rest body) )]
- `(def ~(vary-meta name assoc :query true :doc doc)
- ~(dsl/build-query name body (meta &form)))))))
-
-#?(:clj
- (defmacro clear-ns-productions!
- "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
- Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
- name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
- reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
- defining rules/queries to ensure the cache is cleared properly."
- []
- (if (com/compiling-cljs?)
- `(clara.macros/clear-ns-productions!)
- (let [production-syms (->> (ns-interns *ns*)
- (filter (comp var? second))
- (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
- (map first) ; Take the symbols for the rule/query vars
- )]
- (doseq [psym production-syms]
- (ns-unmap *ns* psym))))))
diff --git a/src/main/clojure/clara/rules/accumulators.clj b/src/main/clojure/clara/rules/accumulators.clj
new file mode 100644
index 00000000..37da6748
--- /dev/null
+++ b/src/main/clojure/clara/rules/accumulators.clj
@@ -0,0 +1,212 @@
+(ns clara.rules.accumulators
+ "A set of common accumulators usable in Clara rules."
+ (:require [clara.rules.engine :as eng]
+ [schema.core :as s])
+ (:refer-clojure :exclude [min max distinct count]))
+
+(defn accum
+ "Creates a new accumulator. Users are encouraged to use a pre-defined
+ accumulator in this namespace if one fits their needs. (See min, max, all,
+ distinct, and others in this namespace.) This function
+ exists for cases where a custom accumulator is necessary.
+
+ The following properties are accepted.
+
+ * An initial-value to be used with the reduced operations.
+ * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
+ * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
+ * An optional retract-fn that can remove a retracted fact from a previously reduced computation.
+ * An optional convert-return-fn that converts the reduced data into something useful to the caller.
+ Simply uses identity by default.
+ "
+ [{:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as accum-map}]
+
+ ;; Validate expected arguments are present.
+ (s/validate {(s/optional-key :initial-value) s/Any
+ (s/optional-key :combine-fn) s/Any
+ (s/optional-key :convert-return-fn) s/Any
+ :reduce-fn s/Any
+ (s/optional-key :retract-fn) s/Any}
+ accum-map)
+
+ (eng/map->Accumulator
+ (merge {;; Default conversion does nothing, so use identity.
+ :convert-return-fn identity}
+ accum-map)))
+
+(defn- drop-one-of
+ "Removes one instance of the given value from the sequence."
+ [items value]
+ (let [pred #(not= value %)]
+ (into (empty items)
+ cat
+ [(take-while pred items)
+ (rest (drop-while pred items))])))
+
+(defn reduce-to-accum
+  "Creates an accumulator using a given reduce function with optional initial value and
+  conversion to the final result.
+
+  For example, a simple function that returns the Temperature fact with the highest value:
+
+ (acc/reduce-to-accum
+ (fn [previous value]
+ (if previous
+ (if (> (:temperature value) (:temperature previous))
+ value
+ previous)
+ value)))
+
+  Note that the above example produces the same result as
+  (clara.rules.accumulators/max :temperature :returns-fact true),
+  and users should prefer to use built-in accumulators when possible. This function exists to easily
+ convert arbitrary reduce functions to an accumulator.
+
+ Callers may optionally pass in an initial value (which defaults to nil),
+ a function to transform the value returned by the reduce (which defaults to identity),
+ and a function to combine two reduced results (which uses the reduce-fn to add new
+ items to the same reduced value by default)."
+
+ ([reduce-fn]
+ (reduce-to-accum reduce-fn nil))
+ ([reduce-fn initial-value]
+ (reduce-to-accum reduce-fn initial-value identity))
+ ([reduce-fn initial-value convert-return-fn]
+ (reduce-to-accum reduce-fn initial-value convert-return-fn nil))
+ ([reduce-fn initial-value convert-return-fn combine-fn]
+ (accum (cond-> {:initial-value initial-value
+ :reduce-fn reduce-fn
+ :convert-return-fn convert-return-fn}
+ combine-fn (assoc :combine-fn combine-fn)))))
+
+(let [grouping-fn (fnil conj [])]
+ (defn grouping-by
+ "Return a generic grouping accumulator. Behaves like clojure.core/group-by.
+
+ * `field` - required - The field of a fact to group by.
+ * `convert-return-fn` - optional - Converts the resulting grouped
+ data. Defaults to clojure.core/identity."
+ ([field]
+ (grouping-by field identity))
+ ([field convert-return-fn]
+ {:pre [(ifn? convert-return-fn)]}
+ (reduce-to-accum
+ (fn [m x]
+ (let [v (field x)]
+ (update m v grouping-fn x)))
+ {}
+ convert-return-fn))))
+
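+;; Illustrative sketch only (never evaluated): grouping hypothetical order maps by
+;; :status, using convert-return-fn to keep only the size of each group. Note the
+;; explicit clojure.core/count, since count is shadowed in this namespace.
+(comment
+  (def orders-by-status
+    (grouping-by :status
+                 (fn [groups]
+                   (into {}
+                         (map (fn [[status orders]]
+                                [status (clojure.core/count orders)]))
+                         groups)))))
+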
+(defn- comparison-based
+ "Creates a comparison-based result such as min or max"
+ [field comparator returns-fact]
+ (let [reduce-fn (fn [previous value]
+ (if previous
+ (if (comparator (field previous) (field value))
+ previous
+ value)
+ value))
+
+ convert-return-fn (if returns-fact
+ identity
+ field)]
+ (accum
+ {:reduce-fn reduce-fn
+ :convert-return-fn convert-return-fn})))
+
+(defn min
+ "Returns an accumulator that returns the minimum value of a given field.
+
+ The caller may provide the following options:
+
+ * :returns-fact Returns the fact rather than the field value if set to true. Defaults to false."
+ [field & {:keys [returns-fact]}]
+ (comparison-based field < returns-fact))
+
+(defn max
+ "Returns an accumulator that returns the maximum value of a given field.
+
+ The caller may provide the following options:
+
+ * :returns-fact Returns the fact rather than the field value if set to true. Defaults to false."
+ [field & {:keys [returns-fact]}]
+ (comparison-based field > returns-fact))
+
+(defn average
+ "Returns an accumulator that returns the average value of a given field."
+ [field]
+ (accum
+ {:initial-value [0 0]
+ :reduce-fn (fn [[value count] item]
+ [(+ value (field item)) (inc count)])
+ :retract-fn (fn [[value count] retracted]
+ [(- value (field retracted)) (dec count)])
+ :combine-fn (fn [[value1 count1] [value2 count2]]
+ [(+ value1 value2) (+ count1 count2)])
+ :convert-return-fn (fn [[value count]]
+ (if (= 0 count)
+ nil
+ (/ value count)))}))
+
+(defn sum
+ "Returns an accumulator that returns the sum of values of a given field"
+ [field]
+ (accum
+ {:initial-value 0
+ :reduce-fn (fn [total item]
+ (+ total (field item)))
+ :retract-fn (fn [total item]
+ (- total (field item)))
+ :combine-fn +}))
+
+(defn count
+ "Returns an accumulator that simply counts the number of matching facts"
+ []
+ (accum
+ {:initial-value 0
+ :reduce-fn (fn [count value] (inc count))
+ :retract-fn (fn [count retracted] (dec count))
+ :combine-fn +}))
+
+(defn exists
+ "Returns an accumulator that accumulates to true if at least one fact
+ exists and nil otherwise, the latter causing the accumulator condition to not match."
+ []
+ (assoc (count) :convert-return-fn (fn [v]
+ ;; This specifically needs to return nil rather than false if the pos? predicate is false so that
+ ;; the accumulator condition will fail to match; the accumulator will consider
+ ;; boolean false a valid match. See https://github.com/cerner/clara-rules/issues/182#issuecomment-217142418
+ ;; and the following comments for the original discussion around suppressing nil accumulator
+ ;; return values but propagating boolean false.
+ (when (pos? v)
+ true))))
+
+(defn distinct
+ "Returns an accumulator producing a distinct set of facts.
+ If given a field, returns a distinct set of values for that field."
+ ([] (distinct identity))
+ ([field]
+ (accum
+ {:initial-value {}
+ :reduce-fn (fn [freq-map value] (update freq-map (field value) (fnil inc 0)))
+ :retract-fn (fn [freq-map retracted-item]
+ (let [item-field (field retracted-item)
+ current (get freq-map item-field)]
+ (if (= 1 current)
+ (dissoc freq-map item-field)
+ (update freq-map item-field dec))))
+ :convert-return-fn (comp set keys)})))
+
+(defn all
+ "Returns an accumulator that preserves all accumulated items.
+ If given a field, returns all values in that field."
+ ([]
+ (accum
+ {:initial-value []
+ :reduce-fn (fn [items value] (conj items value))
+ :retract-fn (fn [items retracted] (drop-one-of items retracted))}))
+ ([field]
+ (accum
+ {:initial-value []
+ :reduce-fn (fn [items value] (conj items (field value)))
+ :retract-fn (fn [items retracted] (drop-one-of items (field retracted)))})))
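+
+;; Illustrative sketch only (never evaluated): accumulators are normally used from a
+;; rule or query's left-hand side. The `Temperature` fact and the clara.rules require
+;; are assumed; from another namespace this namespace is typically aliased as `acc`.
+(comment
+  (defrule record-hottest-reading
+    [?hottest <- (max :temperature :returns-fact true) :from [Temperature]]
+    =>
+    (println "Hottest reading:" ?hottest)))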
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 163a19d9..899c06b4 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -22,6 +22,7 @@
AccumulateWithJoinFilterNode
LocalTransport
Accumulator
+ NegationResult
ISystemFact]
[java.beans
PropertyDescriptor]
@@ -61,8 +62,8 @@
query-nodes :- {sc/Any QueryNode}
;; Map of id to one of the alpha or beta nodes (join, accumulate, etc).
id-to-node :- {sc/Num (sc/conditional
- :activation AlphaNode
- :else BetaNode)}
+ :activation AlphaNode
+ :else BetaNode)}
;; Function for sorting activation groups of rules for firing.
activation-group-sort-fn
;; Function that takes a rule and returns its activation group.
@@ -84,73 +85,6 @@
`defrecords`. Work around by supplying our own which does."
(clojure.reflect.JavaReflector. (clojure.lang.RT/makeClassLoader)))
-;; This technique borrowed from Prismatic's schema library.
-(defn compiling-cljs?
- "Return true if we are currently generating cljs code. Useful because cljx does not
- provide a hook for conditional macro expansion."
- []
- (boolean
- (when-let [n (find-ns 'cljs.analyzer)]
- (when-let [v (ns-resolve n '*cljs-file*)]
-
- ;; We perform this require only if we are compiling ClojureScript
- ;; so non-ClojureScript users do not need to pull in
- ;; that dependency.
- (require 'clara.macros)
- @v))))
-
-(defn get-namespace-info
- "Get metadata about the given namespace."
- [namespace]
- (when-let [n (and (compiling-cljs?) (find-ns 'cljs.env))]
- (when-let [v (ns-resolve n '*compiler*)]
- (get-in @@v [ :cljs.analyzer/namespaces namespace]))))
-
-(defn cljs-ns
- "Returns the ClojureScript namespace being compiled during Clojurescript compilation."
- []
- (if (compiling-cljs?)
- (-> 'cljs.analyzer (find-ns) (ns-resolve '*cljs-ns*) deref)
- nil))
-
-(defn resolve-cljs-sym
- "Resolves a ClojureScript symbol in the given namespace."
- [ns-sym sym]
- (let [ns-info (get-namespace-info ns-sym)]
- (if (namespace sym)
-
- ;; Symbol qualified by a namespace, so look it up in the requires info.
- (if-let [source-ns (get-in ns-info [:requires (symbol (namespace sym))])]
- (symbol (name source-ns) (name sym))
- ;; Not in the requires block, so assume the qualified name is a refers and simply return the symbol.
- sym)
-
- ;; Symbol is unqualified, so check in the uses block.
- (if-let [source-ns (get-in ns-info [:uses sym])]
- (symbol (name source-ns) (name sym))
-
- ;; Symbol not found in eiher block, so attempt to retrieve it from
- ;; the current namespace.
- (if (get-in (get-namespace-info ns-sym) [:defs sym])
- (symbol (name ns-sym) (name sym))
- nil)))))
-
-(defn- get-cljs-accessors
- "Returns accessors for ClojureScript. WARNING: this touches
- ClojureScript implementation details that may change."
- [sym]
- (let [resolved (resolve-cljs-sym (cljs-ns) sym)
- constructor (symbol (str "->" (name resolved)))
- namespace-info (get-namespace-info (symbol (namespace resolved)))
- constructor-info (get-in namespace-info [:defs constructor])]
-
- (if constructor-info
- (into {}
- (for [field (first (:method-params constructor-info))]
- [field (keyword (name field))]))
- [])))
-
-
(defn- get-field-accessors
"Given a clojure.lang.IRecord subclass, returns a map of field name to a
symbol representing the function used to access it."
@@ -181,30 +115,18 @@
(symbol (str "." (.getName read-method)))])))
(defn effective-type [type]
- (if (compiling-cljs?)
- type
-
- (if (symbol? type)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name type))
- type)))
+ (if (symbol? type)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name type))
+ type))
(defn get-fields
"Returns a map of field name to a symbol representing the function used to access it."
[type]
- (if (compiling-cljs?)
-
- ;; Get ClojureScript fields.
- (if (symbol? type)
- (get-cljs-accessors type)
- [])
-
- ;; Attempt to load the corresponding class for the type if it's a symbol.
- (let [type (effective-type type)]
-
- (cond
- (isa? type clojure.lang.IRecord) (get-field-accessors type)
- (class? type) (get-bean-accessors type) ; Treat unrecognized classes as beans.
- :default []))))
+ (let [type (effective-type type)]
+ (cond
+ (isa? type clojure.lang.IRecord) (get-field-accessors type)
+ (class? type) (get-bean-accessors type) ; Treat unrecognized classes as beans.
+ :else [])))
(defn- equality-expression? [expression]
(let [qualify-when-sym #(when-let [resolved (and (symbol? %)
@@ -250,16 +172,16 @@
(if (empty? exp-seq)
`(deref ~'?__bindings__)
- (let [ [exp & rest-exp] exp-seq
- variables (into #{}
- (filter (fn [item]
- (and (symbol? item)
- (= \? (first (name item)))
- (not (equality-only-variables item))))
- exp))
- expression-values (remove variables (rest exp))
- binds-variables? (and (equality-expression? exp)
- (seq variables))
+ (let [[exp & rest-exp] exp-seq
+ variables (into #{}
+ (filter (fn [item]
+ (and (symbol? item)
+ (= \? (first (name item)))
+ (not (equality-only-variables item))))
+ exp))
+ expression-values (remove variables (rest exp))
+ binds-variables? (and (equality-expression? exp)
+ (seq variables))
;; if we intend on binding any variables at this level of the
;; expression then future layers should not be able to rebind them.
@@ -297,14 +219,12 @@
;; equal.
~(if (> (count expression-values) 1)
-
`(if ~(cons '= expression-values) ~compiled-rest nil)
;; No additional values to check, so move on to the rest of
;; the expression
- compiled-rest)
- )
+ compiled-rest))
- ;; A contraint that is empty doesn't need to be added as a check,
+;; A constraint that is empty doesn't need to be added as a check,
;; simply move on to the rest
(empty? exp)
compiled-rest
@@ -314,7 +234,6 @@
:else
`(if ~exp ~compiled-rest nil))))))
-
(defn flatten-expression
"Flattens expression as clojure.core/flatten does, except will flatten
anything that is a collection rather than specifically sequential."
@@ -447,8 +366,8 @@
assignments (sequence
(comp
- (filter rhs-bindings-used)
- (mapcat build-token-assignment))
+ (filter rhs-bindings-used)
+ (mapcat build-token-assignment))
binding-keys)
;; The destructured environment, if any.
@@ -527,9 +446,9 @@
`(fn ~fn-name
[~'?__token__
- ~(add-meta '?__fact__ type)
- ~'?__element-bindings__
- ~destructured-env]
+ ~(add-meta '?__fact__ type)
+ ~'?__element-bindings__
+ ~destructured-env]
(let [~@assignments
~'?__bindings__ (atom {})]
~(compile-constraints constraints equality-only-variables)))))
@@ -646,8 +565,8 @@
(when (and (seq? form)
(not (equality-expression? form))
(some (fn [sym] (and (symbol? sym)
- (.startsWith (name sym) "?")
- (not (previously-bound sym))))
+ (.startsWith (name sym) "?")
+ (not (previously-bound sym))))
(flatten-expression form)))
(reset! found-complex true))
@@ -668,15 +587,15 @@
accumulator (:accumulator condition)
result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
condition (cond
- is-negation (second condition)
- accumulator (:from condition)
- :default condition)
+ is-negation (second condition)
+ accumulator (:from condition)
+ :else condition)
node-type (cond
- is-negation :negation
- is-exists :exists
- accumulator :accumulator
- (:type condition) :join
- :else :test)]
+ is-negation :negation
+ is-exists :exists
+ accumulator :accumulator
+ (:type condition) :join
+ :else :test)]
node-type))
@@ -694,9 +613,9 @@
;; This is an :exists condition, so expand it
;; into an accumulator and a test.
(let [exists-count (gensym "?__gen__")]
- [{:accumulator '(clara.rules.accumulators/exists)
- :from (second condition)
- :result-binding (keyword exists-count)}])
+ [{:accumulator '(clara.rules.accumulators/exists)
+ :from (second condition)
+ :result-binding (keyword exists-count)}])
;; This is not an :exists condition, so do not change it.
[condition])]
@@ -771,8 +690,7 @@
[nested-condition])]
leaf-condition)
-
- ;; A top level and of nested conditions, so just use them
+;; A top-level :and of nested conditions, so just use them
:and
(rest dnf-condition)
@@ -782,13 +700,12 @@
(reduce
(fn [{:keys [bound unbound condition is-accumulator]} leaf-condition]
-
- ;; The effective leaf for variable purposes may be contained in a negation or
+;; The effective leaf for variable purposes may be contained in a negation or
;; an accumulator, so extract it.
(let [effective-leaf (condp = (condition-type leaf-condition)
- :accumulator (:from leaf-condition)
- :negation (second leaf-condition)
- leaf-condition)
+ :accumulator (:from leaf-condition)
+ :negation (second leaf-condition)
+ leaf-condition)
constraints (:constraints effective-leaf)
@@ -877,8 +794,8 @@
;; Get the subset of variables that cannot be satisfied.
(let [unsatisfiable (set/difference
- (apply set/union (map :unbound still-unsatisfied))
- bound-variables)]
+ (apply set/union (map :unbound still-unsatisfied))
+ bound-variables)]
(throw (ex-info (str "Using variable that is not previously bound. This can happen "
"when an expression uses a previously unbound variable, "
"or if a variable is referenced in a nested part of a parent "
@@ -914,9 +831,9 @@
accumulator (:accumulator condition)
result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
condition (cond
- (= :negation node-type) (second condition)
- accumulator (:from condition)
- :default condition)
+ (= :negation node-type) (second condition)
+ accumulator (:from condition)
+ :else condition)
;; Convert a test within a negation to a negation of the test. This is necessary
;; because negation nodes expect an additional condition to match against.
@@ -939,16 +856,16 @@
;; If there are any non-equality unitifications, create a join with filter expression to handle them.
join-filter-expressions (if (seq non-equality-unifications)
- (assoc condition :constraints (filterv non-equality-unifications (:constraints condition)))
+ (assoc condition :constraints (filterv non-equality-unifications (:constraints condition)))
- nil)
+ nil)
;; Remove instances of non-equality constraints from accumulator
;; and negation nodes, since those are handled with specialized node implementations.
condition (if (seq non-equality-unifications)
(assoc condition
- :constraints (into [] (remove non-equality-unifications (:constraints condition)))
- :original-constraints (:constraints condition))
+ :constraints (into [] (remove non-equality-unifications (:constraints condition)))
+ :original-constraints (:constraints condition))
condition)
@@ -961,30 +878,30 @@
constraint-bindings)
new-bindings (set/difference (variables-as-keywords (:constraints condition))
- parent-bindings)
+ parent-bindings)
join-filter-bindings (if join-filter-expressions
(variables-as-keywords join-filter-expressions)
nil)]
- (cond->
- {:node-type node-type
- :condition condition
- :new-bindings new-bindings
- :used-bindings (set/union cond-bindings join-filter-bindings)}
+ (cond->
+ {:node-type node-type
+ :condition condition
+ :new-bindings new-bindings
+ :used-bindings (set/union cond-bindings join-filter-bindings)}
- (seq env) (assoc :env env)
+ (seq env) (assoc :env env)
;; Add the join bindings to join, accumulator or negation nodes.
- (#{:join :negation :accumulator} node-type) (assoc :join-bindings (set/intersection cond-bindings parent-bindings))
+ (#{:join :negation :accumulator} node-type) (assoc :join-bindings (set/intersection cond-bindings parent-bindings))
- accumulator (assoc :accumulator accumulator)
+ accumulator (assoc :accumulator accumulator)
- result-binding (assoc :result-binding result-binding)
+ result-binding (assoc :result-binding result-binding)
- join-filter-expressions (assoc :join-filter-expressions join-filter-expressions)
+ join-filter-expressions (assoc :join-filter-expressions join-filter-expressions)
- join-filter-bindings (assoc :join-filter-join-bindings (set/intersection join-filter-bindings parent-bindings)))))
+ join-filter-bindings (assoc :join-filter-join-bindings (set/intersection join-filter-bindings parent-bindings)))))
(sc/defn ^:private add-node :- schema/BetaGraph
"Adds a node to the beta graph."
@@ -1004,11 +921,10 @@
#{}))
beta-graph (if (#{:production :query} (:node-type target-node))
- (assoc-in beta-graph [:id-to-production-node target-id] target-node)
- (assoc-in beta-graph [:id-to-condition-node target-id] target-node))]
+ (assoc-in beta-graph [:id-to-production-node target-id] target-node)
+ (assoc-in beta-graph [:id-to-condition-node target-id] target-node))]
-
- ;; Associate the forward and backward edges.
+;; Associate the forward and backward edges.
(reduce (fn [beta-graph source-id]
(let [forward-path [:forward-edges source-id]
forward-previous (get-in beta-graph forward-path)
@@ -1029,7 +945,6 @@
beta-graph
source-ids)))
-
(declare add-production)
(sc/defn ^:private extract-negation :- {:new-expression schema/Condition
@@ -1063,8 +978,8 @@
;; See https://github.com/cerner/clara-rules/issues/304 for more details
;; and a case that behaves incorrectly without this check.
ancestor-bindings-in-negation-expr (set/intersection
- (variables-as-keywords negation-expr)
- ancestor-bindings)
+ (variables-as-keywords negation-expr)
+ ancestor-bindings)
ancestor-bindings-insertion-form (into {}
(map (fn [binding]
@@ -1077,9 +992,7 @@
(list '= (-> b name symbol)
(list b 'ancestor-bindings)))
- modified-expression `[:not {:type ~(if (compiling-cljs?)
- 'clara.rules.engine/NegationResult
- 'clara.rules.engine.NegationResult)
+ modified-expression `[:not {:type clara.rules.engine.NegationResult
:constraints [(~'= ~gen-rule-name ~'gen-rule-name)
~@(map ancestor-binding->restriction-form
ancestor-bindings-in-negation-expr)]}]
@@ -1099,7 +1012,6 @@
;; Add the generated rule to the beta network.
-
beta-with-negations (add-production generated-rule beta-graph create-id-fn)]
{:new-expression modified-expression
@@ -1202,7 +1114,6 @@
graph-with-node (add-node beta-graph parent-ids node-id node)]
-
(recur graph-with-node
[node-id]
all-bindings
@@ -1320,17 +1231,16 @@
:available-bindings ancestor-bindings
:query (:name production)}))))))))
-
(sc/defn to-beta-graph :- schema/BetaGraph
"Produces a description of the beta network."
[productions :- #{schema/Production}
create-id-fn :- IFn]
(reduce (fn [beta-graph production]
- (binding [*compile-ctx* {:production production}]
- (add-production production beta-graph create-id-fn)))
+ (binding [*compile-ctx* {:production production}]
+ (add-production production beta-graph create-id-fn)))
- empty-beta-graph
- productions))
+ empty-beta-graph
+ productions))
(sc/defn ^:private root-node? :- sc/Bool
"A helper function to determine if the node-id provided is a root node. A given node would be considered a root-node,
@@ -1352,139 +1262,139 @@
(let [backward-edges (:backward-edges beta-graph)
handle-expr (fn [id->expr s-expr id expr-key compilation-ctx]
(assoc id->expr
- [id expr-key]
- [s-expr (assoc compilation-ctx expr-key s-expr)]))
+ [id expr-key]
+ [s-expr (assoc compilation-ctx expr-key s-expr)]))
;; If extract-exprs ever became a hot spot, this could be changed out to use more java interop.
id->expr (reduce (fn [prev alpha-node]
- (let [{:keys [id condition env]} alpha-node
- {:keys [type constraints fact-binding args]} condition
- cmeta (meta condition)]
- (handle-expr prev
- (with-meta (compile-condition
- type id (first args) constraints
- fact-binding env)
+ (let [{:keys [id condition env]} alpha-node
+ {:keys [type constraints fact-binding args]} condition
+ cmeta (meta condition)]
+ (handle-expr prev
+ (with-meta (compile-condition
+ type id (first args) constraints
+ fact-binding env)
;; Remove all metadata but file and line number
;; to protect from evaluating unsafe metadata
;; See PR 243 for more detailed discussion
- (select-keys cmeta [:line :file]))
- id
- :alpha-expr
- {:file (or (:file cmeta) *file*)
- :compile-ctx {:condition condition
- :env env
- :msg "compiling alpha node"}})))
- {}
- alpha-graph)
+ (select-keys cmeta [:line :file]))
+ id
+ :alpha-expr
+ {:file (or (:file cmeta) *file*)
+ :compile-ctx {:condition condition
+ :env env
+ :msg "compiling alpha node"}})))
+ {}
+ alpha-graph)
id->expr (reduce-kv (fn [prev id production-node]
- (let [production (-> production-node :production)]
- (handle-expr prev
- (with-meta (compile-action id
- (:bindings production-node)
- (:rhs production)
- (:env production))
- (meta (:rhs production)))
- id
- :action-expr
+ (let [production (-> production-node :production)]
+ (handle-expr prev
+ (with-meta (compile-action id
+ (:bindings production-node)
+ (:rhs production)
+ (:env production))
+ (meta (:rhs production)))
+ id
+ :action-expr
;; ProductionNode expressions can be arbitrary code, therefore we need the
;; ns where the production was define so that we can compile the expression
;; later.
- {:ns (:ns-name production)
- :file (-> production :rhs meta :file)
- :compile-ctx {:production production
- :msg "compiling production node"}})))
+ {:ns (:ns-name production)
+ :file (-> production :rhs meta :file)
+ :compile-ctx {:production production
+ :msg "compiling production node"}})))
id->expr
- (:id-to-production-node beta-graph))]
+ (:id-to-production-node beta-graph))]
(reduce-kv
- (fn [prev id beta-node]
- (let [condition (:condition beta-node)
- condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
- condition)]
- (case (or (:node-type beta-node)
+ (fn [prev id beta-node]
+ (let [condition (:condition beta-node)
+ condition (if (symbol? condition)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ condition)]
+ (case (or (:node-type beta-node)
;; If there is no :node-type then the node is the ::root-condition
;; however, creating a case for nil could potentially cause weird effects if something about the
;; compilation of the beta graph changes. Therefore making an explicit case for ::root-condition
;; and if there was anything that wasn't ::root-condition this case statement will fail rather than
;; failing somewhere else.
- beta-node)
- ::root-condition prev
+ beta-node)
+ ::root-condition prev
- :join (if (or (root-node? backward-edges id)
- (not (:join-filter-expressions beta-node)))
+ :join (if (or (root-node? backward-edges id)
+ (not (:join-filter-expressions beta-node)))
;; This is either a RootJoin or HashJoin node, in either case they do not have an expression
;; to capture.
- prev
- (handle-expr prev
- (compile-join-filter id
- "ExpressionJoinNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling expression join node"}}))
- :negation (if (:join-filter-expressions beta-node)
- (handle-expr prev
- (compile-join-filter id
- "NegationWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling negation with join filter node"}})
- prev)
- :test (handle-expr prev
- (compile-test id (:constraints condition) (:env beta-node))
- id
- :test-expr
- {:compile-ctx {:condition condition
- :env (:env beta-node)
- :msg "compiling test node"}})
- :accumulator (cond-> (handle-expr prev
- (compile-accum id
- (if (:join-filter-expressions beta-node)
- "AccumulateWithJoinFilterNode"
- "AccumulateNode")
- (:accumulator beta-node)
- (:env beta-node))
- id
- :accum-expr
- {:compile-ctx {:condition condition
- :accumulator (:accumulator beta-node)
- :env (:env beta-node)
- :msg "compiling accumulator"}})
-
- (:join-filter-expressions beta-node)
- (handle-expr (compile-join-filter id
- "AccumulateWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling accumulate with join filter node"}}))
- :query prev
+ prev
+ (handle-expr prev
+ (compile-join-filter id
+ "ExpressionJoinNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling expression join node"}}))
+ :negation (if (:join-filter-expressions beta-node)
+ (handle-expr prev
+ (compile-join-filter id
+ "NegationWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling negation with join filter node"}})
+ prev)
+ :test (handle-expr prev
+ (compile-test id (:constraints condition) (:env beta-node))
+ id
+ :test-expr
+ {:compile-ctx {:condition condition
+ :env (:env beta-node)
+ :msg "compiling test node"}})
+ :accumulator (cond-> (handle-expr prev
+ (compile-accum id
+ (if (:join-filter-expressions beta-node)
+ "AccumulateWithJoinFilterNode"
+ "AccumulateNode")
+ (:accumulator beta-node)
+ (:env beta-node))
+ id
+ :accum-expr
+ {:compile-ctx {:condition condition
+ :accumulator (:accumulator beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulator"}})
+
+ (:join-filter-expressions beta-node)
+ (handle-expr (compile-join-filter id
+ "AccumulateWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulate with join filter node"}}))
+ :query prev
;; This error should only be thrown if there are changes to the compilation of the beta-graph
;; such as an addition of a node type.
- (throw (ex-info "Invalid node type encountered while compiling rulebase."
- {:node beta-node})))))
- id->expr
- (:id-to-condition-node beta-graph))))
+ (throw (ex-info "Invalid node type encountered while compiling rulebase."
+ {:node beta-node})))))
+ id->expr
+ (:id-to-condition-node beta-graph))))
(sc/defn compile-exprs :- schema/NodeFnLookup
"Takes a map in the form produced by extract-exprs and evaluates the values(expressions) of the map in a batched manner.
@@ -1569,25 +1479,25 @@
;; Use an specialized root node for efficiency in this case.
(if is-root
(eng/->RootJoinNode
- id
- condition
- children
- join-bindings)
+ id
+ condition
+ children
+ join-bindings)
;; If the join operation includes arbitrary expressions
;; that can't expressed as a hash join, we must use the expressions
(if (:join-filter-expressions beta-node)
(eng/->ExpressionJoinNode
- id
- condition
- (compiled-expr-fn id :join-filter-expr)
- children
- join-bindings)
+ id
+ condition
+ (compiled-expr-fn id :join-filter-expr)
+ children
+ join-bindings)
(eng/->HashJoinNode
- id
- condition
- children
- join-bindings)))
+ id
+ condition
+ children
+ join-bindings)))
:negation
;; Check to see if the negation includes an
@@ -1595,23 +1505,23 @@
;; and use the appropriate node type.
(if (:join-filter-expressions beta-node)
(eng/->NegationWithJoinFilterNode
- id
- condition
- (compiled-expr-fn id :join-filter-expr)
- children
- join-bindings)
+ id
+ condition
+ (compiled-expr-fn id :join-filter-expr)
+ children
+ join-bindings)
(eng/->NegationNode
- id
- condition
- children
- join-bindings))
+ id
+ condition
+ children
+ join-bindings))
:test
(eng/->TestNode
- id
- env
- (compiled-expr-fn id :test-expr)
- children)
+ id
+ env
+ (compiled-expr-fn id :test-expr)
+ children)
:accumulator
;; We create an accumulator that accepts the environment for the beta node
@@ -1628,44 +1538,44 @@
(if (:join-filter-expressions beta-node)
(eng/->AccumulateWithJoinFilterNode
- id
+ id
;; Create an accumulator structure for use when examining the node or the tokens
;; it produces.
- {:accumulator (:accumulator beta-node)
+ {:accumulator (:accumulator beta-node)
;; Include the original filter expressions in the constraints for inspection tooling.
- :from (update-in condition [:constraints]
- into (-> beta-node :join-filter-expressions :constraints))}
- compiled-accum
- (compiled-expr-fn id :join-filter-expr)
- (:result-binding beta-node)
- children
- join-bindings
- (:new-bindings beta-node))
+ :from (update-in condition [:constraints]
+ into (-> beta-node :join-filter-expressions :constraints))}
+ compiled-accum
+ (compiled-expr-fn id :join-filter-expr)
+ (:result-binding beta-node)
+ children
+ join-bindings
+ (:new-bindings beta-node))
;; All unification is based on equality, so just use the simple accumulate node.
(eng/->AccumulateNode
- id
+ id
;; Create an accumulator structure for use when examining the node or the tokens
;; it produces.
- {:accumulator (:accumulator beta-node)
- :from condition}
- compiled-accum
- (:result-binding beta-node)
- children
- join-bindings
- (:new-bindings beta-node))))
+ {:accumulator (:accumulator beta-node)
+ :from condition}
+ compiled-accum
+ (:result-binding beta-node)
+ children
+ join-bindings
+ (:new-bindings beta-node))))
:production
(eng/->ProductionNode
- id
- production
- (compiled-expr-fn id :action-expr))
+ id
+ production
+ (compiled-expr-fn id :action-expr))
:query
(eng/->QueryNode
- id
- query
- (:params query)))))
+ id
+ query
+ (:params query)))))
(sc/defn ^:private compile-beta-graph :- {sc/Int sc/Any}
"Compile the beta description to the nodes used at runtime."
@@ -1701,8 +1611,8 @@
;; Get the children. The children should be sorted because the
;; id-to-compiled-nodes map is sorted.
children (->> (get forward-edges id-to-compile)
- (select-keys id-to-compiled-nodes)
- (vals))]
+ (select-keys id-to-compiled-nodes)
+ (vals))]
;; Sanity check for our logic...
(assert (= (count children)
@@ -1725,7 +1635,6 @@
(sorted-map)
ids-to-compile)))
-
(sc/defn to-alpha-graph :- [schema/AlphaNode]
"Returns a sequence of [condition-fn, [node-ids]] tuples to represent the alpha side of the network."
[beta-graph :- schema/BetaGraph
@@ -1757,15 +1666,13 @@
;; Compile conditions into functions.
(vec
(for [[[condition env] node-ids] condition-to-node-entries
- :when (:type condition) ; Exclude test conditions.
- ]
+ :when (:type condition)] ; Exclude test conditions.
(cond-> {:id (create-id-fn)
:condition condition
:beta-children (distinct node-ids)}
(seq env) (assoc :env env))))))
-
(sc/defn compile-alpha-nodes :- [{:id sc/Int
:type sc/Any
:alpha-fn sc/Any ;; TODO: is a function...
@@ -1778,7 +1685,7 @@
:type (effective-type (:type condition))
:alpha-fn (first (safe-get expr-fn-lookup [id :alpha-expr]))
:children beta-children}
- env (assoc :env env))))
+ env (assoc :env env))))
;; Wrap the fact-type so that Clojure equality and hashcode semantics are used
;; even though this is placed in a Java map.
@@ -1878,7 +1785,6 @@
(java.util.Collections/unmodifiableList return-list))))))
-
(sc/defn build-network
"Constructs the network from compiled beta tree and condition functions."
[id-to-node :- {sc/Int sc/Any}
@@ -2049,8 +1955,8 @@
exprs (if omit-compile-ctx
(into {}
(map
- (fn [[k [expr ctx]]]
- [k [expr (dissoc ctx :compile-ctx)]]))
+ (fn [[k [expr ctx]]]
+ [k [expr (dissoc ctx :compile-ctx)]]))
exprs)
exprs)
diff --git a/src/main/clojure/clara/rules/dsl.clj b/src/main/clojure/clara/rules/dsl.clj
index 0f1aeec0..aa98fc40 100644
--- a/src/main/clojure/clara/rules/dsl.clj
+++ b/src/main/clojure/clara/rules/dsl.clj
@@ -1,15 +1,8 @@
(ns clara.rules.dsl
"Implementation of the defrule-style DSL for Clara. Most users should simply use the clara.rules namespace."
- (:require [clojure.reflect :as reflect]
- [clojure.core.reducers :as r]
- [clojure.set :as s]
- [clojure.string :as string]
- [clojure.walk :as walk]
- [clara.rules.engine :as eng]
+ (:require [clojure.walk :as walk]
[clara.rules.compiler :as com]
- [clara.rules.platform :as platform]
- [clara.rules.schema :as schema]
- [schema.core :as sc])
+ [clara.rules.platform :as platform])
(:refer-clojure :exclude [qualified-keyword?]))
;; Let operators be symbols or keywords.
@@ -20,13 +13,13 @@
[x] (and (symbol? x) (= "=>" (name x))))
(defn split-lhs-rhs
- "Given a rule with the =>, splits the left- and right-hand sides."
- [rule-body]
- (let [[lhs [sep & rhs]] (split-with #(not (separator? %)) rule-body)]
+ "Given a rule with the =>, splits the left- and right-hand sides."
+ [rule-body]
+ (let [[lhs [sep & rhs]] (split-with #(not (separator? %)) rule-body)]
- {:lhs lhs
- :rhs (when-not (empty? rhs)
- (conj rhs 'do))}))
+ {:lhs lhs
+ :rhs (when-not (empty? rhs)
+ (conj rhs 'do))}))
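+
+;; Illustrative only (hypothetical fact type and rule body):
+;;   (split-lhs-rhs '([Temperature (< temperature 20)] => (println "cold!")))
+;;   ;; => {:lhs ([Temperature (< temperature 20)]), :rhs (do (println "cold!"))}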
(defn- throw-dsl-ex
"Throws an exception indicating a failure parsing a form."
@@ -49,12 +42,7 @@
"Creates a condition with the given optional result binding when parsing a rule."
[condition result-binding expr-meta]
(let [type (if (symbol? (first condition))
- (if-let [resolved (and
- ;; If we are compiling ClojureScript rules we don't want
- ;; to resolve the symbol in the ClojureScript compiler's
- ;; Clojure environment. See issue 300.
- (not (com/compiling-cljs?))
- (resolve (first condition)))]
+ (if-let [resolved (resolve (first condition))]
;; If the type resolves to a var, grab its contents for the match.
(if (var? resolved)
@@ -95,12 +83,12 @@
(with-meta
(cond-> {:type type
:constraints constraints}
- args (assoc :args args)
- result-binding (assoc :fact-binding result-binding))
+ args (assoc :args args)
+ result-binding (assoc :fact-binding result-binding))
(if (seq constraints)
(assoc (meta (first constraints))
- :file *file*)))))
+ :file *file*)))))
(defn- parse-condition-or-accum
"Parse an expression that could be a condition or an accumulator."
@@ -131,80 +119,69 @@
[expression expr-meta]
(cond
- (contains? ops (first expression))
- (into [(keyword (name (first expression)))] ; Ensure expression operator is a keyword.
- (for [nested-expr (rest expression)]
- (parse-expression nested-expr expr-meta)))
-
- (contains? #{'test :test} (first expression))
- (if (= 1 (count expression))
- (throw-dsl-ex (str "Empty :test conditions are not allowed.")
- {}
- expr-meta)
- {:constraints (vec (rest expression))})
+ (contains? ops (first expression))
+ (into [(keyword (name (first expression)))] ; Ensure expression operator is a keyword.
+ (for [nested-expr (rest expression)]
+ (parse-expression nested-expr expr-meta)))
+ (contains? #{'test :test} (first expression))
+ (if (= 1 (count expression))
+ (throw-dsl-ex (str "Empty :test conditions are not allowed.")
+ {}
+ expr-meta)
+ {:constraints (vec (rest expression))})
- :default
- (parse-condition-or-accum expression expr-meta)))
+ :else
+ (parse-condition-or-accum expression expr-meta)))
(defn- maybe-qualify
"Attempt to qualify the given symbol, returning the symbol itself
if we can't qualify it for any reason."
[env sym]
(let [env (set env)]
- (if (com/compiling-cljs?)
-
- ;; Qualify the symbol using the CLJS analyzer.
- (if-let [resolved (and (symbol? sym)
- (not (env sym))
- (com/resolve-cljs-sym (com/cljs-ns) sym))]
- resolved
- sym)
-
- (if (and (symbol? sym)
- (not= '.. sym) ; Skip the special .. host interop macro.
- (.endsWith (name sym) "."))
-
- ;; The . suffix indicates a type constructor, so we qualify the type instead, then
- ;; re-add the . to the resolved type.
- (-> (subs (name sym)
- 0
- (dec (count (name sym)))) ; Get the name without the trailing dot.
- (symbol) ; Convert it back to a symbol.
- ^Class (resolve) ; Resolve into the type class.
- (.getName) ; Get the qualified name.
- (str ".") ; Re-add the dot for the constructor, which is now qualified
- (symbol)) ; Convert back into a symbol used at compile time.
-
- ;; Qualify a normal clojure symbol.
- (if (and (symbol? sym) ; Only qualify symbols...
- (not (env sym))) ; not in env (env contains locals).) ; that we can resolve
-
- (cond
- ;; If it's a Java class, simply qualify it.
- (instance? Class (resolve sym))
- (symbol (.getName ^Class (resolve sym)))
-
- ;; Don't qualify clojure.core for portability, since CLJS uses a different namespace.
- (and (resolve sym)
- (not (= "clojure.core"
- (str (ns-name (:ns (meta (resolve sym)))))))
- (name sym))
- (symbol (str (ns-name (:ns (meta (resolve sym))))) (name sym))
-
- ;; See if it's a static method call and qualify it.
- (and (namespace sym)
- (not (resolve sym))
- (instance? Class (resolve (symbol (namespace sym))))) ; The namespace portion is the class name we try to resolve.
- (symbol (.getName ^Class (resolve (symbol (namespace sym))))
- (name sym))
-
- ;; Finally, just return the symbol unchanged if it doesn't match anything above,
- ;; assuming it's a local parameter or variable.n
- :default
- sym)
-
- sym)))))
+ (if (and (symbol? sym)
+ (not= '.. sym) ; Skip the special .. host interop macro.
+ (.endsWith (name sym) "."))
+
+ ;; The . suffix indicates a type constructor, so we qualify the type instead, then
+ ;; re-add the . to the resolved type.
+ (-> (subs (name sym)
+ 0
+ (dec (count (name sym)))) ; Get the name without the trailing dot.
+ (symbol) ; Convert it back to a symbol.
+ ^Class (resolve) ; Resolve into the type class.
+ (.getName) ; Get the qualified name.
+ (str ".") ; Re-add the dot for the constructor, which is now qualified
+ (symbol)) ; Convert back into a symbol used at compile time.
+
+ ;; Qualify a normal clojure symbol.
+ (if (and (symbol? sym) ; Only qualify symbols...
+ (not (env sym))) ; not in env (env contains locals).) ; that we can resolve
+
+ (cond
+ ;; If it's a Java class, simply qualify it.
+ (instance? Class (resolve sym))
+ (symbol (.getName ^Class (resolve sym)))
+
+ ;; Don't qualify clojure.core for portability, since CLJS uses a different namespace.
+ (and (resolve sym)
+ (not (= "clojure.core"
+ (str (ns-name (:ns (meta (resolve sym)))))))
+ (name sym))
+ (symbol (str (ns-name (:ns (meta (resolve sym))))) (name sym))
+
+ ;; See if it's a static method call and qualify it.
+ (and (namespace sym)
+ (not (resolve sym))
+ (instance? Class (resolve (symbol (namespace sym))))) ; The namespace portion is the class name we try to resolve.
+ (symbol (.getName ^Class (resolve (symbol (namespace sym))))
+ (name sym))
+
+ ;; Finally, just return the symbol unchanged if it doesn't match anything above,
+           ;; assuming it's a local parameter or variable.
+ :else
+ sym)
+ sym))))
(defn- qualify-meta
"Qualify metadata associated with the symbol."
@@ -247,7 +224,7 @@
(let [conditions (into [] (for [expr lhs]
(parse-expression expr rule-meta)))
- rule {:ns-name (list 'quote (ns-name (if (com/compiling-cljs?) (com/cljs-ns) *ns*)))
+ rule {:ns-name (list 'quote (ns-name *ns*))
:lhs (list 'quote
(mapv #(resolve-vars % (destructure-syms %))
conditions))
@@ -305,7 +282,6 @@
[prod-name]
(cond
(qualified-keyword? prod-name) prod-name
- (com/compiling-cljs?) (str (name (com/cljs-ns)) "/" (name prod-name))
:else (str (name (ns-name *ns*)) "/" (name prod-name))))
(defn build-rule
diff --git a/src/main/clojure/clara/rules/durability.clj b/src/main/clojure/clara/rules/durability.clj
index 5d6e95b9..41f1149a 100644
--- a/src/main/clojure/clara/rules/durability.clj
+++ b/src/main/clojure/clara/rules/durability.clj
@@ -12,16 +12,10 @@
(:require [clara.rules.engine :as eng]
[clara.rules.compiler :as com]
[clara.rules.memory :as mem]
- [clojure.set :as set]
[schema.core :as s])
- (:import [clara.rules.compiler
- Rulebase]
- [clara.rules.memory
- RuleOrderedActivation]
- [java.util
- List
- Map
- IdentityHashMap]))
+ (:import [clara.rules.compiler Rulebase]
+ [clara.rules.memory RuleOrderedActivation]
+ [java.util List Map]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; Rulebase serialization helpers.
@@ -84,8 +78,8 @@
"Gets the numeric index for the given struct from the clj-struct-holder."
[fact]
(-> clj-struct-holder
- ^Map (.get)
- (.get fact)))
+ ^Map (.get)
+ (.get fact)))
(defn clj-struct-holder-add-fact-idx!
"Adds the fact to the clj-struct-holder with a new index. This can later be retrieved
@@ -96,26 +90,26 @@
;; at read-time. This could have a cast to long here, but it would waste time
;; unnecessarily.
(-> clj-struct-holder
- ^Map (.get)
- (.put fact (-> clj-struct-holder
- ^Map (.get)
- (.size)))))
+ ^Map (.get)
+ (.put fact (-> clj-struct-holder
+ ^Map (.get)
+ (.size)))))
(defn clj-struct-idx->obj
"The reverse of clj-struct->idx. Returns an object for the given index found
in clj-struct-holder."
[id]
(-> clj-struct-holder
- ^List (.get)
- (.get id)))
+ ^List (.get)
+ (.get id)))
(defn clj-struct-holder-add-obj!
"The reverse of clj-struct-holder-add-fact-idx!. Adds the object to the clj-struct-holder
at the next available index."
[fact]
(-> clj-struct-holder
- ^List (.get)
- (.add fact))
+ ^List (.get)
+ (.add fact))
fact)
(defn create-map-entry
@@ -252,13 +246,13 @@
(if-let [idx (find-mem-internal-idx internal-seen token)]
idx
(let [indexed (-> token
- (update :matches
- #(mapv (fn [[fact node-id]]
- [(find-index-or-add! seen fact)
- node-id])
- %))
- (update :bindings
- #(index-bindings seen %)))]
+ (update :matches
+ #(mapv (fn [[fact node-id]]
+ [(find-index-or-add! seen fact)
+ node-id])
+ %))
+ (update :bindings
+ #(index-bindings seen %)))]
(add-mem-internal-idx! internal-seen token indexed))))
(defn index-alpha-memory [internal-seen seen amem]
@@ -268,10 +262,10 @@
(mapv #(if-let [idx (find-mem-internal-idx internal-seen %)]
idx
(let [indexed (-> %
- (update :fact
- index-update-fact-fn)
- (update :bindings
- index-update-bindings-fn))]
+ (update :fact
+ index-update-fact-fn)
+ (update :bindings
+ index-update-bindings-fn))]
(add-mem-internal-idx! internal-seen % indexed)))
elements))]
(update-vals amem
@@ -363,17 +357,17 @@
(defn find-mem-idx
"Finds the fact from mem-facts at the given index. See docs on mem-facts for more."
[idx]
- (-> mem-facts
- (.get)
- (get idx)))
+ (-> mem-facts
+ (.get)
+ (get idx)))
(defn find-internal-idx
"Finds the fact from mem-internal at the given index. See docs on mem-internal for more."
[idx]
(-> mem-internal
- (.get)
- (get idx)))
-
+ (.get)
+ (get idx)))
+
(defn indexed-session-memory-state
"Takes the working memory from a session and strips it down to only the memory needed for
serialization. Along with this, replaces all working memory facts with MemIdx place holders.
@@ -490,7 +484,7 @@
of memory."
([rulebase opts]
(let [{:keys [listeners transport]} opts]
-
+
(eng/assemble {:rulebase rulebase
:memory (eng/local-memory rulebase
(clara.rules.engine.LocalTransport.)
@@ -505,7 +499,7 @@
([rulebase memory opts]
(let [{:keys [listeners transport]} opts]
-
+
(eng/assemble {:rulebase rulebase
:memory (assoc memory
:rulebase rulebase
@@ -535,7 +529,7 @@
via the opts argument to both serialize and deserialize. Certain options are expected and
required to be supported by any implementation of ISessionSerializer. These are referred to as
the 'standard' options.
-
+
These include:
* :rulebase-only? - When true indicates the rulebase is the only part of the session to serializer.
@@ -585,7 +579,7 @@
The important part of this serialization protocol is that the facts returned from deserialize-facts are in
the *same order* as how they were given to serialize-facts."
-
+
(serialize-facts [this fact-seq]
"Serialize the given fact-seq, which is an order sequence of facts from working memory of a session.
Note, as mentioned in the protocol docs, the *order* these are given is *important* and should be preserved
@@ -614,7 +608,7 @@
be stored and retrieved a single time for potentially many sessions containing different working
memory data, for the same rules. This function is only a convenience for passing the
:rulebase-only? true flag to the serialize-session-state function.
- See serialize-session-state for more."
+ See serialize-session-state for more."
([session :- (s/protocol eng/ISession)
session-serializer :- (s/protocol ISessionSerializer)]
(serialize-rulebase session
@@ -696,7 +690,7 @@
(deserialize-session-state session-serializer
memory-facts-serializer
{}))
-
+
([session-serializer :- (s/protocol ISessionSerializer)
memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)
opts :- {s/Any s/Any}]
diff --git a/src/main/clojure/clara/rules/durability/fressian.clj b/src/main/clojure/clara/rules/durability/fressian.clj
index 7f33673a..c60c9b00 100644
--- a/src/main/clojure/clara/rules/durability/fressian.clj
+++ b/src/main/clojure/clara/rules/durability/fressian.clj
@@ -244,24 +244,24 @@
"clj/set"
(create-identity-based-handler
- clojure.lang.APersistentSet
- "clj/set"
- write-with-meta
- (fn clj-set-reader [rdr] (read-with-meta rdr set)))
+ clojure.lang.APersistentSet
+ "clj/set"
+ write-with-meta
+ (fn clj-set-reader [rdr] (read-with-meta rdr set)))
"clj/vector"
(create-identity-based-handler
- clojure.lang.APersistentVector
- "clj/vector"
- write-with-meta
- (fn clj-vec-reader [rdr] (read-with-meta rdr vec)))
+ clojure.lang.APersistentVector
+ "clj/vector"
+ write-with-meta
+ (fn clj-vec-reader [rdr] (read-with-meta rdr vec)))
"clj/list"
(create-identity-based-handler
- clojure.lang.PersistentList
- "clj/list"
- write-with-meta
- (fn clj-list-reader [rdr] (read-with-meta rdr #(apply list %))))
+ clojure.lang.PersistentList
+ "clj/list"
+ write-with-meta
+ (fn clj-list-reader [rdr] (read-with-meta rdr #(apply list %))))
"clj/emptylist"
;; Not using the identity based handler as this will always be identical anyway
@@ -280,115 +280,115 @@
(read [_ rdr tag component-count]
(let [m (read-meta rdr)]
(cond-> '()
- m (with-meta m)))))}}
+ m (with-meta m)))))}}
"clj/aseq"
(create-identity-based-handler
- clojure.lang.ASeq
- "clj/aseq"
- write-with-meta
- (fn clj-seq-reader [rdr] (read-with-meta rdr sequence)))
+ clojure.lang.ASeq
+ "clj/aseq"
+ write-with-meta
+ (fn clj-seq-reader [rdr] (read-with-meta rdr sequence)))
"clj/lazyseq"
(create-identity-based-handler
- clojure.lang.LazySeq
- "clj/lazyseq"
- write-with-meta
- (fn clj-lazy-seq-reader [rdr] (read-with-meta rdr sequence)))
+ clojure.lang.LazySeq
+ "clj/lazyseq"
+ write-with-meta
+ (fn clj-lazy-seq-reader [rdr] (read-with-meta rdr sequence)))
"clj/map"
(create-identity-based-handler
- clojure.lang.APersistentMap
- "clj/map"
- (fn clj-map-writer [wtr tag m] (write-with-meta wtr tag m write-map))
- (fn clj-map-reader [rdr] (read-with-meta rdr #(into {} %))))
+ clojure.lang.APersistentMap
+ "clj/map"
+ (fn clj-map-writer [wtr tag m] (write-with-meta wtr tag m write-map))
+ (fn clj-map-reader [rdr] (read-with-meta rdr #(into {} %))))
"clj/treeset"
(create-identity-based-handler
- clojure.lang.PersistentTreeSet
- "clj/treeset"
- (fn clj-treeset-writer [^Writer wtr tag s]
- (let [cname (d/sorted-comparator-name s)]
- (.writeTag wtr tag 3)
- (if cname
- (.writeObject wtr cname true)
- (.writeNull wtr))
+ clojure.lang.PersistentTreeSet
+ "clj/treeset"
+ (fn clj-treeset-writer [^Writer wtr tag s]
+ (let [cname (d/sorted-comparator-name s)]
+ (.writeTag wtr tag 3)
+ (if cname
+ (.writeObject wtr cname true)
+ (.writeNull wtr))
;; Preserve metadata.
- (if-let [m (meta s)]
- (.writeObject wtr m)
- (.writeNull wtr))
- (.writeList wtr s)))
- (fn clj-treeset-reader [^Reader rdr]
- (let [c (some-> rdr .readObject resolve deref)
- m (.readObject rdr)
- s (-> (.readObject rdr)
- (d/seq->sorted-set c))]
- (if m
- (with-meta s m)
- s))))
+ (if-let [m (meta s)]
+ (.writeObject wtr m)
+ (.writeNull wtr))
+ (.writeList wtr s)))
+ (fn clj-treeset-reader [^Reader rdr]
+ (let [c (some-> rdr .readObject resolve deref)
+ m (.readObject rdr)
+ s (-> (.readObject rdr)
+ (d/seq->sorted-set c))]
+ (if m
+ (with-meta s m)
+ s))))
"clj/treemap"
(create-identity-based-handler
- clojure.lang.PersistentTreeMap
- "clj/treemap"
- (fn clj-treemap-writer [^Writer wtr tag o]
- (let [cname (d/sorted-comparator-name o)]
- (.writeTag wtr tag 3)
- (if cname
- (.writeObject wtr cname true)
- (.writeNull wtr))
+ clojure.lang.PersistentTreeMap
+ "clj/treemap"
+ (fn clj-treemap-writer [^Writer wtr tag o]
+ (let [cname (d/sorted-comparator-name o)]
+ (.writeTag wtr tag 3)
+ (if cname
+ (.writeObject wtr cname true)
+ (.writeNull wtr))
;; Preserve metadata.
- (if-let [m (meta o)]
- (.writeObject wtr m)
- (.writeNull wtr))
- (write-map wtr o)))
- (fn clj-treemap-reader [^Reader rdr]
- (let [c (some-> rdr .readObject resolve deref)
- m (.readObject rdr)
- s (d/seq->sorted-map (.readObject rdr) c)]
- (if m
- (with-meta s m)
- s))))
+ (if-let [m (meta o)]
+ (.writeObject wtr m)
+ (.writeNull wtr))
+ (write-map wtr o)))
+ (fn clj-treemap-reader [^Reader rdr]
+ (let [c (some-> rdr .readObject resolve deref)
+ m (.readObject rdr)
+ s (d/seq->sorted-map (.readObject rdr) c)]
+ (if m
+ (with-meta s m)
+ s))))
"clj/mapentry"
(create-identity-based-handler
- clojure.lang.MapEntry
- "clj/mapentry"
- (fn clj-mapentry-writer [^Writer wtr tag o]
- (.writeTag wtr tag 2)
- (.writeObject wtr (key o) true)
- (.writeObject wtr (val o)))
- (fn clj-mapentry-reader [^Reader rdr]
- (d/create-map-entry (.readObject rdr)
- (.readObject rdr))))
+ clojure.lang.MapEntry
+ "clj/mapentry"
+ (fn clj-mapentry-writer [^Writer wtr tag o]
+ (.writeTag wtr tag 2)
+ (.writeObject wtr (key o) true)
+ (.writeObject wtr (val o)))
+ (fn clj-mapentry-reader [^Reader rdr]
+ (d/create-map-entry (.readObject rdr)
+ (.readObject rdr))))
;; Have to redefine both Symbol and IRecord to support metadata as well
;; as identity-based caching for the IRecord case.
"clj/sym"
(create-identity-based-handler
- clojure.lang.Symbol
- "clj/sym"
- (fn clj-sym-writer [^Writer wtr tag o]
+ clojure.lang.Symbol
+ "clj/sym"
+ (fn clj-sym-writer [^Writer wtr tag o]
;; Mostly copied from private fres/write-named, except the metadata part.
- (.writeTag wtr tag 3)
- (.writeObject wtr (namespace o) true)
- (.writeObject wtr (name o) true)
- (if-let [m (meta o)]
- (.writeObject wtr m)
- (.writeNull wtr)))
- (fn clj-sym-reader [^Reader rdr]
- (let [s (symbol (.readObject rdr) (.readObject rdr))
- m (read-meta rdr)]
- (cond-> s
- m (with-meta m)))))
+ (.writeTag wtr tag 3)
+ (.writeObject wtr (namespace o) true)
+ (.writeObject wtr (name o) true)
+ (if-let [m (meta o)]
+ (.writeObject wtr m)
+ (.writeNull wtr)))
+ (fn clj-sym-reader [^Reader rdr]
+ (let [s (symbol (.readObject rdr) (.readObject rdr))
+ m (read-meta rdr)]
+ (cond-> s
+ m (with-meta m)))))
"clj/record"
(create-identity-based-handler
- clojure.lang.IRecord
- "clj/record"
- write-record
- read-record)
+ clojure.lang.IRecord
+ "clj/record"
+ write-record
+ read-record)
"clara/productionnode"
(create-cached-node-handler ProductionNode
@@ -508,7 +508,7 @@
(def read-handlers
"All Fressian read handlers used by FressianSessionSerializer's."
- (->> handlers
+ (->> handlers
vals
(into fres/clojure-read-handlers
(mapcat :readers))))
@@ -546,7 +546,7 @@
(pform/thread-local-binding [d/node-id->node-cache (volatile! {})
d/clj-struct-holder record-holder]
(doseq [s sources] (fres/write-object wtr s)))))]
-
+
;; In this case there is nothing to do with memory, so just serialize immediately.
(if (:rulebase-only? opts)
;; node-expr-fn-lookup is a map with a structure of:
@@ -556,23 +556,23 @@
;; during deserialization the compilation-context({Keyword Any}), which contains the unevaluated form,
;; can be used to reconstruct the original map.
(do-serialize [(remove-node-fns node-expr-fn-lookup) rulebase])
-
+
;; Otherwise memory needs to have facts extracted to return.
(let [{:keys [memory indexed-facts internal-indexed-facts]} (d/indexed-session-memory-state memory)
sources (if (:with-rulebase? opts)
[(remove-node-fns node-expr-fn-lookup) rulebase internal-indexed-facts memory]
[internal-indexed-facts memory])]
-
+
(do-serialize sources)
-
+
;; Return the facts needing to be serialized still.
indexed-facts))))
-
+
(deserialize [_ mem-facts opts]
(with-open [^FressianReader rdr (fres/create-reader in-stream :handlers read-handler-lookup)]
(let [{:keys [rulebase-only? base-rulebase forms-per-eval]} opts
-
+
record-holder (ArrayList.)
;; The rulebase should either be given from the base-session or found in
;; the restored session-state.
@@ -598,10 +598,10 @@
reconstruct-expressions
(com/compile-exprs forms-per-eval))]
(assoc (fres/read-object rdr)
- :node-expr-fn-lookup
- (.get d/node-fn-cache))))]
+ :node-expr-fn-lookup
+ (.get d/node-fn-cache))))]
(d/rulebase->rulebase-with-opts without-opts-rulebase opts)))]
-
+
(if rulebase-only?
rulebase
(d/assemble-restored-session rulebase
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
new file mode 100644
index 00000000..0cfde6dd
--- /dev/null
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -0,0 +1,2119 @@
+(ns clara.rules.engine
+ "This namespace is for internal use and may move in the future. Most users should use only the clara.rules namespace."
+ (:require [clojure.core.reducers :as r]
+ [clojure.string :as string]
+ [clara.rules.memory :as mem]
+ [clara.rules.listener :as l]
+ [clara.rules.platform :as platform]
+ [clara.rules.update-cache.core :as uc]
+ [clara.rules.update-cache.cancelling :as ca]))
+
+;; The accumulator is a Rete extension to run an accumulation (such as sum, average, or similar operation)
+;; over a collection of values passing through the Rete network. This object defines the behavior
+;; of an accumulator. See the AccumulateNode for the actual node implementation in the network.
+(defrecord Accumulator [initial-value retract-fn reduce-fn combine-fn convert-return-fn])
+
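+;; A minimal sketch of such an accumulator (illustrative only; assumes a hypothetical
+;; fact type with a :value field):
+;;   (->Accumulator 0                                               ; initial-value
+;;                  (fn [acc retracted] (- acc (:value retracted))) ; retract-fn
+;;                  (fn [acc fact] (+ acc (:value fact)))           ; reduce-fn
+;;                  +                                               ; combine-fn
+;;                  identity)                                       ; convert-return-fn
+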
+;; A Rete-style token, which contains two items:
+;; * matches, a vector of [fact, node-id] tuples for the facts and corresponding nodes they matched.
+;; NOTE: It is important that this remains an indexed vector for memory optimizations as well as
+;; for correct conj behavior for new elements i.e. added to the end.
+;; * bindings, a map of keyword-to-values for bound variables.
+(defrecord Token [matches bindings])
+
+;; A working memory element, containing a single fact and its corresponding bound variables.
+(defrecord Element [fact bindings])
+
+;; An activation for the given production and token.
+(defrecord Activation [node token])
+
+;; Token with no bindings, used as the root of beta nodes.
+(def empty-token (->Token [] {}))
+
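+;; Illustrative only (hypothetical fact, node id, and binding): a token that matched
+;; one fact at node 3 with a single bound variable could look like
+;;   (->Token [[a-temperature-fact 3]] {:?loc "ORD"})
+;; and the working-memory element for that same fact would be
+;;   (->Element a-temperature-fact {:?loc "ORD"})
+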
+;; Record indicating the negation existing in the working memory.
+;;
+;; Determining if an object is an instance of a class is a primitive
+;; JVM operation and is much more efficient than determining
+;; if that object descends from a particular object through
+;; Clojure's hierarchy as determined by the isa? function.
+;; See Issue 239 for more details.
+;; A marker interface to identify internal facts.
+(definterface ISystemFact)
+
+(defrecord NegationResult
+ [gen-rule-name ancestor-bindings]
+ ISystemFact)
+
+;; Schema for the structure returned by the components
+;; function on the session protocol.
+;; This is simply a comment rather than first-class schema
+;; for now since it's unused for validation and created
+;; undesired warnings as described at https://groups.google.com/forum/#!topic/prismatic-plumbing/o65PfJ4CUkI
+(comment
+ (def session-components-schema
+ {:rulebase s/Any
+ :memory s/Any
+ :transport s/Any
+ :listeners [s/Any]
+ :get-alphas-fn s/Any}))
+
+;; Returns a new session with the additional facts inserted.
+(defprotocol ISession
+
+ ;; Inserts facts.
+ (insert [session facts])
+
+ ;; Retracts facts.
+ (retract [session facts])
+
+ ;; Fires pending rules and returns a new session where they are in a fired state.
+ ;;
+ ;; Note that clara.rules/fire-rules, the public API for these methods, will handle
+  ;; calling the two-arg fire-rules with an empty map itself, but we also handle it in the fire-rules implementation
+ ;; as well in case anyone is directly calling the fire-rules protocol function or interface method on the LocalSession.
+ ;; The two-argument version of fire-rules was added for issue 249.
+ (fire-rules [session] [session opts])
+
+  ;; Runs a query against the session.
+ (query [session query params])
+
+ ;; Returns the components of a session as defined in the session-components-schema
+ (components [session]))
+
+;; Left activation protocol for various types of beta nodes.
+(defprotocol ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener])
+ (left-retract [node join-bindings tokens memory transport listener])
+ (description [node])
+ (get-join-keys [node]))
+
+;; Right activation protocol to insert new facts, connecting alpha nodes
+;; and beta nodes.
+(defprotocol IRightActivate
+ (right-activate [node join-bindings elements memory transport listener])
+ (right-retract [node join-bindings elements memory transport listener]))
+
+;; Specialized right activation interface for accumulator nodes,
+;; where the caller has the option of pre-reducing items
+;; to reduce the data sent to the node. This would be useful
+;; if the caller is not in the same memory space as the accumulator node itself.
+(defprotocol IAccumRightActivate
+ ;; Pre-reduces elements, returning a map of bindings to reduced elements.
+ (pre-reduce [node elements])
+
+ ;; Right-activate the node with items reduced in the above pre-reduce step.
+ (right-activate-reduced [node join-bindings reduced memory transport listener]))
+
+(defprotocol IAccumInspect
+ "This protocol is expected to be implemented on accumulator nodes in the rules network.
+ It is not expected that users will implement this protocol, and most likely will not call
+ the protocol function directly."
+ (token->matching-elements [node memory token]
+ "Takes a token that was previously propagated from the node,
+ or a token that is a descendant of such a token, and returns the facts in elements
+ matching the token propagated from the node. During rules firing
+ accumulators only propagate bindings created and the result binding
+ downstream rather than all facts that were accumulated over, but there
+ are use-cases in session inspection where we want to retrieve the individual facts.
+
+ Example: [?min-temp <- (acc/min :temperature) :from [Temperature (= temperature ?loc)]]
+ [?windspeed <- [WindSpeed (= location ?loc)]]
+
+ Given a token propagated from the node for the WindSpeed condition
+ we could retrieve the Temperature facts from the matching location."))
+
+;; The transport protocol for sending and retracting items between nodes.
+(defprotocol ITransport
+ (send-elements [transport memory listener nodes elements])
+ (send-tokens [transport memory listener nodes tokens])
+ (retract-elements [transport memory listener nodes elements])
+ (retract-tokens [transport memory listener nodes tokens]))
+
+(defn- propagate-items-to-nodes [transport memory listener nodes items propagate-fn]
+ (doseq [node nodes
+ :let [join-keys (get-join-keys node)]]
+
+ (if (pos? (count join-keys))
+
+ ;; Group by the join keys for the activation.
+ (doseq [[join-bindings item-group] (platform/group-by-seq #(select-keys (:bindings %) join-keys) items)]
+ (propagate-fn node
+ join-bindings
+ item-group
+ memory
+ transport
+ listener))
+
+ ;; The node has no join keys, so just send everything at once
+ ;; (if there is something to send.)
+ (when (seq items)
+ (propagate-fn node
+ {}
+ items
+ memory
+ transport
+ listener)))))
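+
+;; For example (hypothetical bindings): with join keys [:?loc], elements bound to
+;; {:?loc "MCI"} and {:?loc "ORD"} are propagated to the node as two separate groups,
+;; keyed by those binding maps; with no join keys everything is sent in a single call.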
+
+;; Simple, in-memory transport.
+(deftype LocalTransport []
+ ITransport
+ (send-elements [transport memory listener nodes elements]
+ (propagate-items-to-nodes transport memory listener nodes elements right-activate))
+
+ (send-tokens [transport memory listener nodes tokens]
+ (propagate-items-to-nodes transport memory listener nodes tokens left-activate))
+
+ (retract-elements [transport memory listener nodes elements]
+ (propagate-items-to-nodes transport memory listener nodes elements right-retract))
+
+ (retract-tokens [transport memory listener nodes tokens]
+ (propagate-items-to-nodes transport memory listener nodes tokens left-retract)))
+
+;; Protocol for activation of Rete alpha nodes.
+(defprotocol IAlphaActivate
+ (alpha-activate [node facts memory transport listener])
+ (alpha-retract [node facts memory transport listener]))
+
+;; Protocol for getting the type (e.g. :production and :query) and name of a
+;; terminal node.
+(defprotocol ITerminalNode
+ (terminal-node-type [this]))
+
+;; Protocol for getting a node's condition expression.
+(defprotocol IConditionNode
+ (get-condition-description [this]))
+
+(defn get-terminal-node-types
+ [node]
+ (->> node
+ (tree-seq (comp seq :children) :children)
+ (keep #(when (satisfies? ITerminalNode %)
+ (terminal-node-type %)))
+ (into (sorted-set))))
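+
+;; The result is a sorted set of the [:production name] / [:query name] pairs reported
+;; by terminal nodes beneath this node, e.g. (hypothetical names)
+;;   #{[:production "cold-alert"] [:query "temperatures"]}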
+
+(defn get-conditions-and-rule-names
+ "Returns a map from conditions to sets of rules."
+ ([node]
+ (if-let [condition (when (satisfies? IConditionNode node)
+ (get-condition-description node))]
+ {condition (get-terminal-node-types node)}
+ (->> node
+ :children
+ (map get-conditions-and-rule-names)
+ (reduce (partial merge-with into) {})))))
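+
+;; Shape of the returned map (hypothetical condition and rule name):
+;;   {[some.ns.Temperature (< temperature 20)] #{[:production "cold-alert"]}}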
+
+;; Active session during rule execution.
+(def ^:dynamic *current-session* nil)
+
+;; Note that this can hold facts directly retracted and facts logically retracted
+;; as a result of an external retraction or insertion.
+;; The value is expected to be an atom holding such facts.
+(def ^:dynamic *pending-external-retractions* nil)
+
+;; The token that triggered a rule to fire.
+(def ^:dynamic *rule-context* nil)
+
+(defn ^:private external-retract-loop
+ "Retract all facts, then group and retract all facts that must be logically retracted because of these
+ retractions, and so forth, until logical consistency is reached. When an external retraction causes multiple
+ facts of the same type to be retracted in the same iteration of the loop this improves efficiency since they can be grouped.
+ For example, if we have a rule that matches on FactA and inserts FactB, and then a later rule that accumulates on FactB,
+ if we have multiple FactA external retractions it is more efficient to logically retract all the FactB instances at once to minimize the number of times we must re-accumulate on FactB.
+ This is similar to the function of the pending-updates in the fire-rules* loop."
+ [get-alphas-fn memory transport listener]
+ (loop []
+ (let [retractions (deref *pending-external-retractions*)
+ ;; We have already obtained a direct reference to the facts to be
+ ;; retracted in this iteration of the loop outside the cache. Now reset
+ ;; the cache. The retractions we execute may cause new retractions to be queued
+ ;; up, in which case the loop will execute again.
+ _ (reset! *pending-external-retractions* [])]
+ (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
+ root alpha-roots]
+ (alpha-retract root fact-group memory transport listener))
+ (when (-> *pending-external-retractions* deref not-empty)
+ (recur)))))
+
+(defn- flush-updates
+ "Flush all pending updates in the current session. Returns true if there were
+ some items to flush, false otherwise"
+ [current-session]
+ (letfn [(flush-all [current-session flushed-items?]
+ (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} current-session
+ pending-updates (-> current-session :pending-updates uc/get-updates-and-reset!)]
+
+ (if (empty? pending-updates)
+ flushed-items?
+ (do
+ (doseq [partition pending-updates
+ :let [facts (mapcat :facts partition)]
+ [alpha-roots fact-group] (get-alphas-fn facts)
+ root alpha-roots]
+
+ (if (= :insert (:type (first partition)))
+ (alpha-activate root fact-group transient-memory transport listener)
+ (alpha-retract root fact-group transient-memory transport listener)))
+
+ ;; There may be new pending updates due to the flush just
+ ;; made. So keep flushing until there are none left. Items
+ ;; were flushed though, so flush-items? is now true.
+ (flush-all current-session true)))))]
+
+ (flush-all current-session false)))
+
+(defn insert-facts!
+ "Place facts in a stateful cache to be inserted into the session
+ immediately after the RHS of a rule fires."
+ [facts unconditional]
+ (if unconditional
+ (swap! (:batched-unconditional-insertions *rule-context*) into facts)
+ (swap! (:batched-logical-insertions *rule-context*) into facts)))
+
+(defn rhs-retract-facts!
+ "Place all facts retracted in the RHS in a buffer to be retracted after
+ the eval'ed RHS function completes."
+ [facts]
+ (swap! (:batched-rhs-retractions *rule-context*) into facts))
+
+(defn ^:private flush-rhs-retractions!
+ "Retract all facts retracted in the RHS after the eval'ed RHS function completes.
+ This should only be used for facts explicitly retracted in a RHS.
+ It should not be used for retractions that occur as part of automatic truth maintenance."
+ [facts]
+ (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} *current-session*
+ {:keys [node token]} *rule-context*]
+ ;; Update the count so the rule engine will know when we have normalized.
+ (swap! insertions + (count facts))
+
+ (when listener
+ (l/retract-facts! listener node token facts))
+
+ (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
+ root alpha-roots]
+
+ (alpha-retract root fact-group transient-memory transport listener))))
+
+(defn ^:private flush-insertions!
+ "Perform the actual fact insertion, optionally making them unconditional. This should only
+ be called once per rule activation for logical insertions."
+ [facts unconditional]
+ (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} *current-session*
+ {:keys [node token]} *rule-context*]
+
+ ;; Update the insertion count.
+ (swap! insertions + (count facts))
+
+ ;; Track this insertion in our transient memory so logical retractions will remove it.
+ (if unconditional
+ (l/insert-facts! listener node token facts)
+ (do
+ (mem/add-insertions! transient-memory node token facts)
+ (l/insert-facts-logical! listener node token facts)))
+
+ (-> *current-session* :pending-updates (uc/add-insertions! facts))))
+
+(defn retract-facts!
+ "Perform the fact retraction."
+ [facts]
+ (-> *current-session* :pending-updates (uc/add-retractions! facts)))
+
+;; Record for the production node in the Rete network.
+(defrecord ProductionNode [id production rhs]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+
+ ;; Provide listeners information on all left-activate calls,
+ ;; but we don't store these tokens in the beta-memory since the production-memory
+ ;; and activation-memory collectively contain all information that ProductionNode
+ ;; needs. See https://github.com/cerner/clara-rules/issues/386
+ (l/left-activate! listener node tokens)
+
+ ;; Fire the rule if it's not a no-loop rule, or if the rule is not
+ ;; active in the current context.
+ (when (or (not (get-in production [:props :no-loop]))
+ (not (= production (get-in *rule-context* [:node :production]))))
+
+ (let [activations (platform/eager-for [token tokens]
+ (->Activation node token))]
+
+ (l/add-activations! listener node activations)
+
+ ;; The production matched, so add the tokens to the activation list.
+ (mem/add-activations! memory production activations))))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+
+ ;; Provide listeners information on all left-retract calls for passivity,
+ ;; but we don't store these tokens in the beta-memory since the production-memory
+ ;; and activation-memory collectively contain all information that ProductionNode
+ ;; needs. See https://github.com/cerner/clara-rules/issues/386
+ (l/left-retract! listener node tokens)
+
+ ;; Remove pending activations triggered by the retracted tokens.
+ (let [activations (platform/eager-for [token tokens]
+ (->Activation node token))
+
+ ;; We attempt to remove a pending activation for all tokens retracted, but our expectation
+ ;; is that each token may remove a pending activation
+ ;; or logical insertions from a previous rule activation but not both.
+ ;; We first attempt to use each token to remove a pending activation but keep track of which
+ ;; tokens were not used to remove an activation.
+ [removed-activations unremoved-activations]
+ (mem/remove-activations! memory production activations)
+
+ _ (l/remove-activations! listener node removed-activations)
+
+ unremoved-tokens (mapv :token unremoved-activations)
+
+ ;; Now use each token that was not used to remove a pending activation to remove
+ ;; the logical insertions from a previous activation if the truth maintenance system
+ ;; has a matching previous activation.
+ token-insertion-map (mem/remove-insertions! memory node unremoved-tokens)]
+
+ (when-let [insertions (seq (apply concat (vals token-insertion-map)))]
+ ;; If there is current session with rules firing, add these items to the queue
+ ;; to be retracted so they occur in the same order as facts being inserted.
+ (cond
+
+ ;; Both logical retractions resulting from rule network activity and manual RHS retractions
+ ;; expect *current-session* to be bound since both happen in the context of a fire-rules call.
+ *current-session*
+ ;; Retract facts that have become untrue, unless they became untrue
+ ;; because of an activation of the current rule that is :no-loop
+ (when (or (not (get-in production [:props :no-loop]))
+ (not (= production (get-in *rule-context* [:node :production]))))
+ (do
+ ;; Notify the listener of logical retractions.
+ ;; Note that this notification happens immediately, while the
+ ;; alpha-retract notification on matching alpha nodes will happen when the
+ ;; retraction is actually removed from the buffer and executed in the rules network.
+ (doseq [[token token-insertions] token-insertion-map]
+ (l/retract-facts-logical! listener node token token-insertions))
+ (retract-facts! insertions)))
+
+ ;; Any session implementation is required to bind this during external retractions and insertions.
+ *pending-external-retractions*
+ (do
+ (doseq [[token token-insertions] token-insertion-map]
+ (l/retract-facts-logical! listener node token token-insertions))
+ (swap! *pending-external-retractions* into insertions))
+
+ :else
+ (throw (ex-info (str "Attempting to retract from a ProductionNode when neither *current-session* nor "
+ "*pending-external-retractions* is bound is illegal.")
+ {:node node
+ :join-bindings join-bindings
+ :tokens tokens}))))))
+
+ (get-join-keys [node] [])
+
+ (description [node] "ProductionNode")
+
+ ITerminalNode
+ (terminal-node-type [this] [:production (:name production)]))
+
+;; The QueryNode is a terminal node that stores the
+;; state that can be queried by a rule user.
+(defrecord QueryNode [id query param-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ (l/left-activate! listener node tokens)
+ (mem/add-tokens! memory node join-bindings tokens))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (mem/remove-tokens! memory node join-bindings tokens))
+
+ (get-join-keys [node] param-keys)
+
+ (description [node] (str "QueryNode -- " query))
+
+ ITerminalNode
+ (terminal-node-type [this] [:query (:name query)]))
+
+(defn node-rule-names
+ [child-type node]
+ (->> node
+ (tree-seq (comp seq :children) :children)
+ (keep child-type)
+ (map :name)
+ (distinct)
+ (sort)))
+
+(defn- list-of-names
+ "Returns formatted string with correctly pluralized header and
+ list of names. Returns nil if no such node is found."
+ [singular plural prefix names]
+ (let [msg-for-unnamed (str " An unnamed " singular ", provide names to your "
+ plural " if you want them to be identified here.")
+ names-string (->> names
+ (sort)
+ (map #(if (nil? %) msg-for-unnamed %))
+ (map #(str prefix " " %))
+ (string/join "\n"))]
+ (if (pos? (count names))
+ (str prefix plural ":\n" names-string "\n"))))
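+
+;; Roughly (hypothetical names): (list-of-names "rule" "rules" "  " ["b" "a"])
+;; produces "  rules:\n   a\n   b\n", while an empty list of names yields nil.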
+
+(defn- single-condition-message
+ [condition-number [condition-definition terminals]]
+ (let [productions (->> terminals
+ (filter (comp #{:production} first))
+ (map second))
+ queries (->> terminals
+ (filter (comp #{:query} first))
+ (map second))
+ production-section (list-of-names "rule" "rules" " " productions)
+ query-section (list-of-names "query" "queries" " " queries)]
+ (string/join
+ [(str (inc condition-number) ". " condition-definition "\n")
+ production-section
+ query-section])))
+
+(defn- throw-condition-exception
+ "Adds a useful error message when executing a constraint node raises an exception."
+ [{:keys [cause node fact env bindings] :as args}]
+ (let [bindings-description (if (empty? bindings)
+ "with no bindings"
+ (str "with bindings\n " bindings))
+ facts-description (if (contains? args :fact)
+ (str "when processing fact\n " (pr-str fact))
+ "with no fact")
+ message-header (string/join ["Condition exception raised.\n"
+ (str facts-description "\n")
+ (str bindings-description "\n")
+ "Conditions:\n"])
+ conditions-and-rules (get-conditions-and-rule-names node)
+ condition-messages (->> conditions-and-rules
+ (map-indexed single-condition-message)
+ (string/join "\n"))
+ message (str message-header "\n" condition-messages)]
+ (throw (ex-info message
+ {:fact fact
+ :bindings bindings
+ :env env
+ :conditions-and-rules conditions-and-rules}
+ cause))))
+
+(defn- alpha-node-matches
+ [facts env activation node]
+ (platform/eager-for [fact facts
+ :let [bindings (try
+ (activation fact env)
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :fact fact
+ :env env})))]
+ :when bindings] ; FIXME: add env.
+ [fact bindings]))
+
+;; Record representing alpha nodes in the Rete network,
+;; each of which evaluates a single condition and
+;; propagates matches to its children.
+(defrecord AlphaNode [id env children activation fact-type]
+
+ IAlphaActivate
+ (alpha-activate [node facts memory transport listener]
+ (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
+ (l/alpha-activate! listener node (map first fact-binding-pairs))
+ (send-elements
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [[fact bindings] fact-binding-pairs]
+ (->Element fact bindings)))))
+
+ (alpha-retract [node facts memory transport listener]
+ (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
+ (l/alpha-retract! listener node (map first fact-binding-pairs))
+ (retract-elements
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [[fact bindings] fact-binding-pairs]
+ (->Element fact bindings))))))
+
+(defrecord RootJoinNode [id condition children binding-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener])
+ ;; This specialized root node doesn't need to deal with the
+ ;; empty token, so do nothing.
+
+ (left-retract [node join-bindings tokens memory transport listener])
+ ;; The empty token can't be retracted from the root node,
+ ;; so do nothing.
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "RootJoinNode -- " (:text condition)))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+
+ (l/right-activate! listener node elements)
+
+    ;; Add elements to the working memory to support analysis tools.
+ (mem/add-elements! memory node join-bindings elements)
+    ;; Simply create tokens and send them downstream.
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [{:keys [fact bindings] :as element} elements]
+ (->Token [[fact (:id node)]] bindings))))
+
+ (right-retract [node join-bindings elements memory transport listener]
+
+ (l/right-retract! listener node elements)
+
+ ;; Remove matching elements and send the retraction downstream.
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)]
+ (->Token [[fact (:id node)]] bindings))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [type constraints]} condition]
+ (into [type] constraints))))
+
+;; Record for the join node, a type of beta node in the rete network. This node performs joins
+;; between left and right activations, creating new tokens when joins match and sending them to
+;; its descendents.
+(defrecord HashJoinNode [id condition children binding-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ ;; Add token to the node's working memory for future right activations.
+ (mem/add-tokens! memory node join-bindings tokens)
+ (l/left-activate! listener node tokens)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [element (mem/get-elements memory node join-bindings)
+ token tokens
+ :let [fact (:fact element)
+ fact-binding (:bindings element)]]
+ (->Token (conj (:matches token) [fact id]) (conj fact-binding (:bindings token))))))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
+ element (mem/get-elements memory node join-bindings)
+ :let [fact (:fact element)
+ fact-bindings (:bindings element)]]
+ (->Token (conj (:matches token) [fact id]) (conj fact-bindings (:bindings token))))))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "JoinNode -- " (:text condition)))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+ (mem/add-elements! memory node join-bindings elements)
+ (l/right-activate! listener node elements)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [token (mem/get-tokens memory node join-bindings)
+ {:keys [fact bindings] :as element} elements]
+ (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
+
+ (right-retract [node join-bindings elements memory transport listener]
+ (l/right-retract! listener node elements)
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
+ token (mem/get-tokens memory node join-bindings)]
+ (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [type constraints]} condition]
+ (into [type] constraints))))
+
+(defn- join-node-matches
+ [node join-filter-fn token fact fact-bindings env]
+ (let [beta-bindings (try (join-filter-fn token fact fact-bindings {})
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :fact fact
+ :env env
+ :bindings (merge (:bindings token)
+ fact-bindings)})))]
+ beta-bindings))
+
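+;; Illustrative sketch only (not part of the engine): a join-filter-fn is normally generated by
+;; the rule compiler. It receives the token, the candidate fact, the fact bindings, and the
+;; environment, and returns a (possibly empty) map of additional bindings when the join succeeds,
+;; or a falsey value when it does not. The hand-written stand-in and sample data below are
+;; hypothetical.
+(comment
+  (let [max-filter (fn [token fact fact-bindings env]
+                     ;; Succeed, contributing no extra bindings, when the fact's value is
+                     ;; below the ?max bound earlier in the token.
+                     (when (< (:value fact) (get-in token [:bindings :?max]))
+                       {}))]
+    (join-node-matches nil max-filter (->Token [] {:?max 10}) {:value 5} {} {}))
+  ;; => {}
+  )
+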
+(defrecord ExpressionJoinNode [id condition join-filter-fn children binding-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ ;; Add token to the node's working memory for future right activations.
+ (mem/add-tokens! memory node join-bindings tokens)
+ (l/left-activate! listener node tokens)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [element (mem/get-elements memory node join-bindings)
+ token tokens
+ :let [fact (:fact element)
+ fact-binding (:bindings element)
+ beta-bindings (join-node-matches node join-filter-fn token fact fact-binding {})]
+ :when beta-bindings]
+ (->Token (conj (:matches token) [fact id])
+ (conj fact-binding (:bindings token) beta-bindings)))))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
+ element (mem/get-elements memory node join-bindings)
+ :let [fact (:fact element)
+ fact-bindings (:bindings element)
+ beta-bindings (join-node-matches node join-filter-fn token fact fact-bindings {})]
+ :when beta-bindings]
+ (->Token (conj (:matches token) [fact id])
+ (conj fact-bindings (:bindings token) beta-bindings)))))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "JoinNode -- " (:text condition)))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+ (mem/add-elements! memory node join-bindings elements)
+ (l/right-activate! listener node elements)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [token (mem/get-tokens memory node join-bindings)
+ {:keys [fact bindings] :as element} elements
+ :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
+ :when beta-bindings]
+ (->Token (conj (:matches token) [fact id])
+ (conj (:bindings token) bindings beta-bindings)))))
+
+ (right-retract [node join-bindings elements memory transport listener]
+ (l/right-retract! listener node elements)
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
+ token (mem/get-tokens memory node join-bindings)
+ :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
+ :when beta-bindings]
+ (->Token (conj (:matches token) [fact id])
+ (conj (:bindings token) bindings beta-bindings)))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [type constraints original-constraints]} condition
+ full-constraints (if (seq original-constraints)
+ original-constraints
+ constraints)]
+ (into [type] full-constraints))))
+
+;; The NegationNode is a beta node in the Rete network that simply
+;; negates the incoming tokens from its ancestors. It sends tokens
+;; to its descendants only if the negated condition or join fails (is false).
+(defrecord NegationNode [id condition children binding-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ ;; Add token to the node's working memory for future right activations.
+ (l/left-activate! listener node tokens)
+ (mem/add-tokens! memory node join-bindings tokens)
+ (when (empty? (mem/get-elements memory node join-bindings))
+ (send-tokens transport memory listener children tokens)))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (mem/remove-tokens! memory node join-bindings tokens)
+ (when (empty? (mem/get-elements memory node join-bindings))
+ (retract-tokens transport memory listener children tokens)))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "NegationNode -- " (:text condition)))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+ ;; Immediately evaluate whether there are previous elements since mem/get-elements
+ ;; returns a mutable list with a LocalMemory on the JVM currently.
+ (let [previously-empty? (empty? (mem/get-elements memory node join-bindings))]
+ (l/right-activate! listener node elements)
+ (mem/add-elements! memory node join-bindings elements)
+ ;; Retract tokens that matched the activation if no element matched the negation previously.
+ ;; If an element matched the negation already then no elements were propagated and there is
+ ;; nothing to retract.
+ (when previously-empty?
+ (retract-tokens transport memory listener children (mem/get-tokens memory node join-bindings)))))
+
+ (right-retract [node join-bindings elements memory transport listener]
+ (l/right-retract! listener node elements)
+ (mem/remove-elements! memory node join-bindings elements)
+ (when (empty? (mem/get-elements memory node join-bindings))
+ (send-tokens transport memory listener children (mem/get-tokens memory node join-bindings))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [type constraints]} condition]
+ [:not (into [type] constraints)])))
+
+(defn- matches-some-facts?
+ "Returns true if the given token matches one or more of the given elements."
+ [node token elements join-filter-fn condition]
+ (some (fn [{:keys [fact bindings]}]
+ (join-node-matches node join-filter-fn token fact bindings (:env condition)))
+ elements))
+
+;; A specialization of the NegationNode that supports additional tests
+;; that have to occur on the beta side of the network. The key difference between this and the simple
+;; negation node is the join-filter-fn, which allows negation tests to
+;; be applied with the parent token in context, rather than just a simple test of non-existence
+;; on the alpha side.
+(defrecord NegationWithJoinFilterNode [id condition join-filter-fn children binding-keys]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ ;; Add token to the node's working memory for future right activations.
+ (l/left-activate! listener node tokens)
+ (mem/add-tokens! memory node join-bindings tokens)
+
+ (send-tokens transport
+ memory
+ listener
+ children
+ (let [elements (mem/get-elements memory node join-bindings)]
+ (platform/eager-for [token tokens
+ :when (not (matches-some-facts? node
+ token
+ elements
+ join-filter-fn
+ condition))]
+ token))))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (mem/remove-tokens! memory node join-bindings tokens)
+ (retract-tokens transport
+ memory
+ listener
+ children
+
+ ;; Retract only if it previously had no matches in the negation node,
+ ;; and therefore had an activation.
+ (let [elements (mem/get-elements memory node join-bindings)]
+ (platform/eager-for [token tokens
+ :when (not (matches-some-facts? node
+ token
+ elements
+ join-filter-fn
+ condition))]
+ token))))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "NegationWithJoinFilterNode -- " (:text condition)))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+ (l/right-activate! listener node elements)
+ (let [previous-elements (mem/get-elements memory node join-bindings)]
+ ;; Retract tokens that matched the activation, since they are no longer negated.
+ (retract-tokens transport
+ memory
+ listener
+ children
+ (platform/eager-for [token (mem/get-tokens memory node join-bindings)
+
+ ;; Retract downstream if the token now has matching elements and didn't before.
+ ;; We check the new elements first in the expectation that the new elements will be
+ ;; smaller than the previous elements most of the time
+ ;; and that the time to check the elements will be proportional
+ ;; to the number of elements.
+ :when (and (matches-some-facts? node
+ token
+ elements
+ join-filter-fn
+ condition)
+ (not (matches-some-facts? node
+ token
+ previous-elements
+ join-filter-fn
+ condition)))]
+ token))
+ ;; Adding the elements will mutate the previous-elements since, on the JVM, the LocalMemory
+ ;; currently returns a mutable List from get-elements after changes in issue 184. We need to use the
+ ;; new and old elements in the logic above as separate collections. Therefore we need to delay updating the
+ ;; memory with the new elements until after we are done with previous-elements.
+ (mem/add-elements! memory node join-bindings elements)))
+
+ (right-retract [node join-bindings elements memory transport listener]
+
+ (l/right-retract! listener node elements)
+ (mem/remove-elements! memory node join-bindings elements)
+
+ (send-tokens transport
+ memory
+ listener
+ children
+ (let [remaining-elements (mem/get-elements memory node join-bindings)]
+ (platform/eager-for [token (mem/get-tokens memory node join-bindings)
+
+ ;; Propagate tokens when some of the retracted facts joined
+ ;; but none of the remaining facts do.
+ :when (and (matches-some-facts? node
+ token
+ elements
+ join-filter-fn
+ condition)
+ (not (matches-some-facts? node
+ token
+ remaining-elements
+ join-filter-fn
+ condition)))]
+ token))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [type constraints original-constraints]} condition
+ full-constraints (if (seq original-constraints)
+ original-constraints
+ constraints)]
+ [:not (into [type] full-constraints)])))
+
+(defn- test-node-matches
+ [node test-handler env token]
+ (let [test-result (try
+ (test-handler token env)
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :env env
+ :bindings (:bindings token)})))]
+ test-result))
+
+;; The test node represents a Rete extension in which an arbitrary test condition is run
+;; against bindings from ancestor nodes. Since this node
+;; performs no joins it does not accept right activations or retractions.
+(defrecord TestNode [id env test children]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ (l/left-activate! listener node tokens)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ (platform/eager-for
+ [token tokens
+ :when (test-node-matches node (:handler test) env token)]
+ token)))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (retract-tokens transport memory listener children tokens))
+
+ (get-join-keys [node] [])
+
+ (description [node] (str "TestNode -- " (:text test)))
+
+ IConditionNode
+ (get-condition-description [this]
+ (into [:test] (:constraints test))))
+
+(defn- do-accumulate
+ "Runs the actual accumulation. Returns the accumulated value."
+ [accumulator facts]
+ (r/reduce (:reduce-fn accumulator)
+ (:initial-value accumulator)
+ facts))
+
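+;; Illustrative sketch only: with a hypothetical sum accumulator map, do-accumulate simply
+;; reduces the facts with the given :reduce-fn starting from the :initial-value.
+(comment
+  (let [sum-accum {:initial-value 0
+                   :reduce-fn (fn [total fact] (+ total (:value fact)))}]
+    (do-accumulate sum-accum [{:value 1} {:value 2} {:value 3}]))
+  ;; => 6
+  )
+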
+(defn- retract-accumulated
+ "Helper function to retract an accumulated value."
+ [node accum-condition accumulator result-binding token converted-result fact-bindings transport memory listener]
+ (let [new-facts (conj (:matches token) [converted-result (:id node)])
+ new-bindings (merge (:bindings token)
+ fact-bindings
+ (when result-binding
+ {result-binding
+ converted-result}))]
+
+ (retract-tokens transport memory listener (:children node)
+ [(->Token new-facts new-bindings)])))
+
+(defn- send-accumulated
+ "Helper function to send the result of an accumulated value to the node's children."
+ [node accum-condition accumulator result-binding token converted-result fact-bindings transport memory listener]
+ (let [new-bindings (merge (:bindings token)
+ fact-bindings
+ (when result-binding
+ {result-binding
+ converted-result}))
+
+ ;; This is to check that the produced accumulator result is
+ ;; consistent with any variable from another rule condition
+ ;; that has the same binding. If another condition binds something
+ ;; to ?x, only the accumulator results that match that would propagate.
+ ;; We can do this safely because previous states get retracted.
+ previous-result (get (:bindings token) result-binding ::no-previous-result)]
+
+ (when (or (= previous-result ::no-previous-result)
+ (= previous-result converted-result))
+
+ (send-tokens transport memory listener (:children node)
+ [(->Token (conj (:matches token) [converted-result (:id node)]) new-bindings)]))))
+
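+;; Illustrative sketch only: the consistency check in send-accumulated matters for a
+;; hypothetical rule like the one below (the fact types are made up, and acc/count assumes
+;; clara.rules.accumulators is aliased as acc). Because ?c is bound by the first condition
+;; and is also the accumulator's result binding, only counts equal to that ?c propagate.
+(comment
+  '[[ReadingSummary (= ?c expected-count)]
+    [?c <- (acc/count) :from [WindSpeed]]]
+  )
+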
+;; The AccumulateNode hosts Accumulators, a Rete extension described above, in the Rete network.
+;; It behaves similarly to a JoinNode, but performs an accumulation function on the incoming
+;; working-memory elements before sending a new token to its descendants.
+(defrecord AccumulateNode [id accum-condition accumulator result-binding children binding-keys new-bindings]
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+ (l/left-activate! listener node tokens)
+ (let [previous-results (mem/get-accum-reduced-all memory node join-bindings)
+ convert-return-fn (:convert-return-fn accumulator)
+ has-matches? (seq previous-results)
+ initial-value (when-not has-matches?
+ (:initial-value accumulator))
+ initial-converted (when (some? initial-value)
+ (convert-return-fn initial-value))]
+
+ (mem/add-tokens! memory node join-bindings tokens)
+
+ (cond
+ ;; If there are previously accumulated results to propagate, use them. If this is the
+ ;; first time there are matching tokens, then the reduce will have to happen for the
+ ;; first time. However, this reduce operation is independent of the specific tokens
+ ;; since the elements join to the tokens via pre-computed hash join bindings for this
+ ;; node. So only reduce once per binding grouped facts, for all tokens. This includes
+ ;; all bindings, not just the join bindings.
+ has-matches?
+ (doseq [[fact-bindings [previous previous-reduced]] previous-results
+ :let [first-reduce? (= ::not-reduced previous-reduced)
+ previous-reduced (if first-reduce?
+ ;; Need to accumulate since this is the first time we have
+ ;; tokens matching so we have not accumulated before.
+ (do-accumulate accumulator previous)
+ previous-reduced)
+ accum-reduced (when first-reduce?
+ ^::accum-node [previous previous-reduced])
+ converted (when (some? previous-reduced)
+ (convert-return-fn previous-reduced))]]
+
+ ;; Newly accumulated results need to be added to memory.
+ (when first-reduce?
+ (l/add-accum-reduced! listener node join-bindings accum-reduced fact-bindings)
+ (mem/add-accum-reduced! memory node join-bindings accum-reduced fact-bindings))
+
+ (when (some? converted)
+ (doseq [token tokens]
+ (send-accumulated node accum-condition accumulator result-binding token converted fact-bindings
+ transport memory listener))))
+
+ ;; There are no previously accumulated results, but we still may need to propagate things
+ ;; such as a sum of zero items.
+ ;; If an initial value is provided and the converted value is non-nil, we can propagate
+ ;; the converted value as the accumulated item.
+ (and (some? initial-converted)
+ (empty? new-bindings))
+
+ ;; Note that this is added to memory a single time for all matching tokens because the memory
+ ;; location doesn't depend on bindings from individual tokens.
+
+ (let [accum-reduced ^::accum-node [[] initial-value]]
+ ;; The fact-bindings are normally a superset of the join-bindings. We have no fact-bindings
+ ;; that are not join-bindings in this case since we have verified that new-bindings is empty.
+ ;; Therefore the join-bindings and fact-bindings are exactly equal.
+ (l/add-accum-reduced! listener node join-bindings accum-reduced join-bindings)
+ (mem/add-accum-reduced! memory node join-bindings accum-reduced join-bindings)
+
+ ;; Send the created accumulated item to the children for each token.
+ (doseq [token tokens]
+ (send-accumulated node accum-condition accumulator result-binding token initial-converted {}
+ transport memory listener)))
+
+ ;; Propagate nothing if the above conditions don't apply.
+ :else
+ nil)))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+ (l/left-retract! listener node tokens)
+ (doseq [:let [removed-tokens (mem/remove-tokens! memory node join-bindings tokens)
+ remaining-tokens (mem/get-tokens memory node join-bindings)
+
+ ;; Note: Memory *must* be read here before the memory is potentially cleared in the
+ ;; following lines.
+ previous-results (mem/get-accum-reduced-all memory node join-bindings)
+
+ ;; If there are no new bindings created by the accumulator condition then
+ ;; a left-activation can create a new binding group in the accumulator memory.
+ ;; If this token is later removed without corresponding elements having been added,
+ ;; we remove the binding group from the accum memory. Otherwise adding and then retracting
+ ;; tokens could force bindings to be retained for the duration of the JVM, regardless of whether
+ ;; the backing facts were garbage collectable. This would be a memory leak.
+ _ (when (and (empty? remaining-tokens)
+ (empty? new-bindings)
+ (let [current (mem/get-accum-reduced memory node join-bindings join-bindings)]
+ (and
+ ;; If there is nothing under these bindings already in the memory then there is no
+ ;; need to take further action.
+ (not= current ::mem/no-accum-reduced)
+ ;; Check to see if there are elements under this binding group.
+ ;; If elements are present we must keep the binding group regardless of the
+ ;; presence or absence of tokens.
+ (-> current first empty?))))
+ (mem/remove-accum-reduced! memory node join-bindings join-bindings))]
+ ;; There is nothing to do if no tokens were removed.
+ :when (seq removed-tokens)
+ ;; Note that this will cause a Cartesian join between tokens and element groups where the token
+ ;; and element group share the same join bindings, but the element groups may have additional bindings
+ ;; that come from their alpha nodes. Keep in mind that these element groups need elements to be created
+ ;; and cannot come from initial values if they have bindings that are not shared with tokens.
+ [fact-bindings [previous previous-reduced]] previous-results
+ :let [;; If there were tokens before that are now removed, the value would have been accumulated already.
+ ;; This means there is no need to check for ::not-reduced here.
+ previous-converted (when (some? previous-reduced)
+ ((:convert-return-fn accumulator) previous-reduced))]
+ ;; A nil previous result should not have been propagated before.
+ :when (some? previous-converted)
+ token removed-tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted fact-bindings
+ transport memory listener)))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "AccumulateNode -- " accumulator))
+
+ IAccumRightActivate
+ (pre-reduce [node elements]
+ ;; Return a seq of tuples of the form [binding-group facts-from-group-elements].
+ (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
+ [bindings (mapv :fact element-group)]))
+
+ (right-activate-reduced [node join-bindings fact-seq memory transport listener]
+
+ ;; Combine previously reduced items together, join to matching tokens, and emit child tokens.
+ (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
+ ;; Note that we want to iterate over all tokens with the desired join bindings later
+ ;; independently of the fact binding groups created by elements; that is, a token
+ ;; can join with multiple groups of fact bindings when the accumulator condition
+ ;; creates new bindings.
+ matched-tokens (mem/get-tokens memory node join-bindings)
+ has-matches? (seq matched-tokens)]
+ [bindings facts] fact-seq
+ :let [previous (mem/get-accum-reduced memory node join-bindings bindings)
+ has-previous? (not= ::mem/no-accum-reduced previous)
+ [previous previous-reduced] (if has-previous?
+ previous
+ [::mem/no-accum-reduced ::not-reduced])
+ combined (if has-previous?
+ (into previous facts)
+ facts)
+ combined-reduced
+ (cond
+ ;; Reduce all of the combined items for the first time if there are
+ ;; now matches, and nothing was reduced before.
+ (and has-matches?
+ (= ::not-reduced previous-reduced))
+ (do-accumulate accumulator combined)
+
+ ;; There are matches, a previous reduced value exists for the previous items, and a
+ ;; :combine-fn is given. Use the :combine-fn on both the previously reduced
+ ;; and the newly reduced results.
+ (and has-matches?
+ (:combine-fn accumulator))
+ ((:combine-fn accumulator) previous-reduced (do-accumulate accumulator facts))
+
+ ;; There are matches and there is a previous reduced value for the previous
+ ;; items. So just add the new items to the accumulated value.
+ has-matches?
+ (do-accumulate (assoc accumulator :initial-value previous-reduced) facts)
+
+ ;; There are no matches right now. So do not perform any accumulations.
+ ;; If there are never matches, time will be saved by never reducing.
+ :else
+ ::not-reduced)
+
+ converted (when (and (some? combined-reduced)
+ (not= ::not-reduced combined-reduced))
+ (convert-return-fn combined-reduced))
+
+ previous-converted (when (and has-previous?
+ (some? previous-reduced)
+ (not= ::not-reduced previous-reduced))
+ (convert-return-fn previous-reduced))
+
+ accum-reduced ^::accum-node [combined combined-reduced]]]
+
+ ;; Add the combined results to memory.
+ (l/add-accum-reduced! listener node join-bindings accum-reduced bindings)
+ (mem/add-accum-reduced! memory node join-bindings accum-reduced bindings)
+
+ (cond
+
+ ;; Do nothing when the result was nil before and after.
+ (and (nil? previous-converted)
+ (nil? converted))
+ nil
+
+ (nil? converted)
+ (doseq [token matched-tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
+ transport memory listener))
+
+ (nil? previous-converted)
+ (doseq [token matched-tokens]
+ (send-accumulated node accum-condition accumulator result-binding token converted bindings
+ transport memory listener))
+
+ ;; If there are previous results, then propagate downstream if the new result differs from
+ ;; the previous result. If the new result is equal to the previous result don't do
+ ;; anything. Note that the memory has already been updated with the new combined value,
+ ;; which may be needed if elements in memory change later.
+ (not= converted previous-converted)
+ ;; There is no requirement that we doseq over all retractions then doseq over propagations; we could
+ ;; just as easily doseq over tokens at the top level and retract and propagate for each token in turn.
+ ;; In the absence of hard evidence either way, doing it this way is just an educated guess as to
+ ;; which is likely to be more performant.
+ (do
+ (doseq [token matched-tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
+ transport memory listener))
+ (doseq [token matched-tokens]
+ (send-accumulated node accum-condition accumulator result-binding token converted bindings
+ transport memory listener))))))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+
+ (l/right-activate! listener node elements)
+ ;; Simple right-activate implementation simply defers to
+ ;; accumulator-specific logic.
+ (right-activate-reduced
+ node
+ join-bindings
+ (pre-reduce node elements)
+ memory
+ transport
+ listener))
+
+ (right-retract [node join-bindings elements memory transport listener]
+
+ (l/right-retract! listener node elements)
+
+ (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
+ ;; As in right-activate-reduced, a token can match with multiple groupings of elements
+ ;; by their bindings.
+ matched-tokens (mem/get-tokens memory node join-bindings)
+ has-matches? (seq matched-tokens)]
+ [bindings elements] (platform/group-by-seq :bindings elements)
+
+ :let [previous (mem/get-accum-reduced memory node join-bindings bindings)
+ has-previous? (not= ::mem/no-accum-reduced previous)
+ [previous previous-reduced] (if has-previous?
+ previous
+ ^::accum-node [::mem/no-accum-reduced ::not-reduced])]
+
+ ;; No need to retract anything if there were no previous items.
+ :when has-previous?
+
+ ;; Compute the new version with the retracted information.
+ :let [facts (mapv :fact elements)
+ [removed retracted] (mem/remove-first-of-each facts previous)
+ all-retracted? (empty? retracted)
+ ;; If there is a previous value and there are matches, a value would have been
+ ;; accumulated and propagated, so there is something to retract
+ ;; and re-accumulate in its place.
+ ;; Otherwise, no reduce is needed right now.
+ retracted-reduced (if (and has-matches?
+ (not all-retracted?))
+ ;; Use the :retract-fn if one is provided.
+ ;; Otherwise, just re-accumulate based on the
+ ;; remaining items after retraction.
+ (if-let [retract-fn (:retract-fn accumulator)]
+ (r/reduce retract-fn previous-reduced removed)
+ (do-accumulate accumulator retracted))
+ ::not-reduced)
+
+ ;; It is possible that either the retracted or previous reduced are ::not-reduced
+ ;; at this point if there are no matching tokens. has-matches? indicates this. If
+ ;; this is the case, there are no converted values to calculate. However, memory still
+ ;; will be updated since the facts left after this retraction still need to be stored
+ ;; for later possible activations.
+ retracted-converted (when (and (some? retracted-reduced)
+ (not= ::not-reduced retracted-reduced))
+ (convert-return-fn retracted-reduced))
+ previous-converted (when (and (some? previous-reduced)
+ (not= ::not-reduced previous-reduced))
+ (convert-return-fn previous-reduced))]]
+
+ (if all-retracted?
+ (do
+ ;; When everything has been retracted we need to remove the accumulated results from memory.
+ (l/remove-accum-reduced! listener node join-bindings bindings)
+ (mem/remove-accum-reduced! memory node join-bindings bindings)
+
+ (doseq [:when (some? previous-converted)
+ token matched-tokens]
+ ;; Retract the previous token.
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
+ transport memory listener))
+
+ (let [initial-value (:initial-value accumulator)
+
+ initial-converted (when initial-value
+ (convert-return-fn initial-value))]
+
+ (when (and (some? initial-converted)
+ (empty? new-bindings))
+
+ (doseq [token matched-tokens]
+ (l/add-accum-reduced! listener node join-bindings ^::accum-node [[] initial-value] join-bindings)
+ (mem/add-accum-reduced! memory node join-bindings ^::accum-node [[] initial-value] join-bindings)
+ (send-accumulated node accum-condition accumulator result-binding token initial-converted {}
+ transport memory listener)))))
+ (do
+ ;; Add our newly retracted information to our node.
+ (l/add-accum-reduced! listener node join-bindings ^::accum-node [retracted retracted-reduced] bindings)
+ (mem/add-accum-reduced! memory node join-bindings ^::accum-node [retracted retracted-reduced] bindings)
+
+ (cond
+ (and (nil? previous-converted)
+ (nil? retracted-converted))
+ nil
+
+ (nil? previous-converted)
+ (doseq [token matched-tokens]
+ (send-accumulated node accum-condition accumulator result-binding token retracted-converted bindings
+ transport memory listener))
+
+ (nil? retracted-converted)
+ (doseq [token matched-tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
+ transport memory listener))
+
+ (not= retracted-converted previous-converted)
+ ;; There is no requirement that we doseq over all retractions then doseq over propagations; we could
+ ;; just as easily doseq over tokens at the top level and retract and propagate for each token in turn.
+ ;; In the absence of hard evidence either way, doing it this way is just an educated guess as to
+ ;; which is likely to be more performant.
+ (do
+ (doseq [token matched-tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
+ transport memory listener))
+ (doseq [token matched-tokens]
+ (send-accumulated node accum-condition accumulator result-binding token retracted-converted bindings
+ transport memory listener))))))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [accumulator from]} accum-condition
+ {:keys [type constraints]} from
+ condition (into [type] constraints)
+ result-symbol (symbol (name result-binding))]
+ [result-symbol '<- accumulator :from condition]))
+
+ IAccumInspect
+ (token->matching-elements [this memory token]
+ ;; Tokens are stored in the memory keyed on join bindings with previous nodes and new bindings
+ ;; introduced in this node. Each of these sets of bindings is known at the time of rule network
+ ;; compilation. It is expected that this function will receive tokens that were propagated from this
+ ;; node to its children and may have had other bindings added in the process. The bindings map entries
+ ;; in the tokens created by descendants based on tokens propagated from ancestors are supersets of the bindings
+ ;; in each ancestor. Put differently, if token T1 is passed to a child that creates a token T2 based on it
+ ;; and passes it to its children, the following statement is true:
+ ;; (= (select-keys (:bindings t2) (-> t1 :bindings keys))
+ ;; (:bindings t1))
+ ;; This being the case, we can use the downstream token to find out what binding key-value pairs were used
+ ;; to create the token "stream" of which it is part.
+ (let [join-bindings (-> token :bindings (select-keys (get-join-keys this)))
+ fact-bindings (-> token :bindings (select-keys new-bindings))]
+ (first (mem/get-accum-reduced memory this join-bindings (merge join-bindings fact-bindings))))))
+
+(defn- filter-accum-facts
+ "Run a filter on elements against a given token for constraints that are not simple hash joins."
+ [node join-filter-fn token candidate-facts bindings]
+ (filter #(join-node-matches node join-filter-fn token % bindings {}) candidate-facts))
+
+;; A specialization of the AccumulateNode that supports additional tests
+;; that have to occur on the beta side of the network. The key difference between this and the simple
+;; accumulate node is the join-filter-fn, which accepts a token and a fact and filters out facts that
+;; are not consistent with the given token.
+(defrecord AccumulateWithJoinFilterNode [id accum-condition accumulator join-filter-fn
+ result-binding children binding-keys new-bindings]
+
+ ILeftActivate
+ (left-activate [node join-bindings tokens memory transport listener]
+
+ (l/left-activate! listener node tokens)
+
+ ;; Facts that are candidates for matching the token are used in this accumulator node,
+ ;; which must be filtered before running the accumulation.
+ (let [convert-return-fn (:convert-return-fn accumulator)
+ grouped-candidate-facts (mem/get-accum-reduced-all memory node join-bindings)]
+ (mem/add-tokens! memory node join-bindings tokens)
+
+ (cond
+
+ (seq grouped-candidate-facts)
+ (doseq [token tokens
+ [fact-bindings candidate-facts] grouped-candidate-facts
+
+ ;; Filter to items that match the incoming token, then apply the accumulator.
+ :let [filtered-facts (filter-accum-facts node join-filter-fn token candidate-facts fact-bindings)]
+
+ :when (or (seq filtered-facts)
+ ;; Even if there are no filtered facts, if there are no new bindings we may
+ ;; have an initial value to propagate.
+ (and (some? (:initial-value accumulator))
+ (empty? new-bindings)))
+
+ :let [accum-result (do-accumulate accumulator filtered-facts)
+ converted-result (when (some? accum-result)
+ (convert-return-fn accum-result))]
+
+ :when (some? converted-result)]
+
+ (send-accumulated node accum-condition accumulator result-binding token
+ converted-result fact-bindings transport memory listener))
+
+ ;; There are no previously accumulated results, but we still may need to propagate things
+ ;; such as a sum of zero items.
+ ;; If all variables in the accumulated item are bound and an initial
+ ;; value is provided, we can propagate the initial value as the accumulated item.
+
+ ;; We need to not propagate nil initial values, regardless of whether the convert-return-fn
+ ;; makes them non-nil, in order to not break existing code; this is discussed more in the
+ ;; right-activate-reduced implementation.
+ (and (some? (:initial-value accumulator))
+ (empty? new-bindings)) ; An initial value exists that we can propagate.
+ (let [initial-value (:initial-value accumulator)
+ ;; Note that we check that the :initial-value is non-nil above, which is why we
+ ;; don't need (when initial-value (convert-return-fn initial-value)) here.
+ converted-result (convert-return-fn initial-value)]
+
+ (when (some? converted-result)
+ ;; Send the created accumulated item to the children.
+ (doseq [token tokens]
+ (send-accumulated node accum-condition accumulator result-binding token
+ converted-result join-bindings transport memory listener))))
+
+ ;; Propagate nothing if the above conditions don't apply.
+ :default nil)))
+
+ (left-retract [node join-bindings tokens memory transport listener]
+
+ (l/left-retract! listener node tokens)
+
+ (let [;; Even if the accumulator didn't propagate anything before we still need to remove the tokens
+ ;; in case they would have otherwise been used in the future.
+ tokens (mem/remove-tokens! memory node join-bindings tokens)
+ convert-return-fn (:convert-return-fn accumulator)
+ grouped-candidate-facts (mem/get-accum-reduced-all memory node join-bindings)]
+
+ (cond
+
+ (seq grouped-candidate-facts)
+ (doseq [token tokens
+ [fact-bindings candidate-facts] grouped-candidate-facts
+
+ :let [filtered-facts (filter-accum-facts node join-filter-fn token candidate-facts fact-bindings)]
+
+ :when (or (seq filtered-facts)
+ ;; Even if there are no filtered facts, if there are no new bindings an initial value
+ ;; may have been propagated, and if so we need to retract it.
+ (and (some? (:initial-value accumulator))
+ (empty? new-bindings)))
+
+ :let [accum-result (do-accumulate accumulator filtered-facts)
+ retracted-converted (when (some? accum-result)
+ (convert-return-fn accum-result))]
+
+ ;; A nil retracted previous result should not have been propagated before.
+ :when (some? retracted-converted)]
+
+ (retract-accumulated node accum-condition accumulator result-binding token
+ retracted-converted fact-bindings transport memory listener))
+
+ (and (some? (:initial-value accumulator))
+ (empty? new-bindings))
+ (let [initial-value (:initial-value accumulator)
+ ;; Note that we check that the :initial-value is non-nil above, which is why we
+ ;; don't need (when initial-value (convert-return-fn initial-value)) here.
+ converted-result (convert-return-fn initial-value)]
+
+ (when (some? converted-result)
+ (doseq [token tokens]
+ (retract-accumulated node accum-condition accumulator result-binding token
+ converted-result join-bindings transport memory listener))))
+
+ :else nil)))
+
+ (get-join-keys [node] binding-keys)
+
+ (description [node] (str "AccumulateWithBetaPredicateNode -- " accumulator))
+
+ IAccumRightActivate
+ (pre-reduce [node elements]
+ ;; Return a map of bindings to the candidate facts that match them. This accumulator
+ ;; depends on the values from parent facts, so we defer actually running the accumulator
+ ;; until we have a token.
+ (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
+ [bindings (map :fact element-group)]))
+
+ (right-activate-reduced [node join-bindings binding-candidates-seq memory transport listener]
+
+ ;; Combine previously reduced items together, join to matching tokens,
+ ;; and emit child tokens.
+ (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
+ matched-tokens (mem/get-tokens memory node join-bindings)]
+ [bindings candidates] binding-candidates-seq
+ :let [previous-candidates (mem/get-accum-reduced memory node join-bindings bindings)
+ previously-reduced? (not= ::mem/no-accum-reduced previous-candidates)
+ previous-candidates (when previously-reduced? previous-candidates)]]
+
+ ;; Combine the newly reduced values with any previous items. Ensure that new items are always added to the end so that
+ ;; we have a consistent order for retracting results from accumulators such as acc/all whose results can be in any order. Making this
+ ;; ordering consistent allows us to skip the filter step on previous elements on right-activations.
+ (let [combined-candidates (into []
+ cat
+ [previous-candidates candidates])]
+
+ (l/add-accum-reduced! listener node join-bindings combined-candidates bindings)
+
+ (mem/add-accum-reduced! memory node join-bindings combined-candidates bindings))
+
+ (doseq [token matched-tokens
+
+ :let [new-filtered-facts (filter-accum-facts node join-filter-fn token candidates bindings)]
+
+ ;; If no new elements matched the token, we don't need to do anything for this token
+ ;; since the final result is guaranteed to be the same.
+ :when (seq new-filtered-facts)
+
+ :let [previous-filtered-facts (filter-accum-facts node join-filter-fn token previous-candidates bindings)
+
+ previous-accum-result-init (cond
+ (seq previous-filtered-facts)
+ (do-accumulate accumulator previous-filtered-facts)
+
+ (and (-> accumulator :initial-value some?)
+ (empty? new-bindings))
+ (:initial-value accumulator)
+
+ ;; Allow direct determination later of whether there was a previous value
+ ;; as determined by the preceding cond conditions.
+ :else ::no-previous-value)
+
+ previous-accum-result (when (not= previous-accum-result-init ::no-previous-value)
+ previous-accum-result-init)
+
+ ;; Since the new elements are added onto the end of the previous elements in the accum-memory,
+ ;; accumulating using the new elements on top of the previous result is an accumulation in the same
+ ;; order as the elements are present in memory. As a result, future accumulations on the contents of the accum memory
+ ;; prior to further modification of that memory will return the same result as here. This is important since if we use
+ ;; something like acc/all to accumulate and propagate [A B], then if B is retracted we need to retract [A B], not [B A]; the latter won't
+ ;; actually retract anything, which would be invalid.
+ accum-result (let [accum-previous-init (if (not= previous-accum-result-init ::no-previous-value)
+ ;; If there was a previous result, use it as the initial value.
+ (assoc accumulator :initial-value previous-accum-result)
+ ;; If there was no previous result, use the default initial value.
+ ;; Note that if there is a non-nil initial value but there are new binding
+ ;; groups we consider there to have been no previous value, but we still want
+ ;; to use the actual initial value, not nil.
+ accumulator)]
+ (do-accumulate accum-previous-init new-filtered-facts))
+
+ previous-converted (when (some? previous-accum-result)
+ (convert-return-fn previous-accum-result))
+
+ new-converted (when (some? accum-result)
+ (convert-return-fn accum-result))]]
+
+ (cond
+
+ ;; When both the new and previous result were nil do nothing.
+ (and (nil? previous-converted)
+ (nil? new-converted))
+ nil
+
+ (nil? new-converted)
+ (retract-accumulated node accum-condition accumulator result-binding token
+ previous-converted bindings transport memory listener)
+
+ (nil? previous-converted)
+ (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener)
+
+ (not= new-converted previous-converted)
+ (do
+ (retract-accumulated node accum-condition accumulator result-binding token
+ previous-converted bindings transport memory listener)
+ (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener))))))
+
+ IRightActivate
+ (right-activate [node join-bindings elements memory transport listener]
+
+ (l/right-activate! listener node elements)
+
+ ;; Simple right-activate implementation simply defers to
+ ;; accumulator-specific logic.
+ (right-activate-reduced
+ node
+ join-bindings
+ (pre-reduce node elements)
+ memory
+ transport
+ listener))
+
+ (right-retract [node join-bindings elements memory transport listener]
+
+ (l/right-retract! listener node elements)
+
+ (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
+ matched-tokens (mem/get-tokens memory node join-bindings)]
+ [bindings elements] (platform/group-by-seq :bindings elements)
+ :let [previous-candidates (mem/get-accum-reduced memory node join-bindings bindings)]
+
+ ;; No need to retract anything if there was no previous item.
+ :when (not= ::mem/no-accum-reduced previous-candidates)
+
+ :let [facts (mapv :fact elements)
+ new-candidates (second (mem/remove-first-of-each facts previous-candidates))]]
+
+ ;; Add the new candidates to our node.
+ (l/add-accum-reduced! listener node join-bindings new-candidates bindings)
+ (mem/add-accum-reduced! memory node join-bindings new-candidates bindings)
+
+ (doseq [;; Get all of the previously matched tokens so we can retract and re-send them.
+ token matched-tokens
+
+ :let [previous-facts (filter-accum-facts node join-filter-fn token previous-candidates bindings)
+
+ new-facts (filter-accum-facts node join-filter-fn token new-candidates bindings)]
+
+ ;; The previous matching elements are a superset of the matching elements after retraction.
+ ;; Therefore, if the counts before and after are equal nothing retracted actually matched
+ ;; and we don't need to do anything else here since the end result shouldn't change.
+ :when (not= (count previous-facts)
+ (count new-facts))
+
+ :let [;; We know from the check above that matching elements existed previously,
+ ;; since if there were no previous matching elements the count of matching
+ ;; elements before and after a right-retraction cannot be different.
+ previous-result (do-accumulate accumulator previous-facts)
+
+ ;; TODO: Can we use the retract-fn here if present to improve performance? We'd also potentially
+ ;; avoid needing to filter facts twice above, since elements present both before and after retraction
+ ;; will be given to the join-filter-fn twice (once when creating previous-facts and once when creating new-facts).
+ ;; Note that any future optimizations here must ensure that the result propagated here is equal to the result
+ ;; that will be recreated as the previous result in right activate, and that this can be dependent on the order
+ ;; of candidates in the memory, since, for example (acc/all) can return both [A B] and [B A] but these are not equal.
+
+ new-result (cond
+
+ (seq new-facts)
+ (do-accumulate accumulator new-facts)
+
+ (and (-> accumulator :initial-value some?)
+ (empty? new-bindings))
+ (:initial-value accumulator)
+
+ :else nil)
+
+ previous-converted (when (some? previous-result)
+ (convert-return-fn previous-result))
+
+ new-converted (when (some? new-result)
+ (convert-return-fn new-result))]]
+
+ (cond
+
+ ;; When both the previous and new results are nil do nothing.
+ (and (nil? previous-converted)
+ (nil? new-converted))
+ nil
+
+ (nil? new-converted)
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings transport memory listener)
+
+ (nil? previous-converted)
+ (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener)
+
+ (not= previous-converted new-converted)
+ (do
+ (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings transport memory listener)
+ (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener))))))
+
+ IConditionNode
+ (get-condition-description [this]
+ (let [{:keys [accumulator from]} accum-condition
+ {:keys [type constraints original-constraints]} from
+ result-symbol (symbol (name result-binding))
+ full-constraints (if (seq original-constraints)
+ original-constraints
+ constraints)
+ condition (into [type] full-constraints)]
+ [result-symbol '<- accumulator :from condition]))
+
+ ;; The explanation of the implementation of token->matching-elements on AccumulateNode applies here as well.
+ ;; Note that since we store all facts propagated from the alpha network to this condition in the accum memory,
+ ;; regardless of whether they meet the join condition with upstream facts from the beta network, we rerun
+ ;; the join filter function. Since the :matches are not used in the join filter function and the bindings in the
+ ;; token will contain all bindings used in the "ancestor token" to join with these same facts, we can just pass the token
+ ;; as-is to the join filter.
+ IAccumInspect
+ (token->matching-elements [this memory token]
+ (let [join-bindings (-> token :bindings (select-keys (get-join-keys this)))
+ fact-bindings (-> token :bindings (select-keys new-bindings))
+ unfiltered-facts (mem/get-accum-reduced memory this join-bindings (merge join-bindings fact-bindings))]
+ ;; The functionality to throw conditions with meaningful information assumes that all bindings in the token
+ ;; are meaningful to the join, which is not the case here since the token passed is from a descendant of this node, not
+ ;; this node. The generated error message also wouldn't make much sense in the context of session inspection.
+ ;; We could create specialized error handling here, but in reality most cases that cause errors here would also cause
+ ;; errors at rule firing time so the benefit would be limited. Nevertheless there would be some benefit and it is
+ ;; possible that we will do it in the future.
+ (filter (fn [fact] (join-filter-fn token fact fact-bindings {}))
+ unfiltered-facts))))
+
+;; This lives here as it is both close to the node that it represents, and is accessible to both clj and cljs
+(def node-type->abbreviated-type
+ "To minimize function name length and attempt to prevent issues with filename length we can use these abbreviations to
+ shorten the node types. Used during compilation of the rules network."
+ {"AlphaNode" "AN"
+ "TestNode" "TN"
+ "AccumulateNode" "AccN"
+ "AccumulateWithJoinFilterNode" "AJFN"
+ "ProductionNode" "PN"
+ "NegationWithJoinFilterNode" "NJFN"
+ "ExpressionJoinNode" "EJN"})
+
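+;; For illustration only: looking up a node type name yields its abbreviation, which the
+;; compiler can embed in generated function names in place of the full type name.
+(comment
+  (node-type->abbreviated-type "AlphaNode")
+  ;; => "AN"
+  )
+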
+(defn variables-as-keywords
+ "Returns symbols in the given s-expression that start with '?' as keywords"
+ [expression]
+ (into #{} (platform/eager-for [item (tree-seq coll? seq expression)
+ :when (and (symbol? item)
+ (= \? (first (name item))))]
+ (keyword item))))
+
+(defn conj-rulebases
+ "DEPRECATED. Simply concat sequences of rules and queries.
+
+ Conjoin two rulebases, returning a new one with the same rules."
+ [base1 base2]
+ (concat base1 base2))
+
+(defn fire-rules*
+ "Fire rules for the given nodes."
+ [rulebase nodes transient-memory transport listener get-alphas-fn update-cache]
+ (binding [*current-session* {:rulebase rulebase
+ :transient-memory transient-memory
+ :transport transport
+ :insertions (atom 0)
+ :get-alphas-fn get-alphas-fn
+ :pending-updates update-cache
+ :listener listener}]
+
+ (loop [next-group (mem/next-activation-group transient-memory)
+ last-group nil]
+
+ (if next-group
+
+ (if (and last-group (not= last-group next-group))
+
+ ;; We have changed groups, so flush the updates from the previous
+ ;; group before continuing.
+ (do
+ (flush-updates *current-session*)
+ (let [upcoming-group (mem/next-activation-group transient-memory)]
+ (l/activation-group-transition! listener next-group upcoming-group)
+ (recur upcoming-group next-group)))
+
+ (do
+
+ ;; If there are activations, fire them.
+ (when-let [{:keys [node token] :as activation} (mem/pop-activation! transient-memory)]
+ ;; Use vectors for the insertion caches so that within an insertion type
+ ;; (unconditional or logical) all insertions are done in order after the into
+ ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
+ ;; in the sequences.
+ (let [batched-logical-insertions (atom [])
+ batched-unconditional-insertions (atom [])
+ batched-rhs-retractions (atom [])]
+ (binding [*rule-context* {:token token
+ :node node
+ :batched-logical-insertions batched-logical-insertions
+ :batched-unconditional-insertions batched-unconditional-insertions
+ :batched-rhs-retractions batched-rhs-retractions}]
+
+ ;; Fire the rule itself.
+ (try
+ ((:rhs node) token (:env (:production node)))
+ ;; Don't do anything if a given insertion type has no corresponding
+ ;; facts, to avoid complicating traces. Note that since no RHS's of
+ ;; downstream rules are fired here, everything is governed by truth maintenance.
+ ;; Therefore, the reordering of retractions and insertions should have no impact
+ ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
+ ;; of the rules engine.
+ ;;
+ ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
+ ;; below. They will be dereferenced again if an exception is caught, but in the error
+ ;; case we aren't worried about performance.
+ (let [retrieved-unconditional-insertions @batched-unconditional-insertions
+ retrieved-logical-insertions @batched-logical-insertions
+ retrieved-rhs-retractions @batched-rhs-retractions]
+ (l/fire-activation! listener
+ activation
+ {:unconditional-insertions retrieved-unconditional-insertions
+ :logical-insertions retrieved-logical-insertions
+ :rhs-retractions retrieved-rhs-retractions})
+ (when-let [batched (seq retrieved-unconditional-insertions)]
+ (flush-insertions! batched true))
+ (when-let [batched (seq retrieved-logical-insertions)]
+ (flush-insertions! batched false))
+ (when-let [batched (seq retrieved-rhs-retractions)]
+ (flush-rhs-retractions! batched)))
+ (catch Exception e
+ ;; If firing the rule threw an exception, help debugging by attaching
+ ;; details about the rule itself, cached insertions, and any listeners
+ ;; while propagating the cause.
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e)))))
+
+ ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
+ ;; will be in context for child rules.
+ (when (some-> node :production :props :no-loop)
+ (flush-updates *current-session*)))))
+
+ (recur (mem/next-activation-group transient-memory) next-group)))
+
+ ;; There were no items to be activated, so flush any pending
+ ;; updates and recur with a potential new activation group
+ ;; since a flushed item may have triggered one.
+ (when (flush-updates *current-session*)
+ (let [upcoming-group (mem/next-activation-group transient-memory)]
+ (l/activation-group-transition! listener next-group upcoming-group)
+ (recur upcoming-group next-group)))))))
+
+(deftype LocalSession [rulebase memory transport listener get-alphas-fn pending-operations]
+ ISession
+ (insert [session facts]
+
+ (let [new-pending-operations (conj pending-operations (uc/->PendingUpdate :insertion
+ ;; Preserve the behavior prior to https://github.com/cerner/clara-rules/issues/268
+ ;; , particularly for the Java API, where the caller could freely mutate a
+ ;; collection of facts after passing it to Clara for the constituent
+ ;; facts to be inserted or retracted. If the caller passes a persistent
+ ;; Clojure collection don't do any additional work.
+ (if (coll? facts)
+ facts
+ (into [] facts))))]
+
+ (LocalSession. rulebase
+ memory
+ transport
+ listener
+ get-alphas-fn
+ new-pending-operations)))
+
+ (retract [session facts]
+
+ (let [new-pending-operations (conj pending-operations (uc/->PendingUpdate :retraction
+ ;; As in insert above, defend against facts being a mutable collection.
+ (if (coll? facts)
+ facts
+ (into [] facts))))]
+
+ (LocalSession. rulebase
+ memory
+ transport
+ listener
+ get-alphas-fn
+ new-pending-operations)))
+
+ ;; Prior to issue 249 we only had a one-argument fire-rules method. clara.rules/fire-rules will always call the two-argument method now
+ ;; but we kept a one-argument version of the fire-rules in case anyone is calling the fire-rules protocol function or method on the session directly.
+ (fire-rules [session] (fire-rules session {}))
+ (fire-rules [session opts]
+
+ (let [transient-memory (mem/to-transient memory)
+ transient-listener (l/to-transient listener)]
+
+ (if-not (:cancelling opts)
+ ;; We originally performed insertions and retractions immediately after the insert and retract calls,
+ ;; but this had the downside of making a pattern like "Retract facts, insert other facts, and fire the rules"
+ ;; perform at least three transitions between a persistent and transient memory. Delaying the actual execution
+ ;; of the insertions and retractions until firing the rules allows us to cut this down to a single transition
+ ;; between persistent and transient memory. There is some cost to the runtime dispatch on operation types here,
+ ;; but this is presumably less significant than the cost of memory transitions.
+ ;;
+ ;; We perform the insertions and retractions in the same order as they were applied to the session since
+ ;; if a fact is not in the session, retracted, and then subsequently inserted it should be in the session at
+ ;; the end.
+ (do
+ (doseq [{op-type :type facts :facts} pending-operations]
+
+ (case op-type
+
+ :insertion
+ (do
+ (l/insert-facts! transient-listener nil nil facts)
+
+ (binding [*pending-external-retractions* (atom [])]
+ ;; Bind the external retractions cache so that any logical retractions as a result
+ ;; of these insertions can be cached and executed as a batch instead of eagerly realizing
+ ;; them. An external insertion of a fact that matches
+ ;; a negation or accumulator condition can cause logical retractions.
+ (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
+ root alpha-roots]
+ (alpha-activate root fact-group transient-memory transport transient-listener))
+ (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))
+
+ :retraction
+ (do
+ (l/retract-facts! transient-listener nil nil facts)
+
+ (binding [*pending-external-retractions* (atom facts)]
+ (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))))
+
+ (fire-rules* rulebase
+ (:production-nodes rulebase)
+ transient-memory
+ transport
+ transient-listener
+ get-alphas-fn
+ (uc/get-ordered-update-cache)))
+
+ (let [insertions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :insertion)))
+ (mapcat :facts))
+ pending-operations)
+
+ retractions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :retraction)))
+ (mapcat :facts))
+ pending-operations)
+
+ update-cache (ca/get-cancelling-update-cache)]
+
+ (binding [*current-session* {:rulebase rulebase
+ :transient-memory transient-memory
+ :transport transport
+ :insertions (atom 0)
+ :get-alphas-fn get-alphas-fn
+ :pending-updates update-cache
+ :listener transient-listener}]
+
+ ;; Insertions should come before retractions so that if we insert and then retract the same
+ ;; fact that is not already in the session the end result will be that the session won't have that fact.
+ ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
+ ;; and then later we would insert the fact.
+ (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
+ root alpha-roots]
+ (alpha-activate root fact-group transient-memory transport transient-listener))
+
+ (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
+ root alpha-roots]
+ (alpha-retract root fact-group transient-memory transport transient-listener))
+
+ (fire-rules* rulebase
+ (:production-nodes rulebase)
+ transient-memory
+ transport
+ transient-listener
+ get-alphas-fn
+ ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
+ ;; If this is suboptimal for some workflows we can revisit this.
+ update-cache))))
+
+ (LocalSession. rulebase
+ (mem/to-persistent! transient-memory)
+ transport
+ (l/to-persistent! transient-listener)
+ get-alphas-fn
+ [])))
+
+ (query [session query params]
+ (let [query-node (get-in rulebase [:query-nodes query])]
+ (when (= nil query-node)
+ (platform/throw-error (str "The query " query " is invalid or not included in the rule base.")))
+ (when-not (= (into #{} (keys params)) ;; nil params should be equivalent to #{}
+ (:param-keys query-node))
+ (platform/throw-error (str "The query " query " was not provided with the correct parameters, expected: "
+ (:param-keys query-node) ", provided: " (set (keys params)))))
+
+ (->> (mem/get-tokens memory query-node params)
+
+         ;; Get the bindings for each token and filter generated symbols.
+ (map (fn [{bindings :bindings}]
+
+ ;; Filter generated symbols. We check first since this is an uncommon flow.
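+                 ;; For example (hedged sketch): a bindings map of
+                 ;; {:?name "x", :?__gen_123 1} would be returned as {:?name "x"}.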
+ (if (some #(re-find #"__gen" (name %)) (keys bindings))
+
+ (into {} (remove (fn [[k v]] (re-find #"__gen" (name k)))
+ bindings))
+ bindings))))))
+
+ (components [session]
+ {:rulebase rulebase
+ :memory memory
+ :transport transport
+ :listeners (l/flatten-listener listener)
+ :get-alphas-fn get-alphas-fn}))
+
+(defn assemble
+ "Assembles a session from the given components, which must be a map
+ containing the following:
+
+   :rulebase A record matching the clara.rules.compiler/Rulebase structure.
+ :memory An implementation of the clara.rules.memory/IMemoryReader protocol
+ :transport An implementation of the clara.rules.engine/ITransport protocol
+ :listeners A vector of listeners implementing the clara.rules.listener/IPersistentListener protocol
+ :get-alphas-fn The function used to return the alpha nodes for a fact of the given type."
+
+ [{:keys [rulebase memory transport listeners get-alphas-fn]}]
+ (LocalSession. rulebase
+ memory
+ transport
+ (if (> (count listeners) 0)
+ (l/delegating-listener listeners)
+ l/default-listener)
+ get-alphas-fn
+ []))
+
+(defn with-listener
+ "Return a new session with the listener added to the provided session,
+ in addition to all listeners previously on the session."
+ [session listener]
+ (let [{:keys [listeners] :as components} (components session)]
+ (assemble (assoc components
+ :listeners
+ (conj listeners
+ listener)))))
+
+(defn remove-listeners
+ "Return a new session with all listeners matching the predicate removed"
+ [session pred]
+ (let [{:keys [listeners] :as components} (components session)]
+ (if (some pred listeners)
+ (assemble (assoc components
+ :listeners
+ (into [] (remove pred) listeners)))
+ session)))
+
+(defn find-listeners
+ "Return all listeners on the session matching the predicate."
+ [session pred]
+ (let [{:keys [listeners]} (components session)]
+ (filterv pred listeners)))
+
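+;; Hedged usage sketch: `tracing-listener` stands in for any persistent listener value
+;; (e.g. one from clara.tools.tracing). The helpers above rebuild the session from its
+;; components rather than mutating it:
+;;
+;;   (-> session
+;;       (with-listener tracing-listener)
+;;       (find-listeners (partial instance? (class tracing-listener))))
+;;
+;;   (remove-listeners session (partial instance? (class tracing-listener)))
+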
+(defn local-memory
+ "Returns a local, in-process working memory."
+ [rulebase transport activation-group-sort-fn activation-group-fn alphas-fn]
+ (let [memory (mem/to-transient (mem/local-memory rulebase activation-group-sort-fn activation-group-fn alphas-fn))]
+ (doseq [beta-node (:beta-roots rulebase)]
+ (left-activate beta-node {} [empty-token] memory transport l/default-listener))
+ (mem/to-persistent! memory)))
+
+(defn options->activation-group-sort-fn
+ "Given the map of options for a session, construct an activation group sorting
+ function that takes into account the user-provided salience and internal salience.
+ User-provided salience is considered first. Under normal circumstances this function should
+ only be called by Clara itself."
+ [options]
+ (let [user-activation-group-sort-fn (or (get options :activation-group-sort-fn)
+ ;; Default to sort by descending numerical order.
+ >)]
+
+ ;; Compare user-provided salience first, using either the provided salience function or the default,
+ ;; then use the internal salience if the former does not provide an ordering between the two salience values.
+ (fn [salience1 salience2]
+ (let [forward-result (user-activation-group-sort-fn (nth salience1 0)
+ (nth salience2 0))]
+ (if (number? forward-result)
+ (if (= 0 forward-result)
+ (> (nth salience1 1)
+ (nth salience2 1))
+
+ forward-result)
+ (let [backward-result (user-activation-group-sort-fn (nth salience2 0)
+ (nth salience1 0))
+ forward-bool (boolean forward-result)
+ backward-bool (boolean backward-result)]
+ ;; Since we just use Clojure functions, for example >, equality may be implied
+ ;; by returning false for comparisons in both directions rather than by returning 0.
+ ;; Furthermore, ClojureScript will use truthiness semantics rather than requiring a
+ ;; boolean (unlike Clojure), so we use the most permissive semantics between Clojure
+ ;; and ClojureScript.
+ (if (not= forward-bool backward-bool)
+ forward-bool
+ (> (nth salience1 1)
+ (nth salience2 1)))))))))
+
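+;; A hedged example of the resulting comparator. Its arguments are the
+;; [user-salience internal-salience] pairs produced by options->activation-group-fn below:
+;;
+;;   (def sort-fn (options->activation-group-sort-fn {}))
+;;   (sort-fn [10 0] [5 0])  ;; => true  (higher user salience fires first)
+;;   (sort-fn [10 1] [10 0]) ;; => true  (ties fall back to internal salience)
+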
+(def ^:private internal-salience-levels {:default 0
+ ;; Extracted negations need to be prioritized over their original
+ ;; rules since their original rule could fire before the extracted condition.
+ ;; This is a problem if the original rule performs an unconditional insertion
+ ;; or has other side effects not controlled by truth maintenance.
+ :extracted-negation 1})
+
+(defn options->activation-group-fn
+ "Given a map of options for a session, construct a function that takes a production
+ and returns the activation group to which it belongs, considering both user-provided
+ and internal salience. Under normal circumstances this function should only be called by
+ Clara itself."
+ [options]
+ (let [rule-salience-fn (or (:activation-group-fn options)
+ (fn [production] (or (some-> production :props :salience)
+ 0)))]
+
+ (fn [production]
+ [(rule-salience-fn production)
+ (internal-salience-levels (or (some-> production :props :clara-rules/internal-salience)
+ :default))])))
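+
+;; Hedged usage sketch: `group-fn` maps a production to its [user-salience internal-salience]
+;; activation group; the production maps below are deliberately minimal.
+;;
+;;   (def group-fn (options->activation-group-fn {}))
+;;   (group-fn {:props {:salience 10}})                                        ;; => [10 0]
+;;   (group-fn {:props {:clara-rules/internal-salience :extracted-negation}})  ;; => [0 1]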
diff --git a/src/main/clojure/clara/rules/java.clj b/src/main/clojure/clara/rules/java.clj
index eb299d2b..533409cd 100644
--- a/src/main/clojure/clara/rules/java.clj
+++ b/src/main/clojure/clara/rules/java.clj
@@ -3,11 +3,9 @@
Java support. Users should use the Java API, or the clara.rules namespace from Clojure."
(:require [clara.rules :as clara]
[clara.rules.engine :as eng]
- [clara.rules.compiler :as com]
- [clara.rules.memory :as mem])
+ [clara.rules.compiler :as com])
(:refer-clojure :exclude [==])
- (:import [clara.rules.engine LocalTransport]
- [clara.rules WorkingMemory QueryResult]))
+ (:import [clara.rules WorkingMemory QueryResult]))
(deftype JavaQueryResult [result]
QueryResult
@@ -42,10 +40,10 @@
(JavaWorkingMemory. (clara/fire-rules session)))
(query [this name args]
- (run-query session name args))
+ (run-query session name args))
(query [this name]
- (run-query session name {})))
+ (run-query session name {})))
(defn mk-java-session [rulesets]
(JavaWorkingMemory.
diff --git a/src/main/clojure/clara/rules/listener.clj b/src/main/clojure/clara/rules/listener.clj
new file mode 100644
index 00000000..a5b1368d
--- /dev/null
+++ b/src/main/clojure/clara/rules/listener.clj
@@ -0,0 +1,176 @@
+(ns clara.rules.listener
+  "Event listeners for analyzing the flow through Clara. This is primarily for use by
+ tooling, but advanced users may use this to analyze sessions.")
+
+(defprotocol IPersistentEventListener
+ (to-transient [listener]))
+
+;; TODO: Handle add-accum-reduced
+(defprotocol ITransientEventListener
+ (left-activate! [listener node tokens])
+ (left-retract! [listener node tokens])
+ (right-activate! [listener node elements])
+ (right-retract! [listener node elements])
+ (insert-facts! [listener node token facts])
+ (alpha-activate! [listener node facts])
+ (insert-facts-logical! [listener node token facts])
+ (retract-facts! [listener node token facts])
+ (alpha-retract! [listener node facts])
+ (retract-facts-logical! [listener node token facts])
+ (add-accum-reduced! [listener node join-bindings result fact-bindings])
+ (remove-accum-reduced! [listener node join-bindings fact-bindings])
+ (add-activations! [listener node activations])
+ (remove-activations! [listener node activations])
+ (fire-activation! [listener activation resulting-operations])
+ (fire-rules! [listener node])
+ (activation-group-transition! [listener original-group new-group])
+ (to-persistent! [listener]))
+
+;; A listener that does nothing.
+(deftype NullListener []
+ ITransientEventListener
+ (left-activate! [listener node tokens]
+ listener)
+ (left-retract! [listener node tokens]
+ listener)
+ (right-activate! [listener node elements]
+ listener)
+ (right-retract! [listener node elements]
+ listener)
+ (insert-facts! [listener node token facts]
+ listener)
+ (alpha-activate! [listener node facts]
+ listener)
+ (insert-facts-logical! [listener node token facts]
+ listener)
+ (retract-facts! [listener node token facts]
+ listener)
+ (alpha-retract! [listener node facts]
+ listener)
+ (retract-facts-logical! [listener node token facts]
+ listener)
+ (add-accum-reduced! [listener node join-bindings result fact-bindings]
+ listener)
+ (remove-accum-reduced! [listener node join-bindings fact-bindings]
+ listener)
+ (add-activations! [listener node activations]
+ listener)
+ (remove-activations! [listener node activations]
+ listener)
+ (fire-activation! [listener activation resulting-operations]
+ listener)
+ (fire-rules! [listener node]
+ listener)
+ (activation-group-transition! [listener original-group new-group]
+ listener)
+ (to-persistent! [listener]
+ listener)
+
+ IPersistentEventListener
+ (to-transient [listener]
+ listener))
+
+(declare delegating-listener)
+
+;; A listener that simply delegates to others
+(deftype DelegatingListener [children]
+ ITransientEventListener
+ (left-activate! [listener node tokens]
+ (doseq [child children]
+ (left-activate! child node tokens)))
+
+ (left-retract! [listener node tokens]
+ (doseq [child children]
+ (left-retract! child node tokens)))
+
+ (right-activate! [listener node elements]
+ (doseq [child children]
+ (right-activate! child node elements)))
+
+ (right-retract! [listener node elements]
+ (doseq [child children]
+ (right-retract! child node elements)))
+
+ (insert-facts! [listener node token facts]
+ (doseq [child children]
+ (insert-facts! child node token facts)))
+
+ (alpha-activate! [listener node facts]
+ (doseq [child children]
+ (alpha-activate! child node facts)))
+
+ (insert-facts-logical! [listener node token facts]
+ (doseq [child children]
+ (insert-facts-logical! child node token facts)))
+
+ (retract-facts! [listener node token facts]
+ (doseq [child children]
+ (retract-facts! child node token facts)))
+
+ (alpha-retract! [listener node facts]
+ (doseq [child children]
+ (alpha-retract! child node facts)))
+
+ (retract-facts-logical! [listener node token facts]
+ (doseq [child children]
+ (retract-facts-logical! child node token facts)))
+
+ (add-accum-reduced! [listener node join-bindings result fact-bindings]
+ (doseq [child children]
+ (add-accum-reduced! child node join-bindings result fact-bindings)))
+
+ (remove-accum-reduced! [listener node join-bindings fact-bindings]
+ (doseq [child children]
+ (remove-accum-reduced! child node join-bindings fact-bindings)))
+
+ (add-activations! [listener node activations]
+ (doseq [child children]
+ (add-activations! child node activations)))
+
+ (remove-activations! [listener node activations]
+ (doseq [child children]
+ (remove-activations! child node activations)))
+
+ (fire-activation! [listener activation resulting-operations]
+ (doseq [child children]
+ (fire-activation! child activation resulting-operations)))
+
+ (fire-rules! [listener node]
+ (doseq [child children]
+ (fire-rules! child node)))
+
+ (activation-group-transition! [listener original-group new-group]
+ (doseq [child children]
+ (activation-group-transition! child original-group new-group)))
+
+ (to-persistent! [listener]
+ (delegating-listener (map to-persistent! children))))
+
+(deftype PersistentDelegatingListener [children]
+ IPersistentEventListener
+ (to-transient [listener]
+ (DelegatingListener. (map to-transient children))))
+
+(defn delegating-listener
+ "Returns a listener that delegates to its children."
+ [children]
+ (PersistentDelegatingListener. children))
+
+(defn null-listener?
+ "Returns true if the given listener is the null listener, false otherwise."
+ [listener]
+ (instance? NullListener listener))
+
+(defn get-children
+ "Returns the children of a delegating listener."
+ [^PersistentDelegatingListener listener]
+ (.-children listener))
+
+;; Default listener.
+(def default-listener (NullListener.))
+
+(defn ^:internal ^:no-doc flatten-listener
+ [listener]
+ (if (null-listener? listener)
+ []
+ (get-children listener)))
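+
+;; Hedged usage sketch: `my-listener` and `other-listener` stand in for any
+;; IPersistentEventListener implementations. A delegating listener fans each event
+;; out to all of its children:
+;;
+;;   (def combined (delegating-listener [my-listener other-listener]))
+;;   (flatten-listener combined)   ;; => [my-listener other-listener]
+;;   (to-transient combined)       ;; => a DelegatingListener over the transient children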
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
new file mode 100644
index 00000000..958234ba
--- /dev/null
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -0,0 +1,909 @@
+(ns clara.rules.memory
+ "This namespace is for internal use and may move in the future.
+ Specification and default implementation of working memory"
+ (:import [java.util
+ Collections
+ LinkedList
+ NavigableMap
+ PriorityQueue
+ TreeMap]))
+
+(defprotocol IPersistentMemory
+ (to-transient [memory]))
+
+(defprotocol IMemoryReader
+ ;; Returns the rulebase associated with the given memory.
+ (get-rulebase [memory])
+
+ ;; Returns a function that produces a map of alpha nodes to
+ ;; facts that match the type of the node
+ (get-alphas-fn [memory])
+
+  ;; Returns the elements associated with the given node.
+ (get-elements [memory node bindings])
+
+ ;; Returns all elements associated with the given node, regardless of bindings.
+ (get-elements-all [memory node])
+
+ ;; Returns the tokens associated with the given node.
+ (get-tokens [memory node bindings])
+
+ ;; Returns all tokens associated with the given node, regardless of bindings
+ (get-tokens-all [memory node])
+
+ ;; Returns the reduced form of objects processed by an accumulator node
+ ;; for facts that match the given bindings.
+ (get-accum-reduced [memory node join-bindings fact-bindings])
+
+ ;; Returns all reduced results for the given node that matches
+ ;; the given join bindings, independent of the individual fact-bindings
+ ;; created by the accumulator's condition.
+ (get-accum-reduced-all [memory node join-bindings])
+
+ ;; Returns a tuple of [join-bindings fact-bindings result] for all
+ ;; accumulated items on this node.
+ (get-accum-reduced-complete [memory node])
+
+ ;; Returns insertions that occurred at the given node for the given token.
+ ;; Returns a sequence of the form
+ ;; [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]
+ (get-insertions [memory node token])
+
+ ;; Returns all insertions that occurred in the given node's RHS; this takes the form
+ ;; {token [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]}
+ (get-insertions-all [memory node])
+
+ ;; Returns a map of nodes with pending activations to the activations themselves.
+ (get-activations [memory]))
+
+(defprotocol ITransientMemory
+
+ ;; Adds working memory elements to the given working memory at the given node.
+ (add-elements! [memory node join-bindings elements])
+
+ ;; Remove working memory elements from the given working memory at the given node.
+ (remove-elements! [memory node elements join-bindings])
+
+ ;; Add tokens to the given working memory at the given node.
+ (add-tokens! [memory node join-bindings tokens])
+
+ ;; Removes tokens from the given working memory at the given node.
+ (remove-tokens! [memory node join-bindings tokens])
+
+ ;; Adds the result of a reduced accumulator execution to the given memory and node.
+ (add-accum-reduced! [memory node join-bindings accum-result fact-bindings])
+
+  ;; Removes the result of a reduced accumulator execution from the given memory and node.
+ (remove-accum-reduced! [memory node join-bindings fact-bindings])
+
+  ;; Add a record that a given fact was inserted at a given node with
+ ;; the given support. Used for truth maintenance.
+ ;; This should be called at most once per rule activation.
+ (add-insertions! [memory node token facts])
+
+ ;; Removes all records of facts that were inserted at the given node
+ ;; due to the given token. Used for truth maintenance.
+ ;; This function returns a map of each token to the associated facts
+ ;; it removed.
+ (remove-insertions! [memory node token])
+
+ ;; Add a sequence of activations.
+ (add-activations! [memory production activations])
+
+ ;; Pop an activation from the working memory. Returns nil if no
+ ;; activations are pending.
+ (pop-activation! [memory])
+
+ ;; Returns the group of the next activation, or nil if none are pending.
+ (next-activation-group [memory])
+
+ ;; Remove the given activations from the working memory. This is expected
+ ;; to return a tuple of the form [removed-activations unremoved-activations],
+ ;; where unremoved-activations is comprised of activations passed to the memory
+ ;; for removal but which were not removed because they were not present in the memory's
+ ;; store of pending activations.
+ (remove-activations! [memory production activations])
+
+ ;; Clear all activations from the working memory
+ (clear-activations! [memory])
+
+ ;; Converts the transient memory to persistent form.
+ (to-persistent! [memory]))
+
+(defn- coll-empty?
+  "Returns true if the collection is empty. Does not call seq, to avoid the
+   overhead that could cause for non-persistent collection types, e.g.
+   java.util.LinkedList."
+ [^java.util.Collection coll]
+ (or (nil? coll) (.isEmpty coll)))
+
+(defn- list-remove!
+ "Removes the item, to-remove, from the given list, lst. If it is found and removed,
+ returns true. Otherwise returns false. Only removes the first element in the list
+ that is equal to to-remove. Equality is done based on the given eq-pred function.
+ If it isn't given, the default is = . If others are equal, they will not be removed.
+ This is similar to java.util.List.remove(Object). lst is updated in place for performance.
+ This implies that the list must support the mutable list interface, namely via the
+ java.util.List.listIterator()."
+ ([^java.util.List lst to-remove]
+ (list-remove! lst to-remove =))
+ ([^java.util.List lst to-remove eq-pred]
+ (if-not (coll-empty? lst)
+ (let [li (.listIterator lst)]
+ (loop [x (.next li)]
+ (cond
+ (eq-pred to-remove x)
+ (do
+ (.remove li)
+ true)
+
+ (.hasNext li)
+ (recur (.next li))
+
+ :else
+ false)))
+ false)))
+
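+;; Hedged example: list-remove! mutates the list in place and removes only the
+;; first element equal to to-remove.
+;;
+;;   (let [lst (java.util.LinkedList. [1 2 2 3])]
+;;     [(list-remove! lst 2) (vec lst)])
+;;   ;; => [true [1 2 3]]
+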
+(defn- add-all!
+ "Adds all items from source to the destination dest collection
+ destructively. Avoids using Collection.addAll() due to unnecessary
+ performance overhead of calling Collection.toArray() on the
+ incoming source. Returns dest."
+ [^java.util.Collection dest source]
+ (doseq [x source]
+ (.add dest x))
+ dest)
+
+(defn- ->linked-list
+ "Creates a new java.util.LinkedList from the coll, but avoids using
+   Collection.addAll(Collection) since it incurs the unnecessary overhead
+   of calling Collection.toArray() on coll."
+ ^java.util.List [coll]
+ (if (instance? LinkedList coll)
+ coll
+ (add-all! (LinkedList.) coll)))
+
+(defn- remove-first-of-each!
+ "Remove the first instance of each item in the given remove-seq that
+ appears in the collection coll. coll is updated in place for
+ performance. This implies that the coll must support the mutable
+ collection interface method Collection.remove(Object). Returns a tuple
+ of the form [remove-seq-items-removed remove-seq-items-not-removed].
+ An optional compare-fn can be given to specify how to compare remove-seq
+ items against items in coll. The default compare-fn is = .
+ For immutable collection removal, use the non-destructive remove-first-of-each
+ defined below."
+ ([remove-seq ^java.util.List coll]
+ (remove-first-of-each! remove-seq coll =))
+
+ ([remove-seq ^java.util.List coll compare-fn]
+ ;; Optimization for special case of one item to remove,
+ ;; which occurs frequently.
+ (if (= 1 (count remove-seq))
+ (let [to-remove (first remove-seq)]
+ (if (list-remove! coll to-remove compare-fn)
+ [remove-seq []]
+ [[] remove-seq]))
+
+ ;; Otherwise, perform a linear search for items to remove.
+ (loop [to-remove (first remove-seq)
+ remove-seq (next remove-seq)
+ removed (transient [])
+ not-removed (transient [])]
+ (if to-remove
+ (let [found? (list-remove! coll to-remove compare-fn)
+ removed (if found?
+ (conj! removed to-remove)
+ removed)
+ not-removed (if found?
+ not-removed
+ (conj! not-removed to-remove))]
+ (recur (first remove-seq)
+ (next remove-seq)
+ removed
+ not-removed))
+          ;; If this is expensive, using a mutable collection may be good to
+ ;; consider here in a future optimization.
+ [(persistent! removed) (persistent! not-removed)])))))
+
+(defn remove-first-of-each
+ "Remove the first instance of each item in the given remove-seq that
+ appears in the collection. This also tracks which items were found
+ and removed. Returns a tuple of the form:
+ [items-removed coll-with-items-removed items-not-removed]
+ This function does so eagerly since
+   working memories with large numbers of insertions and retractions
+ can cause lazy sequences to become deeply nested."
+ [remove-seq coll]
+ (cond
+
+ ;; There is nothing to remove.
+ (empty? remove-seq) [[] coll]
+
+ ;; Otherwise, perform a linear search for items to remove.
+ :else (loop [f (first coll)
+ r (rest coll)
+ [remove-seq items-removed result] [remove-seq (transient []) (transient [])]]
+
+ (if f
+ (recur (first r)
+ (rest r)
+
+ ;; Determine if f matches any of the items to remove.
+ (loop [to-remove (first remove-seq)
+ remove-seq (rest remove-seq)
+ ;; Remember what is left to remove for later.
+ left-to-remove (transient [])]
+
+ ;; Try to find if f matches anything to-remove.
+ (if to-remove
+ (if (= to-remove f)
+
+ ;; Found a match, so the search is done.
+ [(persistent! (reduce conj! left-to-remove remove-seq))
+ (conj! items-removed to-remove)
+ result]
+
+ ;; Keep searching for a match.
+ (recur (first remove-seq)
+ (rest remove-seq)
+ (conj! left-to-remove to-remove)))
+
+ ;; No matches found.
+ [(persistent! left-to-remove)
+ items-removed
+ (conj! result f)])))
+
+ [(persistent! items-removed) (persistent! result) remove-seq]))))
+
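+;; Hedged example of the eager removal above:
+;;
+;;   (remove-first-of-each [:a :b] [:a :a :c])
+;;   ;; => [[:a] [:a :c] [:b]]
+;;   ;; one :a was removed, [:a :c] remains, and :b was never found.
+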
+(defn fast-token-compare [compare-fn token other-token]
+ ;; Fastest path is if the two tokens are truly identical.
+ (or (identical? token other-token)
+ ;; Assumption is that both arguments given are tokens already.
+ (and (let [bindings (:bindings token)
+ other-bindings (:bindings other-token)]
+ ;; Calling `count` on these Clojure maps shows up as a bottleneck
+ ;; even with clojure.lang.IPersistentMap being clojure.lang.Counted unfortunately.
+ (and (= (.size ^java.util.Map bindings)
+ (.size ^java.util.Map other-bindings))
+ ;; `every?` is too slow for a performance critical place like this. It
+ ;; calls `seq` too many times on the underlying maps. Instead `seq` one
+ ;; time and keep using that same seq.
+ ;; Also avoiding Clojure destructuring since even that is not as fast
+ ;; pre-1.9.0.
+ (if-let [^clojure.lang.ISeq entries (.seq ^clojure.lang.Seqable bindings)]
+ ;; Type hint to Indexed vs MapEntry just because MapEntry seems to be a
+ ;; less stable impl detail to rely on.
+ (loop [^clojure.lang.Indexed entry (.first entries)
+ entries (.next entries)]
+ (let [k (some-> entry (.nth 0))
+ v (some-> entry (.nth 1))]
+ (if (and k
+ ;; other-bindings will always be persistent map so invoke
+ ;; it directly. It is faster than `get`.
+ (compare-fn v (other-bindings k)))
+ (recur (some-> entries .first)
+ (some-> entries .next))
+ ;; If there is no k left, then every entry matched. If there is a k,
+ ;; that means the comparison failed, so the maps aren't equal.
+ (not k))))
+ ;; Empty bindings on both sides.
+ true)))
+
+ ;; Check the :matches on each token. :matches need to be in the same order on both
+ ;; tokens to be considered the same.
+ (let [^clojure.lang.Indexed matches (:matches token)
+ ^clojure.lang.Indexed other-matches (:matches other-token)
+ count-matches (.size ^java.util.List matches)]
+ (and (= count-matches
+ (.size ^java.util.List other-matches))
+ (loop [i 0]
+ (cond
+ (= i count-matches)
+ true
+
+                   ;; Compare node-ids first, then fall back to comparing facts. This will
+                   ;; very likely be the most expensive part to execute.
+ (let [^clojure.lang.Indexed m (.nth matches i)
+ ^clojure.lang.Indexed om (.nth other-matches i)]
+ ;; A token :matches tuple is of the form [fact node-id].
+ (and (= (.nth m 1) (.nth om 1))
+ (compare-fn (.nth m 0) (.nth om 0))))
+ (recur (inc i))
+
+ :else
+ false)))))))
+
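+;; Hedged illustration, with tokens reduced to the {:bindings ... :matches ...} shape
+;; the engine uses (the fact value and node id below are made up):
+;;
+;;   (fast-token-compare = {:bindings {:?x 1} :matches [["fact-a" 7]]}
+;;                         {:bindings {:?x 1} :matches [["fact-a" 7]]})
+;;   ;; => true  (equal bindings and equal [fact node-id] matches, in the same order)
+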
+(defprotocol IdentityComparable
+ (using-token-identity! [this bool]))
+
+(deftype RuleOrderedActivation [node-id
+ token
+ activation
+ rule-load-order
+ ^:unsynchronized-mutable use-token-identity?]
+ IdentityComparable
+ ;; NOTE! This should never be called on a RuleOrderedActivation instance that has been stored
+ ;; somewhere in local memory because it could cause interference across threads that have
+ ;; multiple versions of local memories that are sharing some of their state. This is only intended
+ ;; to be called by ephemeral, only-local references to RuleOrderedActivation instances used to
+ ;; search for activations to remove from memory when performing `remove-activations!` operations.
+ ;; The reason this mutable state exists at all is to "flip" a single instance of a RuleOrderedActivation
+ ;; from identity to value equality based comparable when doing the "two-pass" removal search operation
+ ;; of `remove-activations!`. This avoids having to create different instances for each pass.
+ (using-token-identity! [this bool]
+ (set! use-token-identity? bool)
+ this)
+ Object
+ ;; Two RuleOrderedActivation instances should be equal if and only if their
+ ;; activation is equal. Note that if the node of two activations is the same,
+ ;; their rule-load-order must be the same. Using a deftype wrapper allows us to
+ ;; use Clojure equality to determine this while placing the wrapper in a Java data
+ ;; structure that uses Java equality; the Java equality check will simply end up calling
+ ;; Clojure equality checks.
+ (equals [this other]
+ ;; Note that the .equals method is only called by PriorityQueue.remove, and the object provided
+ ;; to the .remove method will never be identical to any object in the queue. A short-circuiting
+ ;; check for reference equality would therefore be pointless here because it would never be true.
+ (boolean
+ (when (instance? RuleOrderedActivation other)
+ (let [^RuleOrderedActivation other other]
+ (and
+ ;; If the node-id of two nodes is equal then we can assume that the nodes are equal.
+ (= node-id
+ (.node-id other))
+
+ ;; We check with identity based semantics on the other when the use-token-identity? field
+ ;; indicates to do so.
+ (if (or use-token-identity? (.-use-token-identity? other))
+ (fast-token-compare identical? token (.-token other))
+ (fast-token-compare = token (.-token other)))))))))
+
+(defn- ->rule-ordered-activation
+  "Take an activation from the engine and wrap it in a RuleOrderedActivation that includes
+   information on the rule load order. Each activation should be wrapped in this way exactly
+   once (that is, the activation field should always hold an activation from the engine)."
+ ([activation]
+ (->rule-ordered-activation activation false))
+ ([activation use-token-identity?]
+ (let [node (:node activation)]
+ (RuleOrderedActivation. (:id node)
+ (:token activation)
+ activation
+ (or (-> node
+ :production
+ meta
+ :clara.rules.compiler/rule-load-order)
+ 0)
+ use-token-identity?))))
+
+(defn- queue-activations!
+ "Add activations to a queue. The wrap-rule-order? option should be true
+ unless the activations in question have previously been wrapped."
+ ([^java.util.Queue pq activations]
+ (queue-activations! pq activations true))
+ ([^java.util.Queue pq activations wrap-rule-order?]
+ (if wrap-rule-order?
+ (doseq [act activations]
+ (.add pq (->rule-ordered-activation act)))
+ (doseq [act activations]
+ (.add pq act)))
+ pq))
+
+(defn- ->activation-priority-queue
+ "Given activations, create a priority queue based on rule ordering.
+ The activations should be wrapped by using the wrap-rule-order? option
+ if they have not been wrapped already."
+ ([activations]
+ (->activation-priority-queue activations true))
+ ([activations wrap-rule-order?]
+ (let [init-cnt (count activations)
+         ;; Note that 11 is the default initial capacity; prior to Java 8 there was no
+         ;; PriorityQueue constructor that takes a custom comparator without also
+         ;; requiring an initial capacity to be passed.
+ pq (PriorityQueue. (if (pos? init-cnt) init-cnt 11)
+ (fn [^RuleOrderedActivation x
+ ^RuleOrderedActivation y]
+ (compare (.rule-load-order x)
+ (.rule-load-order y))))]
+ (queue-activations! pq activations wrap-rule-order?))))
+
+(declare ->PersistentLocalMemory)
+
+;;; Transient local memory implementation. Typically only persistent memory will be visible externally.
+
+(deftype TransientLocalMemory [rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ ^:unsynchronized-mutable alpha-memory
+ ^:unsynchronized-mutable beta-memory
+ ^:unsynchronized-mutable accum-memory
+ ^:unsynchronized-mutable production-memory
+ ^:unsynchronized-mutable ^NavigableMap activation-map]
+
+ IMemoryReader
+ (get-rulebase [memory] rulebase)
+
+ (get-alphas-fn [memory] alphas-fn)
+
+ (get-elements [memory node bindings]
+ (get (get alpha-memory (:id node) {})
+ bindings
+ []))
+
+ (get-elements-all [memory node]
+ (sequence
+ cat
+ (vals
+ (get alpha-memory (:id node) {}))))
+
+ (get-tokens [memory node bindings]
+ (get (get beta-memory (:id node) {})
+ bindings
+ []))
+
+ (get-tokens-all [memory node]
+ (sequence
+ cat
+ (vals (get beta-memory (:id node) {}))))
+
+ (get-accum-reduced [memory node join-bindings fact-bindings]
+ (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
+
+ (get-accum-reduced-all [memory node join-bindings]
+ (get
+ (get accum-memory (:id node) {})
+ join-bindings))
+
+ ;; TODO: rename existing get-accum-reduced-all and use something better here.
+ (get-accum-reduced-complete [memory node]
+ (for [[join-binding joins] (get accum-memory (:id node) {})
+ [fact-binding reduced] joins]
+ {:join-bindings join-binding
+ :fact-bindings fact-binding
+ :result reduced}))
+
+ (get-insertions [memory node token]
+ (get
+ (get production-memory (:id node) {})
+ token
+ []))
+
+ (get-insertions-all [memory node]
+ (get production-memory (:id node) {}))
+
+ (get-activations [memory]
+ (into []
+ (comp cat
+ (map (fn [^RuleOrderedActivation a]
+ (.activation a))))
+ (vals activation-map)))
+
+ ITransientMemory
+ (add-elements! [memory node join-bindings elements]
+ (let [binding-element-map (get alpha-memory (:id node) {})
+ previous-elements (get binding-element-map join-bindings)]
+
+ (cond
+ ;; When changing existing persistent collections, just add on
+ ;; the new elements.
+ (coll? previous-elements)
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (assoc binding-element-map
+ join-bindings
+ (into previous-elements elements))))
+
+ ;; Already mutable, so update-in-place.
+ previous-elements
+ (add-all! previous-elements elements)
+
+        ;; No previous elements. We can leave the collection persistent
+        ;; until we actually need to modify anything. This avoids
+        ;; unnecessary copying.
+ elements
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (assoc binding-element-map
+ join-bindings
+ elements))))))
+
+ (remove-elements! [memory node join-bindings elements]
+ ;; Do nothing when no elements to remove.
+ (when-not (coll-empty? elements)
+ (let [binding-element-map (get alpha-memory (:id node) {})
+ previous-elements (get binding-element-map join-bindings)]
+ (cond
+ ;; Do nothing when no previous elements to remove from.
+ (coll-empty? previous-elements)
+ []
+
+ ;; Convert persistent collection to a mutable one prior to calling remove-first-of-each!
+ ;; alpha-memory needs to be updated this time since there is now going to be a mutable
+ ;; collection associated in this memory location instead.
+ (coll? previous-elements)
+ (let [remaining-elements (->linked-list previous-elements)
+ removed-elements (first (remove-first-of-each! elements remaining-elements))]
+ ;; If there are no remaining elements under a binding group for the node remove the binding group.
+ ;; This allows these binding values to be garbage collected.
+ (let [new-bindings-map (if (.isEmpty ^java.util.List remaining-elements)
+ (dissoc binding-element-map join-bindings)
+ (assoc binding-element-map
+ join-bindings
+ remaining-elements))]
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ new-bindings-map))
+ removed-elements))
+
+ ;; Already mutable, so we do not need to re-associate to alpha-memory.
+ previous-elements
+ (let [removed-elements (first (remove-first-of-each! elements previous-elements))]
+ (when (.isEmpty ^java.util.List previous-elements)
+ (set! alpha-memory
+ (assoc! alpha-memory
+ (:id node)
+ (dissoc binding-element-map join-bindings))))
+ removed-elements)))))
+
+ (add-tokens! [memory node join-bindings tokens]
+ (let [binding-token-map (get beta-memory (:id node) {})
+ previous-tokens (get binding-token-map join-bindings)]
+ ;; The reasoning here is the same as in add-elements! impl above.
+ (cond
+ (coll? previous-tokens)
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (assoc binding-token-map
+ join-bindings
+ (into previous-tokens tokens))))
+
+ previous-tokens
+ (add-all! previous-tokens tokens)
+
+ tokens
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (assoc binding-token-map
+ join-bindings
+ tokens))))))
+
+ (remove-tokens! [memory node join-bindings tokens]
+ ;; The reasoning here is the same as remove-elements!
+ (when-not (coll-empty? tokens)
+ (let [binding-token-map (get beta-memory (:id node) {})
+ previous-tokens (get binding-token-map join-bindings)]
+        (if (coll-empty? previous-tokens)
+ []
+
+          (let [;; Attempt to remove tokens using the faster identity-based equality first since
+                ;; most of the time this is all we need and it can be much faster. Any token that
+                ;; wasn't removed via identity has to be "retried" with normal value-based
+ ;; equality though since those semantics are supported within the engine. This
+ ;; slower path should be rare for any heavy retraction flows - such as those that come
+ ;; via truth maintenance.
+ two-pass-remove! (fn [remaining-tokens tokens]
+ (let [[removed-tokens not-removed-tokens]
+ (remove-first-of-each! tokens
+ remaining-tokens
+ (fn [t1 t2]
+ (fast-token-compare identical? t1 t2)))]
+
+ (if-let [other-removed (and (seq not-removed-tokens)
+ (-> not-removed-tokens
+ (remove-first-of-each! remaining-tokens
+ (fn [t1 t2]
+ (fast-token-compare = t1 t2)))
+ first
+ seq))]
+ (into removed-tokens other-removed)
+ removed-tokens)))]
+ (cond
+ (coll? previous-tokens)
+ (let [remaining-tokens (->linked-list previous-tokens)
+ removed-tokens (two-pass-remove! remaining-tokens tokens)
+ new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
+ (dissoc binding-token-map join-bindings)
+ (assoc binding-token-map join-bindings remaining-tokens))]
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ new-tokens-map))
+ removed-tokens)
+
+ previous-tokens
+ (let [removed-tokens (two-pass-remove! previous-tokens tokens)]
+ (when (.isEmpty ^java.util.List previous-tokens)
+ (set! beta-memory
+ (assoc! beta-memory
+ (:id node)
+ (dissoc binding-token-map join-bindings))))
+
+ removed-tokens)))))))
+
+ (add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
+ (set! accum-memory
+ (assoc! accum-memory
+ (:id node)
+ (assoc-in (get accum-memory (:id node) {})
+ [join-bindings fact-bindings]
+ accum-result))))
+
+ (remove-accum-reduced! [memory node join-bindings fact-bindings]
+ (let [node-id (:id node)
+ node-id-mem (get accum-memory node-id {})
+ join-mem (dissoc (get node-id-mem join-bindings) fact-bindings)
+ node-id-mem (if (empty? join-mem)
+ (dissoc node-id-mem join-bindings)
+ (assoc node-id-mem join-bindings join-mem))]
+ (set! accum-memory
+ (if (empty? node-id-mem)
+ (dissoc! accum-memory
+ node-id)
+ (assoc! accum-memory
+ node-id
+ node-id-mem)))))
+
+ ;; The value under each token in the map should be a sequence
+ ;; of sequences of facts, with each inner sequence coming from a single
+ ;; rule activation.
+ (add-insertions! [memory node token facts]
+ (let [token-facts-map (get production-memory (:id node) {})]
+ (set! production-memory
+ (assoc! production-memory
+ (:id node)
+ (update token-facts-map token conj facts)))))
+
+ (remove-insertions! [memory node tokens]
+
+ ;; Remove the facts inserted from the given token.
+ (let [token-facts-map (get production-memory (:id node) {})
+ ;; Get removed tokens for the caller.
+ [results
+ new-token-facts-map]
+
+ (loop [results (transient {})
+ token-map (transient token-facts-map)
+ to-remove tokens]
+ (if-let [head-token (first to-remove)]
+ ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
+ (if-let [token-insertions (get token-map head-token)]
+ (let [;; There is no particular significance in removing the
+ ;; first group; we just need to remove exactly one.
+ [removed-facts & remaining-facts] token-insertions
+ removed-insertion-map (if (not-empty remaining-facts)
+ (assoc! token-map head-token remaining-facts)
+ (dissoc! token-map head-token))
+ prev-token-result (get results head-token [])]
+ (recur (assoc! results head-token (into prev-token-result removed-facts))
+ removed-insertion-map
+ (rest to-remove)))
+ ;; If the token isn't present in the insertions just try the next one.
+ (recur results token-map (rest to-remove)))
+ [(persistent! results)
+ (persistent! token-map)]))]
+
+ ;; Clear the tokens and update the memory.
+ (set! production-memory
+ (if (not-empty new-token-facts-map)
+ (assoc! production-memory
+ (:id node)
+ new-token-facts-map)
+ (dissoc! production-memory (:id node))))
+ results))
+
+ (add-activations!
+ [memory production new-activations]
+ (let [activation-group (activation-group-fn production)
+ previous (.get activation-map activation-group)]
+ ;; The reasoning here is the same as in add-elements! impl above.
+ (cond
+ previous
+ (queue-activations! previous new-activations)
+
+ (not (coll-empty? new-activations))
+ (.put activation-map
+ activation-group
+ (->activation-priority-queue new-activations)))))
+
+ (pop-activation!
+ [memory]
+ (when (not (.isEmpty activation-map))
+ (let [entry (.firstEntry activation-map)
+ key (.getKey entry)
+ ^java.util.Queue value (.getValue entry)
+
+          ;; An empty queue here is illegal; any operation that could leave a group
+          ;; empty (e.g. a remove-activations! call) is responsible for removing it from the map.
+ ^RuleOrderedActivation activation (.remove value)]
+
+ ;; This activation group is empty now, so remove it from
+ ;; the map entirely.
+ (when (.isEmpty value)
+ (.remove activation-map key))
+
+ ;; Return the selected activation.
+ (.activation activation))))
+
+ (next-activation-group
+ [memory]
+ (when (not (.isEmpty activation-map))
+ (let [entry (.firstEntry activation-map)]
+ (.getKey entry))))
+
+ (remove-activations! [memory production to-remove]
+ (when-not (coll-empty? to-remove)
+ (let [activation-group (activation-group-fn production)
+ ^java.util.Queue activations (.get activation-map activation-group)
+ removed-activations (LinkedList.)
+ unremoved-activations (LinkedList.)]
+
+ (if (coll-empty? activations)
+ ;; If there are no activations present under the group
+ ;; then we can't remove any.
+ [[] to-remove]
+ ;; Remove as many activations by identity as possible first.
+ (let [not-removed (loop [to-remove-item (first to-remove)
+ to-remove (next to-remove)
+ not-removed (transient [])]
+ (if to-remove-item
+ (let [^RuleOrderedActivation act (->rule-ordered-activation to-remove-item true)]
+ (if (.remove activations act)
+ (do
+ (.add removed-activations (.activation act))
+                          ;; The activations not removed in the identity-checking sweep are a
+                          ;; superset of the activations that will remain unremoved after the
+                          ;; equality sweep finishes, so we can build the list of unremoved
+                          ;; activations during that second sweep.
+ (recur (first to-remove) (next to-remove) not-removed))
+ (recur (first to-remove) (next to-remove) (conj! not-removed act))))
+ (persistent! not-removed)))]
+
+ ;; There may still be activations not removed since the removal may be based on value-based
+ ;; equality semantics. Retractions in the engine do not require that identical object references
+ ;; are given to remove object values that are equal.
+ (doseq [^RuleOrderedActivation act not-removed]
+ (if (.remove activations (using-token-identity! act false))
+ (.add removed-activations (.activation act))
+ (.add unremoved-activations (.activation act))))
+
+ (when (coll-empty? activations)
+ (.remove activation-map activation-group))
+
+ [(Collections/unmodifiableList removed-activations)
+ (Collections/unmodifiableList unremoved-activations)])))))
+
+ (clear-activations!
+ [memory]
+ (.clear activation-map))
+
+ (to-persistent! [memory]
+ (let [->persistent-coll #(if (coll? %)
+ %
+ (seq %))
+ update-vals (fn [m update-fn]
+ (->> m
+ (reduce-kv (fn [m k v]
+ (assoc! m k (update-fn v)))
+ (transient m))
+ persistent!))
+ persistent-vals #(update-vals % ->persistent-coll)]
+ (->PersistentLocalMemory rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ (update-vals (persistent! alpha-memory) persistent-vals)
+ (update-vals (persistent! beta-memory) persistent-vals)
+ (persistent! accum-memory)
+ (persistent! production-memory)
+ (into {}
+ (map (juxt key (comp ->persistent-coll val)))
+ activation-map)))))
+
+(defrecord PersistentLocalMemory [rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ alpha-memory
+ beta-memory
+ accum-memory
+ production-memory
+ activation-map]
+ IMemoryReader
+ (get-rulebase [memory] rulebase)
+
+ (get-alphas-fn [memory] alphas-fn)
+
+ (get-elements [memory node bindings]
+ (get (get alpha-memory (:id node) {})
+ bindings
+ []))
+
+ (get-elements-all [memory node]
+ (sequence
+ cat
+ (vals
+ (get alpha-memory (:id node) {}))))
+
+ (get-tokens [memory node bindings]
+ (get (get beta-memory (:id node) {})
+ bindings
+ []))
+
+ (get-tokens-all [memory node]
+ (sequence
+ cat
+ (vals (get beta-memory (:id node) {}))))
+
+ (get-accum-reduced [memory node join-bindings fact-bindings]
+ ;; nil is a valid previously reduced value that can be found in the map.
+ ;; Return ::no-accum-reduced instead of nil when there is no previously
+ ;; reduced value in memory.
+ (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
+
+ (get-accum-reduced-all [memory node join-bindings]
+ (get
+ (get accum-memory (:id node) {})
+ join-bindings))
+
+ (get-accum-reduced-complete [memory node]
+ (for [[join-binding joins] (get accum-memory (:id node) {})
+ [fact-binding reduced] joins]
+ {:join-bindings join-binding
+ :fact-bindings fact-binding
+ :result reduced}))
+
+ (get-insertions [memory node token]
+ (get
+ (get production-memory (:id node) {})
+ token
+ []))
+
+ (get-insertions-all [memory node]
+ (get production-memory (:id node) {}))
+
+ (get-activations [memory]
+ (into []
+ (comp cat
+ (map (fn [^RuleOrderedActivation a]
+ (.activation a))))
+ (vals activation-map)))
+
+ IPersistentMemory
+ (to-transient [memory]
+ (TransientLocalMemory. rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ (transient alpha-memory)
+ (transient beta-memory)
+ (transient accum-memory)
+ (transient production-memory)
+ (let [treemap (TreeMap. ^java.util.Comparator activation-group-sort-fn)]
+ (doseq [[activation-group activations] activation-map]
+ (.put treemap
+ activation-group
+ (->activation-priority-queue activations false)))
+ treemap))))
+
+(defn local-memory
+  "Creates a persistent local memory for the given rule base."
+ [rulebase activation-group-sort-fn activation-group-fn alphas-fn]
+
+ (->PersistentLocalMemory rulebase
+ activation-group-sort-fn
+ activation-group-fn
+ alphas-fn
+ {}
+ {}
+ {}
+ {}
+ {}))
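+
+;; Hedged usage sketch: `rulebase`, `sort-fn`, `group-fn`, `alphas-fn`, `node` and `token`
+;; stand in for values normally supplied by the compiler and engine. The memory is used by
+;; converting to its transient form, mutating it, and converting back:
+;;
+;;   (let [mem  (local-memory rulebase sort-fn group-fn alphas-fn)
+;;         tmem (to-transient mem)]
+;;     (add-tokens! tmem node {} [token])
+;;     (to-persistent! tmem))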
diff --git a/src/main/clojure/clara/rules/memory.cljc b/src/main/clojure/clara/rules/memory.cljc
deleted file mode 100644
index dd72c998..00000000
--- a/src/main/clojure/clara/rules/memory.cljc
+++ /dev/null
@@ -1,1068 +0,0 @@
-(ns clara.rules.memory
- "This namespace is for internal use and may move in the future.
- Specification and default implementation of working memory"
- (:require [clojure.set :as s]))
-
-;; Activation record used by get-activations and add-activations! below.
-(defrecord Activation [node token])
-
-(defprotocol IPersistentMemory
- (to-transient [memory]))
-
-(defprotocol IMemoryReader
- ;; Returns the rulebase associated with the given memory.
- (get-rulebase [memory])
-
- ;; Returns a function that produces a map of alpha nodes to
- ;; facts that match the type of the node
- (get-alphas-fn [memory])
-
- ;; Returns the elements assoicated with the given node.
- (get-elements [memory node bindings])
-
- ;; Returns all elements associated with the given node, regardless of bindings.
- (get-elements-all [memory node])
-
- ;; Returns the tokens associated with the given node.
- (get-tokens [memory node bindings])
-
- ;; Returns all tokens associated with the given node, regardless of bindings
- (get-tokens-all [memory node])
-
- ;; Returns the reduced form of objects processed by an accumulator node
- ;; for facts that match the given bindings.
- (get-accum-reduced [memory node join-bindings fact-bindings])
-
- ;; Returns all reduced results for the given node that matches
- ;; the given join bindings, independent of the individual fact-bindings
- ;; created by the accumulator's condition.
- (get-accum-reduced-all [memory node join-bindings])
-
- ;; Returns a tuple of [join-bindings fact-bindings result] for all
- ;; accumulated items on this node.
- (get-accum-reduced-complete [memory node])
-
- ;; Returns insertions that occurred at the given node for the given token.
- ;; Returns a sequence of the form
- ;; [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]
- (get-insertions [memory node token])
-
- ;; Returns all insertions that occurred in the given node's RHS; this takes the form
- ;; {token [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]}
- (get-insertions-all [memory node])
-
- ;; Returns a map of nodes with pending activations to the activations themselves.
- (get-activations [memory]))
-
-(defprotocol ITransientMemory
-
- ;; Adds working memory elements to the given working memory at the given node.
- (add-elements! [memory node join-bindings elements])
-
- ;; Remove working memory elements from the given working memory at the given node.
- (remove-elements! [memory node elements join-bindings])
-
- ;; Add tokens to the given working memory at the given node.
- (add-tokens! [memory node join-bindings tokens])
-
- ;; Removes tokens from the given working memory at the given node.
- (remove-tokens! [memory node join-bindings tokens])
-
- ;; Adds the result of a reduced accumulator execution to the given memory and node.
- (add-accum-reduced! [memory node join-bindings accum-result fact-bindings])
-
- ;; Removes the result of a reduced accumulator execution to the given memory and node.
- (remove-accum-reduced! [memory node join-bindings fact-bindings])
-
- ;; Add a record that a given fact twas inserted at a given node with
- ;; the given support. Used for truth maintenance.
- ;; This should be called at most once per rule activation.
- (add-insertions! [memory node token facts])
-
- ;; Removes all records of facts that were inserted at the given node
- ;; due to the given token. Used for truth maintenance.
- ;; This function returns a map of each token to the associated facts
- ;; it removed.
- (remove-insertions! [memory node token])
-
- ;; Add a sequence of activations.
- (add-activations! [memory production activations])
-
- ;; Pop an activation from the working memory. Returns nil if no
- ;; activations are pending.
- (pop-activation! [memory])
-
- ;; Returns the group of the next activation, or nil if none are pending.
- (next-activation-group [memory])
-
- ;; Remove the given activations from the working memory. This is expected
- ;; to return a tuple of the form [removed-activations unremoved-activations],
- ;; where unremoved-activations is comprised of activations passed to the memory
- ;; for removal but which were not removed because they were not present in the memory's
- ;; store of pending activations.
- (remove-activations! [memory production activations])
-
- ;; Clear all activations from the working memory
- (clear-activations! [memory])
-
- ;; Converts the transient memory to persistent form.
- (to-persistent! [memory]))
-
-#?(:clj
- (defn- coll-empty?
- "Returns true if the collection is empty. Does not call seq due to avoid
- overhead that may cause for non-persistent collection types, e.g.
- java.util.LinkedList, etc."
- [^java.util.Collection coll]
- (or (nil? coll) (.isEmpty coll))))
-
-#?(:clj
- (defn- list-remove!
- "Removes the item, to-remove, from the given list, lst. If it is found and removed,
- returns true. Otherwise returns false. Only removes the first element in the list
- that is equal to to-remove. Equality is done based on the given eq-pred function.
- If it isn't given, the default is = . If others are equal, they will not be removed.
- This is similar to java.util.List.remove(Object). lst is updated in place for performance.
- This implies that the list must support the mutable list interface, namely via the
- java.util.List.listIterator()."
- ([^java.util.List lst to-remove]
- (list-remove! lst to-remove =))
- ([^java.util.List lst to-remove eq-pred]
- (if-not (coll-empty? lst)
- (let [li (.listIterator lst)]
- (loop [x (.next li)]
- (cond
- (eq-pred to-remove x)
- (do
- (.remove li)
- true)
-
- (.hasNext li)
- (recur (.next li))
-
- :else
- false)))
- false))))
-
-#?(:clj
- (defn- add-all!
- "Adds all items from source to the destination dest collection
- destructively. Avoids using Collection.addAll() due to unnecessary
- performance overhead of calling Collection.toArray() on the
- incoming source. Returns dest."
- [^java.util.Collection dest source]
- (doseq [x source]
- (.add dest x))
- dest))
-
-#?(:clj
- (defn- ^java.util.List ->linked-list
- "Creates a new java.util.LinkedList from the coll, but avoids using
- Collection.addAll(Collection) since there is unnecessary overhead
- in this of calling Collection.toArray() on coll."
- [coll]
- (if (instance? java.util.LinkedList coll)
- coll
- (add-all! (java.util.LinkedList.) coll))))
-
-#?(:clj
- (defn- remove-first-of-each!
- "Remove the first instance of each item in the given remove-seq that
- appears in the collection coll. coll is updated in place for
- performance. This implies that the coll must support the mutable
- collection interface method Collection.remove(Object). Returns a tuple
- of the form [remove-seq-items-removed remove-seq-items-not-removed].
- An optional compare-fn can be given to specify how to compare remove-seq
- items against items in coll. The default compare-fn is = .
- For immutable collection removal, use the non-destructive remove-first-of-each
- defined below."
- ([remove-seq ^java.util.List coll]
- (remove-first-of-each! remove-seq coll =))
-
- ([remove-seq ^java.util.List coll compare-fn]
- ;; Optimization for special case of one item to remove,
- ;; which occurs frequently.
- (if (= 1 (count remove-seq))
- (let [to-remove (first remove-seq)]
- (if (list-remove! coll to-remove compare-fn)
- [remove-seq []]
- [[] remove-seq]))
-
- ;; Otherwise, perform a linear search for items to remove.
- (loop [to-remove (first remove-seq)
- remove-seq (next remove-seq)
- removed (transient [])
- not-removed (transient [])]
- (if to-remove
- (let [found? (list-remove! coll to-remove compare-fn)
- removed (if found?
- (conj! removed to-remove)
- removed)
- not-removed (if found?
- not-removed
- (conj! not-removed to-remove))]
- (recur (first remove-seq)
- (next remove-seq)
- removed
- not-removed))
- ;; If this is expensive, using a mutable collection maybe good to
- ;; consider here in a future optimization.
- [(persistent! removed) (persistent! not-removed)]))))))
-
-(defn remove-first-of-each
- "Remove the first instance of each item in the given remove-seq that
- appears in the collection. This also tracks which items were found
- and removed. Returns a tuple of the form:
- [items-removed coll-with-items-removed items-not-removed]
- This function does so eagerly since
- the working memories with large numbers of insertions and retractions
- can cause lazy sequences to become deeply nested."
- [remove-seq coll]
- (cond
-
- ;; There is nothing to remove.
- (empty? remove-seq) [[] coll]
-
- ;; Otherwise, perform a linear search for items to remove.
- :else (loop [f (first coll)
- r (rest coll)
- [remove-seq items-removed result] [remove-seq (transient []) (transient [])]]
-
- (if f
- (recur (first r)
- (rest r)
-
- ;; Determine if f matches any of the items to remove.
- (loop [to-remove (first remove-seq)
- remove-seq (rest remove-seq)
- ;; Remember what is left to remove for later.
- left-to-remove (transient [])]
-
- ;; Try to find if f matches anything to-remove.
- (if to-remove
- (if (= to-remove f)
-
- ;; Found a match, so the search is done.
- [(persistent! (reduce conj! left-to-remove remove-seq))
- (conj! items-removed to-remove)
- result]
-
- ;; Keep searching for a match.
- (recur (first remove-seq)
- (rest remove-seq)
- (conj! left-to-remove to-remove)))
-
- ;; No matches found.
- [(persistent! left-to-remove)
- items-removed
- (conj! result f)])))
-
- [(persistent! items-removed) (persistent! result) remove-seq]))))
-
-#?(:clj
- (defn fast-token-compare [compare-fn token other-token]
- ;; Fastest path is if the two tokens are truly identical.
- (or (identical? token other-token)
- ;; Assumption is that both arguments given are tokens already.
- (and (let [bindings (:bindings token)
- other-bindings (:bindings other-token)]
- ;; Calling `count` on these Clojure maps shows up as a bottleneck
- ;; even with clojure.lang.IPersistentMap being clojure.lang.Counted unfortunately.
- (and (= (.size ^java.util.Map bindings)
- (.size ^java.util.Map other-bindings))
- ;; `every?` is too slow for a performance critical place like this. It
- ;; calls `seq` too many times on the underlying maps. Instead `seq` one
- ;; time and keep using that same seq.
- ;; Also avoiding Clojure destructuring since even that is not as fast
- ;; pre-1.9.0.
- (if-let [^clojure.lang.ISeq entries (.seq ^clojure.lang.Seqable bindings)]
- ;; Type hint to Indexed vs MapEntry just because MapEntry seems to be a
- ;; less stable impl detail to rely on.
- (loop [^clojure.lang.Indexed entry (.first entries)
- entries (.next entries)]
- (let [k (some-> entry (.nth 0))
- v (some-> entry (.nth 1))]
- (if (and k
- ;; other-bindings will always be persistent map so invoke
- ;; it directly. It is faster than `get`.
- (compare-fn v (other-bindings k)))
- (recur (some-> entries .first)
- (some-> entries .next))
- ;; If there is no k left, then every entry matched. If there is a k,
- ;; that means the comparison failed, so the maps aren't equal.
- (not k))))
- ;; Empty bindings on both sides.
- true)))
-
- ;; Check the :matches on each token. :matches need to be in the same order on both
- ;; tokens to be considered the same.
- (let [^clojure.lang.Indexed matches (:matches token)
- ^clojure.lang.Indexed other-matches (:matches other-token)
- count-matches (.size ^java.util.List matches)]
- (and (= count-matches
- (.size ^java.util.List other-matches))
- (loop [i 0]
- (cond
- (= i count-matches)
- true
-
- ;; Compare node-id's first. Fallback to comparing facts. This will
- ;; most very likely be the most expensive part to execute.
- (let [^clojure.lang.Indexed m (.nth matches i)
- ^clojure.lang.Indexed om (.nth other-matches i)]
- ;; A token :matches tuple is of the form [fact node-id].
- (and (= (.nth m 1) (.nth om 1))
- (compare-fn (.nth m 0) (.nth om 0))))
- (recur (inc i))
-
- :else
- false))))))))
-
-#?(:clj
- (defprotocol IdentityComparable
- (using-token-identity! [this bool])))
-
-#?(:clj
- (deftype RuleOrderedActivation [node-id
- token
- activation
- rule-load-order
- ^:unsynchronized-mutable use-token-identity?]
- IdentityComparable
- ;; NOTE! This should never be called on a RuleOrderedActivation instance that has been stored
- ;; somewhere in local memory because it could cause interference across threads that have
- ;; multiple versions of local memories that are sharing some of their state. This is only intended
- ;; to be called by ephemeral, only-local references to RuleOrderedActivation instances used to
- ;; search for activations to remove from memory when performing `remove-activations!` operations.
- ;; The reason this mutable state exists at all is to "flip" a single instance of a RuleOrderedActivation
- ;; from identity to value equality based comparable when doing the "two-pass" removal search operation
- ;; of `remove-activations!`. This avoids having to create different instances for each pass.
- (using-token-identity! [this bool]
- (set! use-token-identity? bool)
- this)
- Object
- ;; Two RuleOrderedActivation instances should be equal if and only if their
- ;; activation is equal. Note that if the node of two activations is the same,
- ;; their rule-load-order must be the same. Using a deftype wrapper allows us to
- ;; use Clojure equality to determine this while placing the wrapper in a Java data
- ;; structure that uses Java equality; the Java equality check will simply end up calling
- ;; Clojure equality checks.
- (equals [this other]
- ;; Note that the .equals method is only called by PriorityQueue.remove, and the object provided
- ;; to the .remove method will never be identical to any object in the queue. A short-circuiting
- ;; check for reference equality would therefore be pointless here because it would never be true.
- (boolean
- (when (instance? RuleOrderedActivation other)
- (let [^RuleOrderedActivation other other]
- (and
- ;; If the node-id of two nodes is equal then we can assume that the nodes are equal.
- (= node-id
- (.node-id other))
-
- ;; We check with identity based semantics on the other when the use-token-identity? field
- ;; indicates to do so.
- (if (or use-token-identity? (.-use-token-identity? other))
- (fast-token-compare identical? token (.-token other))
- (fast-token-compare = token (.-token other))))))))))
-
-#?(:clj
- (defn- ->rule-ordered-activation
- "Take an activation from the engine and wrap it in a map that includes information
- on the rule load order. In Clojure, as opposed to ClojureScript, each activation should
- be wrapped in this way exactly once (that is, the value of the :activation key should
- be an activation from the engine.)"
- ([activation]
- (->rule-ordered-activation activation false))
- ([activation use-token-identity?]
- (let [node (:node activation)]
- (RuleOrderedActivation. (:id node)
- (:token activation)
- activation
- (or (-> node
- :production
- meta
- :clara.rules.compiler/rule-load-order)
- 0)
- use-token-identity?)))))
-
-#?(:clj
- (defn- queue-activations!
- "Add activations to a queue. The wrap-rule-order? option should be true
- unless the activations in question have previously been wrapped."
- ([^java.util.Queue pq activations]
- (queue-activations! pq activations true))
- ([^java.util.Queue pq activations wrap-rule-order?]
- (if wrap-rule-order?
- (doseq [act activations]
- (.add pq (->rule-ordered-activation act)))
- (doseq [act activations]
- (.add pq act)))
- pq)))
-
-#?(:clj
- (defn- ->activation-priority-queue
- "Given activations, create a priority queue based on rule ordering.
- The activations should be wrapped by using the wrap-rule-order? option
- if they have not been wrapped already."
- ([activations]
- (->activation-priority-queue activations true))
- ([activations wrap-rule-order?]
- (let [init-cnt (count activations)
- ;; Note that 11 is the default initial value; there is no constructor
- ;; for PriorityQueue that takes a custom comparator and does not require
- ;; an initial size to be passed.
- pq (java.util.PriorityQueue. (if (pos? init-cnt) init-cnt 11)
- (fn [^RuleOrderedActivation x
- ^RuleOrderedActivation y]
- (compare (.rule-load-order x)
- (.rule-load-order y))))]
- (queue-activations! pq activations wrap-rule-order?)))))
-
-(declare ->PersistentLocalMemory)
-
-;;; Transient local memory implementation. Typically only persistent memory will be visible externally.
-
-(deftype TransientLocalMemory [rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- ^:unsynchronized-mutable alpha-memory
- ^:unsynchronized-mutable beta-memory
- ^:unsynchronized-mutable accum-memory
- ^:unsynchronized-mutable production-memory
- ^:unsynchronized-mutable #?(:clj ^java.util.NavigableMap activation-map :cljs activation-map)]
-
- IMemoryReader
- (get-rulebase [memory] rulebase)
-
- (get-alphas-fn [memory] alphas-fn)
-
- (get-elements [memory node bindings]
- (get (get alpha-memory (:id node) {})
- bindings
- []))
-
- (get-elements-all [memory node]
- (sequence
- cat
- (vals
- (get alpha-memory (:id node) {}))))
-
- (get-tokens [memory node bindings]
- (get (get beta-memory (:id node) {})
- bindings
- []))
-
- (get-tokens-all [memory node]
- (sequence
- cat
- (vals (get beta-memory (:id node) {}))))
-
- (get-accum-reduced [memory node join-bindings fact-bindings]
- (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
-
- (get-accum-reduced-all [memory node join-bindings]
- (get
- (get accum-memory (:id node) {})
- join-bindings))
-
- ;; TODO: rename existing get-accum-reduced-all and use something better here.
- (get-accum-reduced-complete [memory node]
- (for [[join-binding joins] (get accum-memory (:id node) {})
- [fact-binding reduced] joins]
- {:join-bindings join-binding
- :fact-bindings fact-binding
- :result reduced}))
-
- (get-insertions [memory node token]
- (get
- (get production-memory (:id node) {})
- token
- []))
-
- (get-insertions-all [memory node]
- (get production-memory (:id node) {}))
-
- (get-activations [memory]
- (into []
- (comp cat
- (map (fn [^RuleOrderedActivation a]
- (.activation a))))
- (vals activation-map)))
-
- ITransientMemory
- #?(:clj
- (add-elements! [memory node join-bindings elements]
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings)]
-
- (cond
- ;; When changing existing persistent collections, just add on
- ;; the new elements.
- (coll? previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (assoc binding-element-map
- join-bindings
- (into previous-elements elements))))
-
- ;; Already mutable, so update-in-place.
- previous-elements
- (add-all! previous-elements elements)
-
- ;; No previous. We can just leave it persistent if it is
- ;; until we actually need to modify anything. This avoids
- ;; unnecessary copying.
- elements
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (assoc binding-element-map
- join-bindings
- elements))))))
- :cljs
- (add-elements! [memory node join-bindings elements]
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings [])]
-
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (assoc binding-element-map join-bindings (into previous-elements elements)))))))
-
- (remove-elements! [memory node join-bindings elements]
- #?(:clj
- ;; Do nothing when no elements to remove.
- (when-not (coll-empty? elements)
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings)]
- (cond
- ;; Do nothing when no previous elements to remove from.
- (coll-empty? previous-elements)
- []
-
- ;; Convert persistent collection to a mutable one prior to calling remove-first-of-each!
- ;; alpha-memory needs to be updated this time since there is now going to be a mutable
- ;; collection associated in this memory location instead.
- (coll? previous-elements)
- (let [remaining-elements (->linked-list previous-elements)
- removed-elements (first (remove-first-of-each! elements remaining-elements))]
- ;; If there are no remaining elements under a binding group for the node remove the binding group.
- ;; This allows these binding values to be garbage collected.
- (let [new-bindings-map (if (.isEmpty ^java.util.List remaining-elements)
- (dissoc binding-element-map join-bindings)
- (assoc binding-element-map
- join-bindings
- remaining-elements))]
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- new-bindings-map))
- removed-elements))
-
- ;; Already mutable, so we do not need to re-associate to alpha-memory.
- previous-elements
- (let [removed-elements (first (remove-first-of-each! elements previous-elements))]
- (when (.isEmpty ^java.util.List previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (dissoc binding-element-map join-bindings))))
- removed-elements))))
- :cljs
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings [])
- [removed-elements filtered-elements] (remove-first-of-each elements previous-elements)
- new-bindings-map (if (seq filtered-elements)
- (assoc binding-element-map join-bindings filtered-elements)
- (dissoc binding-element-map join-bindings))]
-
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- new-bindings-map))
-
- ;; Return the removed elements.
- removed-elements)))
-
- (add-tokens! [memory node join-bindings tokens]
- #?(:clj
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- (coll? previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (assoc binding-token-map
- join-bindings
- (into previous-tokens tokens))))
-
- previous-tokens
- (add-all! previous-tokens tokens)
-
- tokens
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (assoc binding-token-map
- join-bindings
- tokens)))))
- :cljs
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings [])]
-
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (assoc binding-token-map join-bindings (into previous-tokens tokens)))))))
-
- (remove-tokens! [memory node join-bindings tokens]
- #?(:clj
- ;; The reasoning here is the same as remove-elements!
- (when-not (coll-empty? tokens)
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings)]
- (if
- (coll-empty? previous-tokens)
- []
-
- (let [;; Attempt to remove tokens using the faster indentity-based equality first since
- ;; most of the time this is all we need and it can be much faster. Any token that
- ;; wasn't removed via identity, has to be "retried" with normal value-based
- ;; equality though since those semantics are supported within the engine. This
- ;; slower path should be rare for any heavy retraction flows - such as those that come
- ;; via truth maintenance.
- two-pass-remove! (fn [remaining-tokens tokens]
- (let [[removed-tokens not-removed-tokens]
- (remove-first-of-each! tokens
- remaining-tokens
- (fn [t1 t2]
- (fast-token-compare identical? t1 t2)))]
-
- (if-let [other-removed (and (seq not-removed-tokens)
- (-> not-removed-tokens
- (remove-first-of-each! remaining-tokens
- (fn [t1 t2]
- (fast-token-compare = t1 t2)))
- first
- seq))]
- (into removed-tokens other-removed)
- removed-tokens)))]
- (cond
- (coll? previous-tokens)
- (let [remaining-tokens (->linked-list previous-tokens)
- removed-tokens (two-pass-remove! remaining-tokens tokens)
- new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
- (dissoc binding-token-map join-bindings)
- (assoc binding-token-map join-bindings remaining-tokens))]
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- new-tokens-map))
- removed-tokens)
-
- previous-tokens
- (let [removed-tokens (two-pass-remove! previous-tokens tokens)]
- (when (.isEmpty ^java.util.List previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (dissoc binding-token-map join-bindings))))
-
- removed-tokens))))))
- :cljs
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings [])
- [removed-tokens filtered-tokens] (remove-first-of-each tokens previous-tokens)
- new-tokens-map (if (seq filtered-tokens)
- (assoc binding-token-map join-bindings filtered-tokens)
- (dissoc binding-token-map join-bindings))]
-
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- new-tokens-map))
-
- ;; Return the removed tokens.
- removed-tokens)))
-
- (add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
-
- (set! accum-memory
- (assoc! accum-memory
- (:id node)
- (assoc-in (get accum-memory (:id node) {})
- [join-bindings fact-bindings]
- accum-result))))
-
- (remove-accum-reduced! [memory node join-bindings fact-bindings]
- (let [node-id (:id node)
- node-id-mem (get accum-memory node-id {})
- join-mem (dissoc (get node-id-mem join-bindings) fact-bindings)
- node-id-mem (if (empty? join-mem)
- (dissoc node-id-mem join-bindings)
- (assoc node-id-mem join-bindings join-mem))]
- (set! accum-memory
- (if (empty? node-id-mem)
- (dissoc! accum-memory
- node-id)
- (assoc! accum-memory
- node-id
- node-id-mem)))))
-
- ;; The value under each token in the map should be a sequence
- ;; of sequences of facts, with each inner sequence coming from a single
- ;; rule activation.
- (add-insertions! [memory node token facts]
- (let [token-facts-map (get production-memory (:id node) {})]
- (set! production-memory
- (assoc! production-memory
- (:id node)
- (update token-facts-map token conj facts)))))
-
- (remove-insertions! [memory node tokens]
-
- ;; Remove the facts inserted from the given token.
- (let [token-facts-map (get production-memory (:id node) {})
- ;; Get removed tokens for the caller.
- [results
- new-token-facts-map]
-
- (loop [results (transient {})
- token-map (transient token-facts-map)
- to-remove tokens]
- (if-let [head-token (first to-remove)]
- ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
- (if-let [token-insertions (get token-map head-token)]
- (let [;; There is no particular significance in removing the
- ;; first group; we just need to remove exactly one.
- [removed-facts & remaining-facts] token-insertions
- removed-insertion-map (if (not-empty remaining-facts)
- (assoc! token-map head-token remaining-facts)
- (dissoc! token-map head-token))
- prev-token-result (get results head-token [])]
- (recur (assoc! results head-token (into prev-token-result removed-facts))
- removed-insertion-map
- (rest to-remove)))
- ;; If the token isn't present in the insertions just try the next one.
- (recur results token-map (rest to-remove)))
- [(persistent! results)
- (persistent! token-map)]))]
-
- ;; Clear the tokens and update the memory.
- (set! production-memory
- (if (not-empty new-token-facts-map)
- (assoc! production-memory
- (:id node)
- new-token-facts-map)
- (dissoc! production-memory (:id node))))
- results))
-
- #?(:clj
- (add-activations!
- [memory production new-activations]
- (let [activation-group (activation-group-fn production)
- previous (.get activation-map activation-group)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- previous
- (queue-activations! previous new-activations)
-
- (not (coll-empty? new-activations))
- (.put activation-map
- activation-group
- (->activation-priority-queue new-activations)))))
- :cljs
- (add-activations!
- [memory production new-activations]
- (let [activation-group (activation-group-fn production)
- previous (get activation-map activation-group)]
- (set! activation-map
- (assoc activation-map
- activation-group
- (if previous
- (into previous new-activations)
- new-activations))))))
-
- #?(:clj
- (pop-activation!
- [memory]
- (when (not (.isEmpty activation-map))
- (let [entry (.firstEntry activation-map)
- key (.getKey entry)
- ^java.util.Queue value (.getValue entry)
-
- ;; An empty value is illegal and should be removed by an action
- ;; that creates one (e.g. a remove-activations! call).
- ^RuleOrderedActivation activation (.remove value)]
-
- ;; This activation group is empty now, so remove it from
- ;; the map entirely.
- (when (.isEmpty value)
- (.remove activation-map key))
-
- ;; Return the selected activation.
- (.activation activation))))
-
- :cljs
- (pop-activation!
- [memory]
- (when (not (empty? activation-map))
- (let [[key value] (first activation-map)
- remaining (rest value)]
-
- (set! activation-map
- (if (empty? remaining)
- (dissoc activation-map key)
- (assoc activation-map key remaining)))
- (first value)))))
-
- #?(:clj
- (next-activation-group
- [memory]
- (when (not (.isEmpty activation-map))
- (let [entry (.firstEntry activation-map)]
- (.getKey entry))))
- :cljs
- (next-activation-group
- [memory]
- (let [[key val] (first activation-map)]
- key)))
-
- (remove-activations! [memory production to-remove]
- #?(:clj
- ;; The reasoning here is the same as remove-elements!
- (when-not (coll-empty? to-remove)
- (let [activation-group (activation-group-fn production)
- ^java.util.Queue activations (.get activation-map activation-group)
- removed-activations (java.util.LinkedList.)
- unremoved-activations (java.util.LinkedList.)]
-
- (if (coll-empty? activations)
- ;; If there are no activations present under the group
- ;; then we can't remove any.
- [[] to-remove]
- ;; Remove as many activations by identity as possible first.
- (let [not-removed (loop [to-remove-item (first to-remove)
- to-remove (next to-remove)
- not-removed (transient [])]
- (if to-remove-item
- (let [^RuleOrderedActivation act (->rule-ordered-activation to-remove-item true)]
- (if (.remove activations act)
- (do
- (.add removed-activations (.activation act))
- ;; The activations that are not removed in the identity checking sweep
- ;; are a superset of the activations that are removed after the equality
- ;; sweep finishes so we can just create the list of unremoved activations
- ;; during that sweep.
- (recur (first to-remove) (next to-remove) not-removed))
- (recur (first to-remove) (next to-remove) (conj! not-removed act))))
- (persistent! not-removed)))]
-
- ;; There may still be activations not removed since the removal may be based on value-based
- ;; equality semantics. Retractions in the engine do not require that identical object references
- ;; are given to remove object values that are equal.
- (doseq [^RuleOrderedActivation act not-removed]
- (if (.remove activations (using-token-identity! act false))
- (.add removed-activations (.activation act))
- (.add unremoved-activations (.activation act))))
-
- (when (coll-empty? activations)
- (.remove activation-map activation-group))
-
- [(java.util.Collections/unmodifiableList removed-activations)
- (java.util.Collections/unmodifiableList unremoved-activations)]))))
- :cljs
- (let [activation-group (activation-group-fn production)
- current-activations (get activation-map activation-group)
- [removed-activations remaining-activations unremoved-activations]
- (remove-first-of-each
- to-remove
- current-activations)]
- (set! activation-map (assoc activation-map
- activation-group
- remaining-activations))
- [removed-activations unremoved-activations])))
-
- #?(:clj
- (clear-activations!
- [memory]
- (.clear activation-map))
- :cljs
- (clear-activations!
- [memory]
- (set! activation-map (sorted-map-by activation-group-sort-fn))))
-
- (to-persistent! [memory]
- #?(:clj
- ;; Be sure to remove all transients and internal mutable
- ;; collections used in memory. Convert any collection that is
- ;; not already a Clojure persistent collection.
- (let [->persistent-coll #(if (coll? %)
- %
- (seq %))
- update-vals (fn [m update-fn]
- (->> m
- (reduce-kv (fn [m k v]
- (assoc! m k (update-fn v)))
- (transient m))
- persistent!))
- persistent-vals #(update-vals % ->persistent-coll)]
- (->PersistentLocalMemory rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (update-vals (persistent! alpha-memory) persistent-vals)
- (update-vals (persistent! beta-memory) persistent-vals)
- (persistent! accum-memory)
- (persistent! production-memory)
- (into {}
- (map (juxt key (comp ->persistent-coll val)))
- activation-map)))
- :cljs
- (->PersistentLocalMemory rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (persistent! alpha-memory)
- (persistent! beta-memory)
- (persistent! accum-memory)
- (persistent! production-memory)
- (into {}
- (for [[key val] activation-map]
- [key val]))))))
-
-(defrecord PersistentLocalMemory [rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- alpha-memory
- beta-memory
- accum-memory
- production-memory
- activation-map]
- IMemoryReader
- (get-rulebase [memory] rulebase)
-
- (get-alphas-fn [memory] alphas-fn)
-
- (get-elements [memory node bindings]
- (get (get alpha-memory (:id node) {})
- bindings
- []))
-
- (get-elements-all [memory node]
- (sequence
- cat
- (vals
- (get alpha-memory (:id node) {}))))
-
- (get-tokens [memory node bindings]
- (get (get beta-memory (:id node) {})
- bindings
- []))
-
- (get-tokens-all [memory node]
- (sequence
- cat
- (vals (get beta-memory (:id node) {}))))
-
- (get-accum-reduced [memory node join-bindings fact-bindings]
- ;; nil is a valid previously reduced value that can be found in the map.
- ;; Return ::no-accum-reduced instead of nil when there is no previously
- ;; reduced value in memory.
- (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
-
- (get-accum-reduced-all [memory node join-bindings]
- (get
- (get accum-memory (:id node) {})
- join-bindings))
-
- (get-accum-reduced-complete [memory node]
- (for [[join-binding joins] (get accum-memory (:id node) {})
- [fact-binding reduced] joins]
- {:join-bindings join-binding
- :fact-bindings fact-binding
- :result reduced}))
-
- (get-insertions [memory node token]
- (get
- (get production-memory (:id node) {})
- token
- []))
-
- (get-insertions-all [memory node]
- (get production-memory (:id node) {}))
-
- (get-activations [memory]
- #?(:clj
- (into []
- (comp cat
- (map (fn [^RuleOrderedActivation a]
- (.activation a))))
- (vals activation-map))
-
- :cljs
- (apply concat (vals activation-map))))
-
- IPersistentMemory
- (to-transient [memory]
- #?(:clj
- (TransientLocalMemory. rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (transient alpha-memory)
- (transient beta-memory)
- (transient accum-memory)
- (transient production-memory)
- (let [treemap (java.util.TreeMap. ^java.util.Comparator activation-group-sort-fn)]
- (doseq [[activation-group activations] activation-map]
- (.put treemap
- activation-group
- (->activation-priority-queue activations false)))
- treemap))
- :cljs
- (let [activation-map (reduce
- (fn [treemap [activation-group activations]]
- (let [previous (get treemap activation-group)]
- (assoc treemap activation-group
- (if previous
- (into previous activations)
- activations))))
- (sorted-map-by activation-group-sort-fn)
- activation-map)]
- (TransientLocalMemory. rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (transient alpha-memory)
- (transient beta-memory)
- (transient accum-memory)
- (transient production-memory)
- (reduce
- (fn [treemap [activation-group activations]]
- (let [previous (get treemap activation-group)]
- (assoc treemap activation-group
- (if previous
- (into previous activations)
- activations))))
- (sorted-map-by activation-group-sort-fn)
- activation-map))))))
-
-(defn local-memory
- "Creates an persistent local memory for the given rule base."
- [rulebase activation-group-sort-fn activation-group-fn alphas-fn]
-
- (->PersistentLocalMemory rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- {}
- {}
- {}
- {}
- {}))
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
new file mode 100644
index 00000000..36a88b55
--- /dev/null
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -0,0 +1,93 @@
+(ns clara.rules.platform
+ "This namespace is for internal use and may move in the future.
+ Platform unified code Clojure/ClojureScript."
+ (:import [java.lang IllegalArgumentException]
+ [java.util LinkedHashMap]))
+
+(defn throw-error
+ "Throw an error with the given description string."
+ [^String description]
+ (throw (IllegalArgumentException. description)))
+
+(defn query-param
+  "Coerces a query param to a parameter keyword such as :?param; if an unsupported type is
+   supplied, an exception will be thrown."
+ [p]
+ (cond
+ (keyword? p) p
+ (symbol? p) (keyword p)
+ :else
+ (throw-error (str "Query bindings must be specified as a keyword or symbol: " p))))
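+
+;; A minimal usage sketch (REPL results shown as comments):
+;; (query-param :?loc)  ;=> :?loc
+;; (query-param '?loc)  ;=> :?loc
+;; (query-param "?loc") ;=> throws IllegalArgumentException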
+
+;; This class wraps Clojure objects to ensure Clojure's equality and hash
+;; semantics are visible to Java code. This allows these Clojure objects
+;; to be safely used in things like Java Sets or Maps.
+;; This class also accepts and stores the hash code, since it almost always
+;; will be used at least once and generally more than once.
+(deftype JavaEqualityWrapper [wrapped ^int hash-code]
+
+ Object
+ (equals [this other]
+ (and (instance? JavaEqualityWrapper other)
+ (= wrapped (.wrapped ^JavaEqualityWrapper other))))
+
+ (hashCode [this]
+ hash-code))
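+
+;; A minimal usage sketch: wrapping keys so a java.util.HashMap compares them
+;; with Clojure equality (the key values are illustrative):
+;; (let [m  (java.util.HashMap.)
+;;       k1 (JavaEqualityWrapper. [1 2] (int (.hashCode [1 2])))
+;;       k2 (JavaEqualityWrapper. [1 2] (int (.hashCode [1 2])))]
+;;   (.put m k1 :a)
+;;   (.get m k2)) ;=> :a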
+
+(defn group-by-seq
+  "Groups the items of the given coll by applying f to each item. Returns a seq of tuples of the form
+   [f-val xs] where xs are items from the coll and f-val is the result of applying f to any of
+   those xs. Each x in xs has the same value (f x). xs will be in the same order as they were
+   found in coll.
+   The behavior is similar to calling `(seq (group-by f coll))`. However, the returned seq will
+   always have consistent ordering from process to process. The ordering is insertion order
+   as new (f x) values are found while traversing the given coll in its seq order. The
+   returned order is made consistent to ensure that relevant places within the rules engine that
+   use this grouping logic have deterministic behavior across different processes."
+ [f coll]
+ (let [^java.util.Map m (reduce (fn [^java.util.Map m x]
+ (let [k (f x)
+ ;; Use Java's hashcode for performance reasons as
+ ;; discussed at https://github.com/cerner/clara-rules/issues/393
+ wrapper (JavaEqualityWrapper. k
+ (if (nil? k)
+ (int 0)
+ (int (.hashCode ^Object k))))
+ xs (or (.get m wrapper)
+ (transient []))]
+ (.put m wrapper (conj! xs x)))
+ m)
+ (LinkedHashMap.)
+ coll)
+ it (.iterator (.entrySet m))]
+ ;; Explicitly iterate over a Java iterator in order to avoid running into issues as
+ ;; discussed in http://dev.clojure.org/jira/browse/CLJ-1738
+ (loop [coll (transient [])]
+ (if (.hasNext it)
+ (let [^java.util.Map$Entry e (.next it)]
+ (recur (conj! coll [(.wrapped ^JavaEqualityWrapper (.getKey e)) (persistent! (.getValue e))])))
+ (persistent! coll)))))
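+
+;; A minimal usage sketch; unlike a plain (group-by ...), group order is the
+;; insertion order in which each new (f x) value was first seen:
+;; (group-by-seq even? [1 2 3 4]) ;=> [[false [1 3]] [true [2 4]]]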
+
+(defmacro thread-local-binding
+  "Wraps the given body in a try block that sets each given ThreadLocal binding
+   and removes it in the finally block."
+ [bindings & body]
+ (when-not (vector? bindings)
+ (throw (ex-info "Binding needs to be a vector."
+ {:bindings bindings})))
+ (when-not (even? (count bindings))
+ (throw (ex-info "Needs an even number of forms in binding vector"
+ {:bindings bindings})))
+ (let [binding-pairs (partition 2 bindings)]
+ `(try
+ ~@(for [[tl v] binding-pairs]
+ `(.set ~tl ~v))
+ ~@body
+ (finally
+ ~@(for [[tl] binding-pairs]
+ `(.remove ~tl))))))
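+
+;; A minimal usage sketch, assuming a hypothetical ThreadLocal and body:
+;; (def ^ThreadLocal current-batch (ThreadLocal.))
+;; (thread-local-binding [current-batch [:fact-a :fact-b]]
+;;   (process (.get current-batch)))  ; `process` is illustrative only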
+
+(defmacro eager-for
+ "A for wrapped with a doall to force realisation. Usage is the same as regular for."
+ [& body]
+ `(doall (for ~@body)))
diff --git a/src/main/clojure/clara/rules/platform.cljc b/src/main/clojure/clara/rules/platform.cljc
deleted file mode 100644
index 9470e282..00000000
--- a/src/main/clojure/clara/rules/platform.cljc
+++ /dev/null
@@ -1,115 +0,0 @@
-(ns clara.rules.platform
- "This namespace is for internal use and may move in the future.
- Platform unified code Clojure/ClojureScript.")
-
-(defn throw-error
- "Throw an error with the given description string."
- [^String description]
- (throw #?(:clj (IllegalArgumentException. description) :cljs (js/Error. description))))
-
-(defn query-param
- "Coerces a query param to a parameter keyword such as :?param, if an unsupported type is
- supplied then an exception will be thrown"
- [p]
- (cond
- (keyword? p) p
- (symbol? p) (keyword p)
- :else
- (throw-error (str "Query bindings must be specified as a keyword or symbol: " p))))
-
-;; This class wraps Clojure objects to ensure Clojure's equality and hash
-;; semantics are visible to Java code. This allows these Clojure objects
-;; to be safely used in things like Java Sets or Maps.
-;; This class also accepts and stores the hash code, since it almost always
-;; will be used once and generally more than once.
-#?(:clj
- (deftype JavaEqualityWrapper [wrapped ^int hash-code]
-
- Object
- (equals [this other]
- (and (instance? JavaEqualityWrapper other)
- (= wrapped (.wrapped ^JavaEqualityWrapper other))))
-
- (hashCode [this]
- hash-code)))
-
-#?(:clj
- (defn group-by-seq
- "Groups the items of the given coll by f to each item. Returns a seq of tuples of the form
- [f-val xs] where xs are items from the coll and f-val is the result of applying f to any of
- those xs. Each x in xs has the same value (f x). xs will be in the same order as they were
- found in coll.
- The behavior is similar to calling `(seq (group-by f coll))` However, the returned seq will
- always have consistent ordering from process to process. The ordering is insertion order
- as new (f x) values are found traversing the given coll collection in its seq order. The
- returned order is made consistent to ensure that relevant places within the rules engine that
- use this grouping logic have deterministic behavior across different processes."
- [f coll]
- (let [^java.util.Map m (reduce (fn [^java.util.Map m x]
- (let [k (f x)
- ;; Use Java's hashcode for performance reasons as
- ;; discussed at https://github.com/cerner/clara-rules/issues/393
- wrapper (JavaEqualityWrapper. k
- (if (nil? k)
- (int 0)
- (int (.hashCode ^Object k))))
- xs (or (.get m wrapper)
- (transient []))]
- (.put m wrapper (conj! xs x)))
- m)
- (java.util.LinkedHashMap.)
- coll)
- it (.iterator (.entrySet m))]
- ;; Explicitly iterate over a Java iterator in order to avoid running into issues as
- ;; discussed in http://dev.clojure.org/jira/browse/CLJ-1738
- (loop [coll (transient [])]
- (if (.hasNext it)
- (let [^java.util.Map$Entry e (.next it)]
- (recur (conj! coll [(.wrapped ^JavaEqualityWrapper (.getKey e)) (persistent! (.getValue e))])))
- (persistent! coll)))))
- :cljs
- (def group-by-seq (comp seq clojure.core/group-by)))
-
-#?(:clj
- (defn tuned-group-by
- "Equivalent of the built-in group-by, but tuned for when there are many values per key."
- [f coll]
- (->> coll
- (reduce (fn [map value]
- (let [k (f value)
- items (or (.get ^java.util.HashMap map k)
- (transient []))]
- (.put ^java.util.HashMap map k (conj! items value)))
- map)
- (java.util.HashMap.))
- (reduce (fn [map [key value]]
- (assoc! map key (persistent! value)))
- (transient {}))
- (persistent!)))
- :cljs
- (def tuned-group-by clojure.core/group-by))
-
-#?(:clj
- (defmacro thread-local-binding
- "Wraps given body in a try block, where it sets each given ThreadLocal binding
- and removes it in finally block."
- [bindings & body]
- (when-not (vector? bindings)
- (throw (ex-info "Binding needs to be a vector."
- {:bindings bindings})))
- (when-not (even? (count bindings))
- (throw (ex-info "Needs an even number of forms in binding vector"
- {:bindings bindings})))
- (let [binding-pairs (partition 2 bindings)]
- `(try
- ~@(for [[tl v] binding-pairs]
- `(.set ~tl ~v))
- ~@body
- (finally
- ~@(for [[tl] binding-pairs]
- `(.remove ~tl)))))))
-
-(defmacro eager-for
- "A for wrapped with a doall to force realisation. Usage is the same as regular for."
- [& body]
- `(doall (for ~@body)))
diff --git a/src/main/clojure/clara/rules/schema.clj b/src/main/clojure/clara/rules/schema.clj
new file mode 100644
index 00000000..50669daf
--- /dev/null
+++ b/src/main/clojure/clara/rules/schema.clj
@@ -0,0 +1,200 @@
+(ns clara.rules.schema
+ "Schema definition of Clara data structures using Prismatic's Schema library. This includes structures for rules and queries, as well as the schema
+ for the underlying Rete network itself. This can be used by tools or other libraries working with rules."
+ (:require [schema.core :as s]))
+
+(s/defn condition-type :- (s/enum :or :not :and :exists :fact :accumulator :test)
+ "Returns the type of node in a LHS condition expression."
+ [condition]
+ (if (map? condition) ; Leaf nodes are maps, per the schema
+
+ (cond
+ (:type condition) :fact
+ (:accumulator condition) :accumulator
+ :else :test)
+
+    ;; Otherwise the node must be a sequential that starts with the boolean operator.
+ (first condition)))
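+
+;; A minimal usage sketch:
+;; (condition-type {:type 'Temperature :constraints '[(< temperature 20)]}) ;=> :fact
+;; (condition-type [:not {:type 'Cold :constraints []}])                    ;=> :not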
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Rule and query structure schema.
+
+(def SExpr
+ (s/pred seq? "s-expression"))
+
+(def FactCondition
+ {:type s/Any ;(s/either s/Keyword (s/pred symbol?))
+ :constraints [SExpr]
+ ;; Original constraints preserved for tooling in case a transformation was applied to the condition.
+ (s/optional-key :original-constraints) [SExpr]
+ (s/optional-key :fact-binding) s/Keyword
+ (s/optional-key :args) s/Any})
+
+(def AccumulatorCondition
+ {:accumulator s/Any
+ :from FactCondition
+ (s/optional-key :result-binding) s/Keyword})
+
+(def TestCondition
+ {:constraints [SExpr]})
+
+(def LeafCondition
+ (s/conditional
+ :type FactCondition
+ :accumulator AccumulatorCondition
+ :else TestCondition))
+
+(declare Condition)
+
+(def BooleanCondition
+ [(s/one (s/enum :or :not :and :exists) "operator")
+ (s/recursive #'Condition)])
+
+(def Condition
+ (s/conditional
+ sequential? BooleanCondition
+ map? LeafCondition))
+
+(def Rule
+ {;; :ns-name is currently used to eval the :rhs form of a rule in the same
+ ;; context that it was originally defined in. It is optional and only used
+ ;; when given. It may be used for other purposes in the future.
+ (s/optional-key :ns-name) s/Symbol
+ (s/optional-key :name) (s/cond-pre s/Str s/Keyword)
+ (s/optional-key :doc) s/Str
+ (s/optional-key :props) {s/Keyword s/Any}
+ (s/optional-key :env) {s/Keyword s/Any}
+ :lhs [Condition]
+ :rhs s/Any})
+
+(def Query
+ {(s/optional-key :name) (s/cond-pre s/Str s/Keyword)
+ (s/optional-key :doc) s/Str
+ (s/optional-key :props) {s/Keyword s/Any}
+ (s/optional-key :env) {s/Keyword s/Any}
+ :lhs [Condition]
+ :params #{s/Keyword}})
+
+(def Production
+ (s/conditional
+ :rhs Rule
+ :else Query))
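+
+;; A minimal validation sketch for the production schemas above
+;; (the rule map is illustrative, not taken from the codebase):
+;; (s/validate Rule
+;;             {:lhs [{:type 'Temperature :constraints '[(< temperature 20)]}]
+;;              :rhs '(println "cold!")})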
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Schema for the Rete network itself.
+
+(def ConditionNode
+ {:node-type (s/enum :join :negation :test :accumulator)
+ :condition LeafCondition
+
+ ;; Captured environment in which the condition was defined, like closed variables.
+ ;; Most rules (such as those defined by defrule) have no surrounding
+ ;; environment, but user generated rules might.
+ (s/optional-key :env) {s/Keyword s/Any}
+
+ ;; Variables used to join to other expressions in the network.
+ (s/optional-key :join-bindings) #{s/Keyword}
+
+ ;; Variable bindings used by expressions in this node.
+ :used-bindings #{s/Keyword}
+
+ ;; Variable bindings used in the constraints that are not present in the ancestors of this node.
+ :new-bindings #{s/Keyword}
+
+ ;; An expression used to filter joined data.
+ (s/optional-key :join-filter-expressions) LeafCondition
+
+ ;; Bindings used to perform non-hash joins in the join filter expression.
+   ;; This is a subset of :used-bindings.
+ (s/optional-key :join-filter-join-bindings) #{s/Keyword}
+
+ ;; The expression to create the accumulator.
+ (s/optional-key :accumulator) s/Any
+
+ ;; The optional fact or accumulator result binding.
+ (s/optional-key :result-binding) s/Keyword})
+
+(def ProductionNode
+ {:node-type (s/enum :production :query)
+
+ ;; Rule for rule nodes.
+ (s/optional-key :production) Rule
+
+ ;; Query for query nodes.
+ (s/optional-key :query) Query
+
+ ;; Bindings used in the rule right-hand side.
+ (s/optional-key :bindings) #{s/Keyword}})
+
+;; Alpha network schema.
+(def AlphaNode
+ {:id s/Int
+ :condition FactCondition
+   ;; Optional environment for the alpha node.
+ (s/optional-key :env) {s/Keyword s/Any}
+ ;; IDs of the beta nodes that are the children.
+ :beta-children [s/Num]})
+
+;; A graph representing the beta side of the rete network.
+(def BetaGraph
+ {;; Edges from parent to child nodes.
+ :forward-edges {s/Int #{s/Int}}
+
+ ;; Edges from child to parent nodes.
+ :backward-edges {s/Int #{s/Int}}
+
+ ;; Map of identifier to condition nodes.
+ :id-to-condition-node {s/Int (s/cond-pre (s/eq :clara.rules.compiler/root-condition)
+ ConditionNode)}
+
+ ;; Map of identifier to query or rule nodes.
+ :id-to-production-node {s/Int ProductionNode}
+
+ ;; Map of identifier to new bindings created by the corresponding node.
+ :id-to-new-bindings {s/Int #{s/Keyword}}})
+
+(defn tuple
+  "Given `items`, a list of schemas, generates a schema that validates that a vector's elements match `items`
+   and appear in the order provided."
+ [& items]
+ (s/constrained [s/Any]
+ (fn [tuple-vals]
+ (and (= (count tuple-vals)
+ (count items))
+ (every? nil? (map s/check items tuple-vals))))
+ "tuple"))
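+
+;; A minimal usage sketch of the tuple helper:
+;; (s/check (tuple s/Int s/Keyword) [10 :alpha-expr]) ;=> nil (valid)
+;; (s/check (tuple s/Int s/Keyword) [:alpha-expr 10]) ;=> a validation error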
+
+(def NodeCompilationValue
+ (s/constrained {s/Keyword s/Any}
+ (fn [compilation]
+ (let [expr-keys #{:alpha-expr :action-expr :join-filter-expr :test-expr :accum-expr}]
+ (some expr-keys (keys compilation))))
+ "node-compilation-value"))
+
+(def NodeCompilationContext
+ (s/constrained NodeCompilationValue
+ (fn [compilation]
+ (let [xor #(and (or %1 %2)
+ (not (and %1 %2)))]
+ (and (contains? compilation :compile-ctx)
+ (contains? (:compile-ctx compilation) :msg)
+ (xor (contains? (:compile-ctx compilation) :condition)
+ (contains? (:compile-ctx compilation) :production)))))
+ "node-compilation-context"))
+
+;; A map of [ ] to SExpression, used in compilation of the rulebase.
+(def NodeExprLookup
+ ;; schema should be NodeCompilationContext in standard compilation,
+ ;; but during serde it might be either as :compile-ctx is only used for compilation failures
+ ;; and can be disabled post compilation.
+ {(tuple s/Int s/Keyword) (tuple SExpr (s/conditional :compile-ctx NodeCompilationContext
+ :else NodeCompilationValue))})
+
+;; An evaluated version of the schema mentioned above.
+(def NodeFnLookup
+ ;; This schema uses a relaxed version of NodeCompilationContext as once the expressions
+ ;; have been eval'd there is technically no need for compile-ctx to be maintained except for
+ ;; deserialization. In such events the compile-ctx would only be valuable when the environment
+ ;; where the Session is being deserialized doesn't match that of the serialization, ie functions
+ ;; and symbols cannot be resolved on the deserialization side.
+ {(tuple s/Int s/Keyword) (tuple (s/pred ifn? "ifn?") NodeCompilationValue)})
\ No newline at end of file
diff --git a/src/main/clojure/clara/rules/test_rules_data.clj b/src/main/clojure/clara/rules/test_rules_data.clj
index dac16da5..8239fb4a 100644
--- a/src/main/clojure/clara/rules/test_rules_data.clj
+++ b/src/main/clojure/clara/rules/test_rules_data.clj
@@ -5,23 +5,22 @@
(ns clara.rules.test-rules-data
(:require [clara.rules]
- [clara.rules.testfacts]
- [clara.rules.compiler :as com]))
+ [clara.rules.testfacts]))
(def the-rules
[{:doc "Rule to determine whether it is indeed cold and windy."
:name "clara.rules.test-rules-data/is-cold-and-windy-data"
- :lhs [{:type (if (com/compiling-cljs?) 'clara.rules.testfacts/Temperature 'clara.rules.testfacts.Temperature)
+ :lhs [{:type clara.rules.testfacts.Temperature
:constraints '[(< temperature 20)
(== ?t temperature)]}
- {:type (if (com/compiling-cljs?) 'clara.rules.testfacts/WindSpeed 'clara.rules.testfacts.WindSpeed)
+ {:type clara.rules.testfacts.WindSpeed
:constraints '[(> windspeed 30)
(== ?w windspeed)]}]
:rhs '(clara.rules/insert! (clara.rules.testfacts/->ColdAndWindy ?t ?w))}
{:name "clara.rules.test-rules-data/find-cold-and-windy-data"
:lhs [{:fact-binding :?fact
- :type (if (com/compiling-cljs?) 'clara.rules.testfacts/ColdAndWindy 'clara.rules.testfacts.ColdAndWindy)
+ :type clara.rules.testfacts.ColdAndWindy
:constraints []}]
:params #{}}])
@@ -35,4 +34,4 @@
(defn weather-rules-with-keyword-names
"Return some weather rules using keyword names"
[]
- the-rules-with-keyword-names)
\ No newline at end of file
+ the-rules-with-keyword-names)
diff --git a/src/main/clojure/clara/rules/testfacts.clj b/src/main/clojure/clara/rules/testfacts.clj
new file mode 100644
index 00000000..f11b2ac9
--- /dev/null
+++ b/src/main/clojure/clara/rules/testfacts.clj
@@ -0,0 +1,24 @@
+(ns clara.rules.testfacts
+  "This namespace exists primarily for testing purposes, working around the fact that we cannot AOT compile test classes. This should be moved to the tests once a workaround for this is found.")
+
+;; Reflection against records requires them to be compiled AOT, so we temporarily
+;; place them here as leiningen won't AOT compile test resources.
+(defrecord Temperature [temperature location])
+(defrecord WindSpeed [windspeed location])
+(defrecord Cold [temperature])
+(defrecord Hot [temperature])
+(defrecord ColdAndWindy [temperature windspeed])
+(defrecord LousyWeather [])
+(defrecord TemperatureHistory [temperatures])
+
+;; Test facts for chained rules.
+(defrecord First [])
+(defrecord Second [])
+(defrecord Third [])
+(defrecord Fourth [])
+
+;; Record utilizing clj flexible field names.
+(defrecord FlexibleFields [it-works?
+ a->b
+ x+y
+ bang!])
diff --git a/src/main/clojure/clara/rules/update_cache/cancelling.clj b/src/main/clojure/clara/rules/update_cache/cancelling.clj
index cb13cc12..de506c3d 100644
--- a/src/main/clojure/clara/rules/update_cache/cancelling.clj
+++ b/src/main/clojure/clara/rules/update_cache/cancelling.clj
@@ -34,7 +34,6 @@
(hashCode [this] fact-hash))
-
;;; These functions essentially allow us to use a Java map to create a set that stores
;;; the frequency of its items. Note that when multiple instances of a fact are added
;;; we keep both instances as distinct objects. We don't strictly speaking need to do this
diff --git a/src/main/clojure/clara/rules/update_cache/core.clj b/src/main/clojure/clara/rules/update_cache/core.clj
new file mode 100644
index 00000000..80073ea6
--- /dev/null
+++ b/src/main/clojure/clara/rules/update_cache/core.clj
@@ -0,0 +1,32 @@
+(ns clara.rules.update-cache.core)
+
+;; Record indicating pending insertion or removal of a sequence of facts.
+(defrecord PendingUpdate [type facts])
+
+;; This is expected to be used while activating rules in a given salience group
+;; to store updates before propagating those updates to the alpha nodes as a group.
+(defprotocol UpdateCache
+ (add-insertions! [this facts])
+ (add-retractions! [this facts])
+ (get-updates-and-reset! [this]))
+
+;; This cache replicates the behavior prior to https://github.com/cerner/clara-rules/issues/249,
+;; just in a stateful object rather than a persistent data structure.
+(deftype OrderedUpdateCache [updates]
+
+ UpdateCache
+
+ (add-insertions! [this facts]
+ (swap! updates into [(->PendingUpdate :insert facts)]))
+
+ (add-retractions! [this facts]
+ (swap! updates into [(->PendingUpdate :retract facts)]))
+
+ (get-updates-and-reset! [this]
+ (let [current-updates @updates]
+ (reset! updates [])
+ (partition-by :type current-updates))))
+
+(defn get-ordered-update-cache
+ []
+ (OrderedUpdateCache. (atom [])))
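+
+;; A minimal usage sketch (the fact values are illustrative):
+;; (let [cache (get-ordered-update-cache)]
+;;   (add-insertions! cache [{:temp 10}])
+;;   (add-retractions! cache [{:temp 40}])
+;;   (get-updates-and-reset! cache))
+;; ;=> two groups of PendingUpdate records, insertions first, then retractions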
diff --git a/src/main/clojure/clara/tools/fact_graph.clj b/src/main/clojure/clara/tools/fact_graph.clj
new file mode 100644
index 00000000..dacc7455
--- /dev/null
+++ b/src/main/clojure/clara/tools/fact_graph.clj
@@ -0,0 +1,96 @@
+(ns clara.tools.fact-graph
+ (:require [clara.tools.inspect :as i]
+ [schema.core :as sc]))
+
+;; This node will have either facts or results of accumulations as its parents.
+;; Its children will be facts that the rule inserted.
+(sc/defrecord RuleActivationNode [rule-name :- sc/Str
+ id :- sc/Int])
+
+;; The parents of this node are facts over which an accumulation was run.
+;; It will have a single child, the result of the accumulation. So, for example,
+;; with the condition [?t <- (acc/min :temperature) :from [Temperature]], if we have
+;; (->Temperature 50 "MCI") and (->Temperature 30 "MCI") the child of this node will be
+;; an AccumulationResult with the :result 30 and the parents will be the two Temperature facts.
+(sc/defrecord AccumulationNode [id :- sc/Int])
+
+;; As alluded to above, this node represents the result of an accumulation. Its child will be a
+;; RuleActivationNode. Note that there will be an AccumulationResult for each distinct rule firing.
+(sc/defrecord AccumulationResultNode [id :- sc/Int
+ result :- sc/Any])
+
+(def ^:private empty-fact-graph {:forward-edges {}
+ :backward-edges {}})
+
+(defn ^:private add-edge [graph from to]
+ (-> graph
+ (update-in [:forward-edges from] (fnil conj #{}) to)
+ (update-in [:backward-edges to] (fnil conj #{}) from)))
+
+(defn ^:private add-insertion-to-graph
+ [original-graph id-counter fact-inserted {:keys [rule-name explanation]}]
+ (let [facts-direct (sequence
+ (comp (remove :facts-accumulated)
+ (map :fact))
+ (:matches explanation))
+
+ activation-node (map->RuleActivationNode {:rule-name rule-name
+ :id (swap! id-counter inc)})
+
+ accum-matches (filter :facts-accumulated (:matches explanation))]
+
+ (as-> original-graph graph
+ (if (seq accum-matches)
+ (reduce (fn [reduce-graph accum-match]
+ (let [accum-node (->AccumulationNode (swap! id-counter inc))
+ accum-result (->AccumulationResultNode (swap! id-counter inc) (:fact accum-match))]
+ (as-> reduce-graph g
+ ;; Add edges from individual facts to an AccumulationResultNode.
+ (reduce (fn [g accum-element]
+ (add-edge g accum-element accum-node))
+ g (:facts-accumulated accum-match))
+ (add-edge g accum-node accum-result)
+ (add-edge g accum-result activation-node))))
+ graph
+ accum-matches)
+ graph)
+ ;; Add edges to the rule activation node from the facts that contributed
+      ;; to the rule firing that were not in an accumulator condition.
+ (reduce (fn [g f]
+ (add-edge g f activation-node))
+ graph
+ facts-direct)
+ (add-edge graph activation-node fact-inserted))))
+
+(defn session->fact-graph
+ "Given a session, return a graph structure connecting all facts to the facts
+ that they caused to be logically inserted. Note that such connections will not
+ be made for unconditionally inserted facts."
+ [session]
+ (let [id-counter (atom 0)
+        ;; Use a counter, whose value will be added to internal nodes, to ensure that
+        ;; these nodes are not equal to each other. This ensures that the number of internal
+        ;; nodes accurately reflects the cardinality of each fact in the session.
+
+ ;; This function generates one of the entries in the map returned by clara.tools.inspect/inspect.
+ ;; The function is private since it would be confusing for only one of the entries in clara.tools.inspect/inspect
+ ;; to be accessible without generating the entire session inspection map. However, we want to reuse the functionality
+ ;; here without the performance penalty of generating all of the inspection data for the session. Therefore, for now
+ ;; we break the privacy of the function here. Once issue 286 is completed we should remove this private Var access.
+ fact->explanations (@#'i/gen-fact->explanations session)
+
+ ;; Produce tuples of the form [inserted-fact {:rule rule :explanation clara.tools.inspect.Explanation}]
+ insertion-tuples (into []
+ (comp
+ (map (fn [[fact v]]
+ (map (fn [{:keys [rule explanation]}]
+ [fact {:rule-name (:name rule)
+ :explanation explanation}])
+ v)))
+ cat)
+ fact->explanations)]
+
+ (reduce (fn [graph tuple]
+ (apply add-insertion-to-graph graph id-counter tuple))
+ empty-fact-graph
+ insertion-tuples)))
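+
+;; A hedged usage sketch, assuming a namespace of rules that logically insert
+;; facts (mk-session, insert and fire-rules are the public clara.rules API;
+;; the rule namespace and facts are illustrative):
+;; (-> (clara.rules/mk-session 'example.rules)
+;;     (clara.rules/insert (->Temperature 10 "MCI") (->WindSpeed 40 "MCI"))
+;;     (clara.rules/fire-rules)
+;;     (session->fact-graph))
+;; ;=> {:forward-edges {...} :backward-edges {...}}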
diff --git a/src/main/clojure/clara/tools/inspect.clj b/src/main/clojure/clara/tools/inspect.clj
new file mode 100644
index 00000000..b2831863
--- /dev/null
+++ b/src/main/clojure/clara/tools/inspect.clj
@@ -0,0 +1,354 @@
+(ns clara.tools.inspect
+ "Tooling to inspect a rule session. The two major methods here are:
+
+ * inspect, which returns a data structure describing the session that can be used by tooling.
+ * explain-activations, which uses inspect and prints a human-readable description covering
+ why each rule activation or query match occurred."
+ (:require [clara.rules.engine :as eng]
+ [clara.rules.schema :as schema]
+ [clara.rules.memory :as mem]
+ [clara.tools.internal.inspect :as i]
+ [clojure.main :refer [demunge]]
+ [schema.core :as s]
+ [clojure.string :as str])
+ (:import [clara.rules.engine
+ ProductionNode
+ RootJoinNode
+ HashJoinNode
+ ExpressionJoinNode
+ NegationNode
+ NegationWithJoinFilterNode]))
+
+(s/defschema ConditionMatch
+ "A structure associating a condition with the facts that matched them. The fields are:
+ :fact - A fact propagated from this condition in a rule or query. For non-accumulator conditions,
+ this will be the fact matched by the condition. For accumulator conditions, it will be the result
+ of the accumulation. So, for example, if we have a condition like
+
+ [?cold <- Cold]
+
+ a ConditionMatch for this condition will have a Cold fact in its :fact field. If we have a condition like
+
+ [?min-cold <- (acc/min :temperature) :from [Cold]]
+
+ the value of :fact will be the minimum temperature returned by the accumulator.
+
+ :condition - A structure representing this condition. This is the same structure used inside the structures defining
+ rules and queries.
+
+ :facts-accumulated (nullable) : When the condition is an accumulator condition, this will contain the individual facts over
+ which the accumulator ran. For example, in the case above with the condition
+
+ [?min-cold <- (acc/min :temperature) :from [Cold]]
+
+ this will contain the individual Cold facts over which we accumulated, while the :fact field
+ will contain the result of the accumulation."
+ {:fact s/Any
+ :condition schema/Condition
+ (s/optional-key :facts-accumulated) [s/Any]})
+
+;; A structured explanation of why a rule or query matched.
+;; This is derived from the Rete-style tokens, but this token
+;; is designed to propagate all context needed to easily inspect
+;; the state of rules.
+(s/defrecord Explanation [matches :- [ConditionMatch]
+ bindings :- {s/Keyword s/Any}]) ; Bound variables
+
+;; Schema of an inspected rule session.
+(def InspectionSchema
+ {:rule-matches {schema/Rule [Explanation]}
+ :query-matches {schema/Query [Explanation]}
+ :condition-matches {schema/Condition [s/Any]}
+ :insertions {schema/Rule [{:explanation Explanation :fact s/Any}]}})
+
+(defn- get-condition-matches
+ "Returns facts matching each condition"
+ [nodes memory]
+ (let [node-class->node-type (fn [node]
+ (get {ExpressionJoinNode :join
+ HashJoinNode :join
+ RootJoinNode :join
+ NegationNode :negation
+ NegationWithJoinFilterNode :negation} (type node)))
+
+ join-node-ids (for [beta-node nodes
+ :let [node-type (node-class->node-type beta-node)]
+ ;; Unsupported and irrelevant node types will have a node-type of nil
+ ;; since the map in node-class->node-type won't contain an entry
+ ;; for them, so this check will remove them.
+ :when (contains? #{:join :negation}
+ node-type)]
+ [(:id beta-node) (:condition beta-node) node-type])]
+ (reduce
+ (fn [matches [node-id condition node-type]]
+ (update-in matches
+ (condp = node-type
+
+ :join
+ [condition]
+
+ ;; Negation nodes store the fact that they are a negation
+ ;; in their :node-type and strip the information out of the
+ ;; :condition field. We reconstruct the negation boolean condition
+ ;; that is contained in rule and query data structures created by defrule
+ ;; and that conforms to the Condition schema.
+ :negation
+ [[:not condition]])
+ concat (map :fact (mem/get-elements-all memory {:id node-id}))))
+ {}
+ join-node-ids)))
+
+(defn- to-explanations
+ "Helper function to convert tokens to explanation records."
+ [session tokens]
+ (let [memory (-> session eng/components :memory)
+ id-to-node (get-in (eng/components session) [:rulebase :id-to-node])]
+
+ (for [{:keys [matches bindings] :as token} tokens]
+ (->Explanation
+ ;; Convert matches to explanation structure.
+ (for [[fact node-id] matches
+ :let [node (id-to-node node-id)
+ condition (if (:accum-condition node)
+
+ {:accumulator (get-in node [:accum-condition :accumulator])
+ :from {:type (get-in node [:accum-condition :from :type])
+ :constraints (or (seq (get-in node [:accum-condition :from :original-constraints]))
+ (get-in node [:accum-condition :from :constraints]))}}
+
+ {:type (:type (:condition node))
+ :constraints (or (seq (:original-constraints (:condition node)))
+ (:constraints (:condition node)))})]]
+ (if (:accum-condition node)
+ {:fact fact
+ :condition condition
+ :facts-accumulated (eng/token->matching-elements node memory token)}
+ {:fact fact
+ :condition condition}))
+
+ ;; Remove generated bindings from user-facing explanation.
+ (into {} (remove (fn [[k v]]
+ (.startsWith (name k) "?__gen__"))
+ bindings))))))
+
+(defn ^:private gen-all-rule-matches
+ [session]
+ (when-let [activation-info (i/get-activation-info session)]
+ (let [grouped-info (group-by #(-> % :activation :node) activation-info)]
+ (into {}
+ (map (fn [[k v]]
+ [(:production k)
+ (to-explanations session (map #(-> % :activation :token) v))]))
+ grouped-info))))
+
+(defn ^:private gen-fact->explanations
+ [session]
+
+ (let [{:keys [memory rulebase]} (eng/components session)
+ {:keys [productions production-nodes query-nodes]} rulebase
+ rule-to-rule-node (into {} (for [rule-node production-nodes]
+ [(:production rule-node) rule-node]))]
+ (apply merge-with into
+ (for [[rule rule-node] rule-to-rule-node
+ token (keys (mem/get-insertions-all memory rule-node))
+ insertion-group (mem/get-insertions memory rule-node token)
+ insertion insertion-group]
+ {insertion [{:rule rule
+ :explanation (first (to-explanations session [token]))}]}))))
+
+(def ^{:doc "Return a new session on which information will be gathered for optional inspection keys.
+             This can significantly increase memory consumption since retracted facts
+             cannot be garbage collected as they normally would be."}
+ with-full-logging i/with-activation-listening)
+
+(def ^{:doc "Return a new session without information gathering on this session for optional inspection keys.
+ This new session will not retain references to any such information previously gathered."}
+ without-full-logging i/without-activation-listening)
+
+(s/defn inspect
+ " Returns a representation of the given rule session useful to understand the
+ state of the underlying rules.
+
+ The returned structure always includes the following keys:
+
+ * :rule-matches -- a map of rule structures to their matching explanations.
+ Note that this only includes rule matches with corresponding logical
+ insertions after the rules finished firing.
+ * :query-matches -- a map of query structures to their matching explanations.
+ * :condition-matches -- a map of conditions pulled from each rule to facts they match.
+ * :insertions -- a map of rules to a sequence of {:explanation E, :fact F} records
+ to allow inspection of why a given fact was inserted.
+ * :fact->explanations -- a map of facts inserted to a sequence
+ of maps of the form {:rule rule-structure :explanation explanation},
+ where each such map justifies a single insertion of the fact.
+
+ And additionally includes the following keys for operations
+ performed after a with-full-logging call on the session:
+
+  * :unfiltered-rule-matches -- a map of rule structures to their matching explanations.
+ This includes all rule activations, regardless of whether they led to insertions or if
+ they were ultimately retracted. This should be considered low-level information primarily
+ useful for debugging purposes rather than application control logic, although legitimate use-cases
+ for the latter do exist if care is taken. Patterns of insertion and retraction prior to returning to
+ the caller are internal implementation details of Clara unless explicitly controlled by the user.
+
+ Users may inspect the entire structure for troubleshooting or explore it
+ for specific cases. For instance, the following code snippet could look
+ at all matches for some example rule:
+
+ (defrule example-rule ... )
+
+ ...
+
+ (get-in (inspect example-session) [:rule-matches example-rule])
+
+ ...
+
+ The above segment will return matches for the rule in question."
+ [session] :- InspectionSchema
+ (let [{:keys [memory rulebase]} (eng/components session)
+ {:keys [productions production-nodes query-nodes id-to-node]} rulebase
+
+ ;; Map of queries to their nodes in the network.
+ query-to-nodes (into {} (for [[query-name query-node] query-nodes]
+ [(:query query-node) query-node]))
+
+ ;; Map of rules to their nodes in the network.
+ rule-to-nodes (into {} (for [rule-node production-nodes]
+ [(:production rule-node) rule-node]))
+
+ base-info {:rule-matches (into {}
+ (for [[rule rule-node] rule-to-nodes]
+ [rule (to-explanations session
+ (keys (mem/get-insertions-all memory rule-node)))]))
+
+ :query-matches (into {}
+ (for [[query query-node] query-to-nodes]
+ [query (to-explanations session
+ (mem/get-tokens-all memory query-node))]))
+
+ :condition-matches (get-condition-matches (vals id-to-node) memory)
+
+ :insertions (into {}
+ (for [[rule rule-node] rule-to-nodes]
+ [rule
+ (for [token (keys (mem/get-insertions-all memory rule-node))
+ insertion-group (get (mem/get-insertions-all memory rule-node) token)
+ insertion insertion-group]
+ {:explanation (first (to-explanations session [token])) :fact insertion})]))
+
+ :fact->explanations (gen-fact->explanations session)}]
+
+ (if-let [unfiltered-rule-matches (gen-all-rule-matches session)]
+ (assoc base-info :unfiltered-rule-matches unfiltered-rule-matches)
+ base-info)))
+
+(defn- explain-activation
+ "Prints a human-readable explanation of the facts and conditions that created the Rete token."
+ ([explanation] (explain-activation explanation ""))
+ ([explanation prefix]
+ (doseq [{:keys [fact condition]} (:matches explanation)]
+ (if (:from condition)
+ ;; Explain why the accumulator matched.
+ (let [{:keys [accumulator from]} condition]
+ (println prefix fact)
+ (println prefix " accumulated with" accumulator)
+ (println prefix " from" (:type from))
+ (println prefix " where" (:constraints from)))
+
+ ;; Explain why a condition matched.
+ (let [{:keys [type constraints]} condition]
+ (println prefix fact)
+ (println prefix " is a" type)
+ (println prefix " where" constraints))))))
+
+(defn explain-activations
+ "Prints a human-friendly explanation of why rules and queries matched in the given session.
+  A caller may optionally pass a :rule-filter-fn, a predicate applied to each rule that
+  determines whether its matches are explained, for example:
+
+ (clara.tools.inspect/explain-activations session
+ :rule-filter-fn (fn [rule] (re-find my-rule-regex (:name rule))))"
+
+ [session & {:keys [rule-filter-fn] :as options}]
+ (let [filter-fn (or rule-filter-fn (constantly true))]
+
+ (doseq [[rule explanations] (:rule-matches (inspect session))
+ :when (filter-fn rule)
+ :when (seq explanations)]
+ (println "rule" (or (:name rule) (str "<" (:lhs rule) ">")))
+ (println " executed")
+ (println " " (:rhs rule))
+ (doseq [explanation explanations]
+ (println " with bindings")
+ (println " " (:bindings explanation))
+ (println " because")
+ (explain-activation explanation " "))
+ (println))
+
+ (doseq [[rule explanations] (:query-matches (inspect session))
+ :when (filter-fn rule)
+ :when (seq explanations)]
+ (println "query" (or (:name rule) (str "<" (:lhs rule) ">")))
+ (doseq [explanation explanations]
+ (println " with bindings")
+ (println " " (:bindings explanation))
+ (println " qualified because")
+ (explain-activation explanation " "))
+ (println))))
+
+(let [inverted-type-lookup (zipmap (vals eng/node-type->abbreviated-type)
+ (keys eng/node-type->abbreviated-type))]
+ (defn node-fn-name->production-name
+ "A helper function for retrieving the name or names of rules that a generated function belongs to.
+
+ 'session' - a LocalSession from which a function was retrieved
+ 'node-fn' - supports the following types:
+ 1. String - expected to be in the format '<namespace>/<node-fn-name>__<id>'.
+ Expected use-case for string would be in the event that a user copy-pasted this function identifier
+ from an external tool, ex. a jvm profiler
+ 2. Symbol - expected to be in the format '<namespace>/<node-fn-name>__<id>'.
+ Has the same use-case as string, just adds flexibility to the type.
+ 3. Function - expected to be the actual function from the Session
+ This covers a use-case where the user can capture the function being used and programmatically
+ trace it back to the rules being executed."
+ [session node-fn]
+ (let [fn-name-str (cond
+ (string? node-fn)
+ node-fn
+
+ (fn? node-fn)
+ (str node-fn)
+
+ (symbol? node-fn)
+ (str node-fn)
+
+ :else
+ (throw (ex-info "Unsupported type for 'node-fn-name->production-name'"
+ {:type (type node-fn)
+ :supported-types ["string" "symbol" "fn"]})))
+ fn-name-str (-> fn-name-str demunge (str/split #"/") last)
+
+ simple-fn-name (-> (or (re-find #"(.+)--\d+" fn-name-str) ;; anonymous function
+ (re-find #"(.+)" fn-name-str)) ;; regular function
+ last)
+
+ [node-abr node-id _] (str/split simple-fn-name #"-")]
+ ;; used as a sanity check that the fn provided is in the form expected, ie. <node-abbreviation>-<node-id>-<suffix>
+ (if (contains? inverted-type-lookup node-abr)
+ (if-let [node (-> (eng/components session)
+ :rulebase
+ :id-to-node
+ (get (Long/valueOf ^String node-id)))]
+ (if (= ProductionNode (type node))
+ [(-> node :production :name)]
+ (if-let [production-names (seq (eng/node-rule-names (some-fn :production :query) node))]
+ production-names
+ ;; This should be unreachable, but it is left here in the event that the rulebase is somehow corrupted
+ (throw (ex-info "Unable to determine suitable name from node"
+ {:node node}))))
+ (throw (ex-info "Node-id not found in rulebase"
+ {:node-id node-id
+ :simple-name simple-fn-name})))
+ (throw (ex-info "Unable to determine node from function"
+ {:name node-fn
+ :simple-name simple-fn-name}))))))
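+
+;; Illustrative usage sketch: mapping a generated node function (or its profiler-style
+;; name) back to the rules it belongs to. The session and captured function below are
+;; hypothetical.
+;;
+;;   (node-fn-name->production-name my-session some-captured-node-fn)
+;;   ;; returns the name(s) of the rule(s)/query(ies) associated with that node function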
diff --git a/src/main/clojure/clara/tools/internal/inspect.clj b/src/main/clojure/clara/tools/internal/inspect.clj
new file mode 100644
index 00000000..a6b33498
--- /dev/null
+++ b/src/main/clojure/clara/tools/internal/inspect.clj
@@ -0,0 +1,80 @@
+(ns clara.tools.internal.inspect
+ "Internal implementation details of session inspection. Nothing in this namespace
+ should be directly referenced by callers outside of the clara-rules project."
+ (:require [clara.rules.listener :as l]
+ [clara.rules.engine :as eng]))
+
+(declare to-persistent-listener)
+
+(deftype TransientActivationListener [activations]
+ l/ITransientEventListener
+ (fire-activation! [listener activation resulting-operations]
+ (swap! (.-activations listener) conj {:activation activation
+ :resulting-operations resulting-operations})
+ listener)
+ (to-persistent! [listener]
+ (to-persistent-listener @(.-activations listener)))
+
+ ;; The methods below don't do anything; they aren't needed for this functionality.
+ (left-activate! [listener node tokens]
+ listener)
+ (left-retract! [listener node tokens]
+ listener)
+ (right-activate! [listener node elements]
+ listener)
+ (right-retract! [listener node elements]
+ listener)
+ (insert-facts! [listener node token facts]
+ listener)
+ (alpha-activate! [listener node facts]
+ listener)
+ (insert-facts-logical! [listener node token facts]
+ listener)
+ (retract-facts! [listener node token facts]
+ listener)
+ (alpha-retract! [listener node facts]
+ listener)
+ (retract-facts-logical! [listener node token facts]
+ listener)
+ (add-accum-reduced! [listener node join-bindings result fact-bindings]
+ listener)
+ (remove-accum-reduced! [listener node join-bindings fact-bindings]
+ listener)
+ (add-activations! [listener node activations]
+ listener)
+ (remove-activations! [listener node activations]
+ listener)
+ (activation-group-transition! [listener previous-group new-group]
+ listener)
+ (fire-rules! [listener node]
+ listener))
+
+(deftype PersistentActivationListener [activations]
+ l/IPersistentEventListener
+ (to-transient [listener]
+ (TransientActivationListener. (atom activations))))
+
+(defn to-persistent-listener
+ [activations]
+ (PersistentActivationListener. activations))
+
+(defn with-activation-listening
+ [session]
+ (if (empty? (eng/find-listeners session (partial instance? PersistentActivationListener)))
+ (eng/with-listener session (PersistentActivationListener. []))
+ session))
+
+(defn without-activation-listening
+ [session]
+ (eng/remove-listeners session (partial instance? PersistentActivationListener)))
+
+(defn get-activation-info
+ [session]
+ (let [matching-listeners (eng/find-listeners session (partial instance? PersistentActivationListener))]
+ (condp = (count matching-listeners)
+ 0 nil
+ 1 (-> matching-listeners ^PersistentActivationListener (first) .-activations)
+ (throw (ex-info "Found more than one PersistentActivationListener on session"
+ {:session session})))))
+
+
diff --git a/src/main/clojure/clara/tools/loop_detector.clj b/src/main/clojure/clara/tools/loop_detector.clj
new file mode 100644
index 00000000..bbe14fe4
--- /dev/null
+++ b/src/main/clojure/clara/tools/loop_detector.clj
@@ -0,0 +1,102 @@
+(ns clara.tools.loop-detector
+ (:require [clara.rules.listener :as l]
+ [clara.rules.engine :as eng]
+ [clara.tools.tracing :as trace]))
+
+;; Although we use a single type here, note that the cycles-count and on-limit-delay fields
+;; will be nil while the listener is in its persistent state.
+(deftype CyclicalRuleListener [cycles-count max-cycles on-limit-fn on-limit-delay]
+ l/ITransientEventListener
+ (left-activate! [listener node tokens]
+ listener)
+ (left-retract! [listener node tokens]
+ listener)
+ (right-activate! [listener node elements]
+ listener)
+ (right-retract! [listener node elements]
+ listener)
+ (insert-facts! [listener node token facts]
+ listener)
+ (alpha-activate! [listener node facts]
+ listener)
+ (insert-facts-logical! [listener node token facts]
+ listener)
+ (retract-facts! [listener node token facts]
+ listener)
+ (alpha-retract! [listener node facts]
+ listener)
+ (retract-facts-logical! [listener node token facts]
+ listener)
+ (add-accum-reduced! [listener node join-bindings result fact-bindings]
+ listener)
+ (remove-accum-reduced! [listener node join-bindings fact-bindings]
+ listener)
+ (add-activations! [listener node activations]
+ listener)
+ (remove-activations! [listener node activations]
+ listener)
+ (fire-activation! [listener activation resulting-operations]
+ listener)
+ (fire-rules! [listener node]
+ listener)
+ (activation-group-transition! [listener original-group new-group]
+ (when (>= @cycles-count max-cycles)
+ @on-limit-delay)
+ (swap! cycles-count inc))
+ (to-persistent! [listener]
+ (CyclicalRuleListener. nil max-cycles on-limit-fn nil))
+
+ l/IPersistentEventListener
+ (to-transient [listener]
+ ;; To-transient will be called when a call to fire-rules begins, and to-persistent! will be called when it ends.
+ ;; The resetting of the cycles-count atom prevents cycles from one call of fire-rules from leaking into the count
+ ;; for another. Similarly the on-limit-fn should be invoked 1 or 0 times per fire-rules call. We only call
+ ;; it once, rather than each time the limit is breached, since it may not cause the call to terminate but rather log
+ ;; something etc., in which case we don't want to spam the user's logs.
+ (CyclicalRuleListener. (atom 0) max-cycles on-limit-fn (delay (on-limit-fn)))))
+
+(defn throw-exception-on-max-cycles
+ []
+ (let [trace (trace/listener->trace (l/to-persistent! (:listener eng/*current-session*)))]
+ (throw (ex-info "Reached maximum activation group transitions threshhold; an infinite loop is suspected"
+ (cond-> {:clara-rules/infinite-loop-suspected true}
+ trace (assoc :trace trace))))))
+
+(defn ->standard-out-warning
+ []
+ (println "Reached maximum activation group transitions threshhold; an infinite loop is suspected"))
+
+(defn on-limit-fn-lookup
+ [fn-or-keyword]
+ (cond
+ (= fn-or-keyword :throw-exception) throw-exception-on-max-cycles
+ (= fn-or-keyword :standard-out-warning) ->standard-out-warning
+ (ifn? fn-or-keyword) fn-or-keyword
+ :else (throw (ex-info "The :on-error-fn must be a non-nil function value" {:clara-rules/max-cycles-exceeded-fn fn-or-keyword}))))
+
+(defn with-loop-detection
+ "Detect suspected infinite loops in the session.
+
+  max-cycles is the maximum number of activation group (salience level) transitions permitted.
+  This count includes both transitions between different activation groups and repeated evaluations
+  of the same activation group that occur when newly inserted facts lead to another cycle of
+  rule activations in that group.
+
+ on-limit-fn is a 0-arg function that is invoked exactly once when this limit is exceeded. It can either be
+ a user-provided function or a keyword that indicates a built-in function to use. Currently supported keywords are:
+
+ :throw-exception - This throws an exception when the limit is reached. If tracing is enabled, the exception will include
+ the trace.
+
+ :standard-out-warning - This prints a warning to standard out."
+
+ [session max-cycles on-limit-fn]
+
+ (let [on-limit-fn-normalized (on-limit-fn-lookup on-limit-fn)]
+ (eng/with-listener
+ session
+ (CyclicalRuleListener.
+ nil
+ max-cycles
+ on-limit-fn-normalized
+ nil))))
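+
+;; Illustrative usage sketch: the session name and the 3000-cycle limit below are arbitrary.
+;;
+;;   (-> my-session
+;;       (with-loop-detection 3000 :throw-exception)
+;;       (clara.rules/insert (->SomeFact 1))
+;;       clara.rules/fire-rules)
+;;
+;; If more than 3000 activation group transitions occur in one fire-rules call, the
+;; :throw-exception handler throws an ex-info whose data contains
+;; :clara-rules/infinite-loop-suspected true.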
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
new file mode 100644
index 00000000..c2e8d6a3
--- /dev/null
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -0,0 +1,204 @@
+(ns clara.tools.testing-utils
+ "Internal utilities for testing clara-rules and derivative projects. These should be considered experimental
+ right now from the perspective of consumers of clara-rules, although it is possible that this namespace
+ will be made part of the public API once its functionality has proven robust and reliable. The focus, however,
+ is functionality needed to test the rules engine itself."
+ (:require [clara.rules.update-cache.core :as uc]
+ [clara.rules.update-cache.cancelling :as ca]
+ [clara.rules.compiler :as com]
+ [clara.rules.dsl :as dsl]
+ [clojure.test :refer [is]]))
+
+(defmacro def-rules-test
+ "This macro allows creation of rules, queries, and sessions from arbitrary combinations of rules
+ and queries in a setup map without the necessity of creating a namespace or defining a session
+  using defsession. The first argument is the name of the test,
+ and the second argument is a map with entries :rules, :queries, and :sessions. For example usage see
+ clara.test-testing-utils. Note that sessions currently can only contain rules and queries defined
+ in the setup map; supporting other rule sources such as namespaces and defrule/defquery may be possible
+ in the future.
+
+  Namespaces consuming this macro are expected to require clara.rules and clojure.test;
+  the macroexpanded code does not add these requires itself. The test namespaces consuming
+  this will in all likelihood have these dependencies anyway, so this probably isn't a
+  significant shortcoming of this macro."
+ [name params & forms]
+ (let [sym->rule (->> params
+ :rules
+ (partition 2)
+ (into {}
+ (map (fn [[rule-name [lhs rhs props]]]
+ [rule-name (assoc (dsl/parse-rule* lhs rhs props {}) :name (str rule-name))]))))
+
+ sym->query (->> params
+ :queries
+ (partition 2)
+ (into {}
+ (map (fn [[query-name [params lhs]]]
+ [query-name (assoc (dsl/parse-query* params lhs {}) :name (str query-name))]))))
+
+ production-syms->productions (fn [p-syms]
+ (map (fn [s]
+ (or (get sym->rule s)
+ (get sym->query s)))
+ p-syms))
+
+ session-syms->session-forms (->> params
+ :sessions
+ (partition 3)
+ (into []
+ (comp (map (fn [[session-name production-syms session-opts]]
+ [session-name (production-syms->productions production-syms) session-opts]))
+ (map (fn [[session-name productions session-opts]]
+ [session-name `(com/mk-session ~(into [(vec productions)]
+ cat
+ session-opts))]))
+ cat)))
+
+ test-form `(clojure.test/deftest
+ ~name
+ (let [~@session-syms->session-forms
+ ~@(sequence cat sym->query)
+ ~@(sequence cat sym->rule)]
+ ~@forms))]
+ test-form))
+
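+;; Illustrative usage sketch of def-rules-test: a hypothetical test using the Temperature
+;; fact from clara.rules.testfacts; clara.rules and clojure.test are assumed to be required
+;; by the consuming namespace.
+;;
+;;   (def-rules-test test-cold-query
+;;     {:queries [cold-query [[] [[Temperature (< temperature 20) (= ?t temperature)]]]]
+;;      :rules []
+;;      :sessions [session [cold-query] {}]}
+;;     (is (= [{:?t 10}]
+;;            (-> session
+;;                (clara.rules/insert (clara.rules.testfacts/->Temperature 10 "MCI"))
+;;                clara.rules/fire-rules
+;;                (clara.rules/query cold-query)))))
+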
+(defn opts-fixture
+ ;; For operations other than replace-facts uc/get-ordered-update-cache is currently
+ ;; always used. This fixture ensures that CancellingUpdateCache is tested for a wide
+  ;; variety of different cases rather than a few cases specific to it.
+ [f]
+ (f)
+ (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
+ (f)))
+
+(defn join-filter-equals
+ "Intended to be a test function that is the same as equals, but is not visible to Clara as such
+ and thus forces usage of join filters instead of hash joins"
+ [& args]
+ (apply = args))
+
+(def side-effect-holder (atom nil))
+
+(defn side-effect-holder-fixture
+ "Fixture to reset the side effect holder to nil both before and after tests.
+ This should be used as a :each fixture."
+ [t]
+ (reset! side-effect-holder nil)
+ (t)
+ (reset! side-effect-holder nil))
+
+(defn time-execution
+ [func]
+ (let [start (System/currentTimeMillis)
+ _ (func)
+ stop (System/currentTimeMillis)]
+ (- stop start)))
+
+(defn execute-tests
+ [func iterations]
+ (let [execution-times (for [_ (range iterations)]
+ (time-execution func))
+ sum #(reduce + %)
+ mean (/ (sum execution-times) iterations)
+ std (->
+ (into []
+ (comp
+ (map #(- % mean))
+ (map #(Math/pow (double %) 2.0)))
+ execution-times)
+ sum
+ (/ iterations)
+ Math/sqrt)]
+ {:std (double std)
+ :mean (double mean)}))
+
+(defn run-performance-test
+ "Created as a rudimentary alternative to criterium, due to assumptions made during benchmarking. Specifically, that
+ criterium attempts to reach a steady state of compiled and loaded classes. This fundamentally doesn't work when the
+ metrics needed rely on compilation or evaluation."
+ [form]
+ (let [{:keys [description func iterations mean-assertion verbose]} form
+ {:keys [std mean]} (execute-tests func iterations)]
+ (when verbose
+ (println (str \newline "Running Performance tests for:"))
+ (println description)
+ (println "==========================================")
+ (println (str "Mean: " mean "ms"))
+ (println (str "Standard Deviation: " std "ms" \newline)))
+ (is (mean-assertion mean)
+ (str "Actual mean value: " mean))
+ {:mean mean
+ :std std}))
+
+(defn ex-data-search
+ ([^Exception e edata]
+ (ex-data-search e nil edata))
+ ([^Exception e emsg edata]
+ (loop [non-matches []
+ e e]
+ (cond
+ ;; Found match.
+ (and (= edata
+ (select-keys (ex-data e)
+ (keys edata)))
+ (or (= emsg
+ (.getMessage e))
+ (nil? emsg)))
+ :success
+
+ ;; Keep searching, record any non-matching ex-data.
+ (.getCause e)
+ (recur (if-let [ed (ex-data e)]
+ (conj non-matches {(.getMessage e) ed})
+ non-matches)
+ (.getCause e))
+
+ ;; Can't find a match.
+ :else
+ non-matches))))
+
+(defn get-all-ex-data
+ "Walk a Throwable chain and return a sequence of all data maps
+ from any ExceptionInfo instances in that chain."
+ [e]
+ (let [get-ex-chain (fn get-ex-chain [e]
+ (if-let [cause (.getCause e)]
+ (conj (get-ex-chain cause) e)
+ [e]))]
+
+ (map ex-data
+ (filter (partial instance? clojure.lang.IExceptionInfo)
+ (get-ex-chain e)))))
+
+(defmacro assert-ex-data
+ ([expected-ex-data form]
+ `(assert-ex-data nil ~expected-ex-data ~form))
+ ([expected-ex-message expected-ex-data form]
+ `(try
+ ~form
+ (is false
+ (str "Exception expected to be thrown when evaluating: " \newline
+ '~form))
+ (catch Exception e#
+ (let [res# (ex-data-search e# ~expected-ex-message ~expected-ex-data)]
+ (is (= :success res#)
+ (str "Exception msg found: " \newline
+ e# \newline
+ "Non matches found: " \newline
+ res#)))))))
+
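+;; Illustrative usage sketch (hypothetical data map): the assertion passes because the
+;; expected ex-data is a subset of the thrown ex-data.
+;;
+;;   (assert-ex-data {:some-key :some-value}
+;;                   (throw (ex-info "boom" {:some-key :some-value :other 1})))
+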
+(defn ex-data-maps
+ "Given a throwable/exception/error `t`, return all `ex-data` maps from the stack trace cause chain in
+ the order they occur traversing the chain from this `t` through the rest of the call stack."
+ [t]
+ (let [append-self (fn append-self
+ [prior t1]
+ (if t1
+ (append-self (conj prior t1) (.getCause ^Throwable t1))
+ prior))
+ throwables (append-self [] t)]
+ (into []
+ (comp (map ex-data))
+ throwables)))
diff --git a/src/main/clojure/clara/tools/testing_utils.cljc b/src/main/clojure/clara/tools/testing_utils.cljc
deleted file mode 100644
index fc2e890f..00000000
--- a/src/main/clojure/clara/tools/testing_utils.cljc
+++ /dev/null
@@ -1,229 +0,0 @@
-#?(:clj
- (ns clara.tools.testing-utils
- "Internal utilities for testing clara-rules and derivative projects. These should be considered experimental
- right now from the perspective of consumers of clara-rules, although it is possible that this namespace
- will be made part of the public API once its functionality has proven robust and reliable. The focus, however,
- is functionality needed to test the rules engine itself."
- (:require [clara.rules.update-cache.core :as uc]
- [clara.rules.update-cache.cancelling :as ca]
- [clara.rules.compiler :as com]
- [clara.macros :as m]
- [clara.rules.dsl :as dsl]
- [clojure.test :refer [is]]))
- :cljs
- (ns clara.tools.testing-utils
- (:require [clara.rules.update-cache.core :as uc])
- (:require-macros [clara.tools.testing-utils]
- [cljs.test :refer [is]])))
-
-#?(:clj
- (defmacro def-rules-test
- "This macro allows creation of rules, queries, and sessions from arbitrary combinations of rules
- and queries in a setup map without the necessity of creating a namespace or defining a session
- using defsession in both Clojure and ClojureScript. The first argument is the name of the test,
- and the second argument is a map with entries :rules, :queries, and :sessions. For example usage see
- clara.test-testing-utils. Note that sessions currently can only contain rules and queries defined
- in the setup map; supporting other rule sources such as namespaces and defrule/defquery may be possible
- in the future.
-
- Namespaces consuming this macro are expected to require clara.rules and either clojure.test or cljs.test.
- Unfortunately, at this time we can't add inline requires for these namespace with the macroexpanded code in
- ClojureScript; see https://anmonteiro.com/2016/10/clojurescript-require-outside-ns/ for some discussion on the
- subject. However, the test namespaces consuming this will in all likelihood have these dependencies anyway
- so this probably isn't a significant shortcoming of this macro."
- [name params & forms]
- (let [sym->rule (->> params
- :rules
- (partition 2)
- (into {}
- (map (fn [[rule-name [lhs rhs props]]]
- [rule-name (assoc (dsl/parse-rule* lhs rhs props {}) :name (str rule-name))]))))
-
- sym->query (->> params
- :queries
- (partition 2)
- (into {}
- (map (fn [[query-name [params lhs]]]
- [query-name (assoc (dsl/parse-query* params lhs {}) :name (str query-name))]))))
-
- production-syms->productions (fn [p-syms]
- (map (fn [s]
- (or (get sym->rule s)
- (get sym->query s)))
- p-syms))
-
- session-syms->session-forms (->> params
- :sessions
- (partition 3)
- (into []
- (comp (map (fn [[session-name production-syms session-opts]]
- [session-name (production-syms->productions production-syms) session-opts]))
- (map (fn [[session-name productions session-opts]]
- [session-name (if (com/compiling-cljs?)
- (m/productions->session-assembly-form (map eval productions) session-opts)
- `(com/mk-session ~(into [(vec productions)]
- cat
- session-opts)))]))
- cat)))
-
- test-form `(~(if (com/compiling-cljs?)
- 'cljs.test/deftest
- 'clojure.test/deftest)
- ~name
- (let [~@session-syms->session-forms
- ~@(sequence cat sym->query)
- ~@(sequence cat sym->rule)]
- ~@forms))]
- test-form)))
-
-#?(:clj
- (defn opts-fixture
- ;; For operations other than replace-facts uc/get-ordered-update-cache is currently
- ;; always used. This fixture ensures that CancellingUpdateCache is tested for a wide
- ;; variety of different cases rather than a few cases cases specific to it.
- [f]
- (f)
- (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
- (f))))
-
-(defn join-filter-equals
- "Intended to be a test function that is the same as equals, but is not visible to Clara as such
- and thus forces usage of join filters instead of hash joins"
- [& args]
- (apply = args))
-
-(def side-effect-holder (atom nil))
-
-(defn side-effect-holder-fixture
- "Fixture to reset the side effect holder to nil both before and after tests.
- This should be used as a :each fixture."
- [t]
- (reset! side-effect-holder nil)
- (t)
- (reset! side-effect-holder nil))
-
-#?(:clj
- (defn time-execution
- [func]
- (let [start (System/currentTimeMillis)
- _ (func)
- stop (System/currentTimeMillis)]
- (- stop start)))
- :cljs
- (defn time-execution
- [func]
- (let [start (.getTime (js/Date.))
- _ (func)
- stop (.getTime (js/Date.))]
- (- stop start))))
-
-(defn execute-tests
- [func iterations]
- (let [execution-times (for [_ (range iterations)]
- (time-execution func))
- sum #(reduce + %)
- mean (/ (sum execution-times) iterations)
- std (->
- (into []
- (comp
- (map #(- % mean))
- (map #(Math/pow (double %) 2.0)))
- execution-times)
- sum
- (/ iterations)
- Math/sqrt)]
- {:std (double std)
- :mean (double mean)}))
-
-(defn run-performance-test
- "Created as a rudimentary alternative to criterium, due to assumptions made during benchmarking. Specifically, that
- criterium attempts to reach a steady state of compiled and loaded classes. This fundamentally doesn't work when the
- metrics needed rely on compilation or evaluation."
- [form]
- (let [{:keys [description func iterations mean-assertion verbose]} form
- {:keys [std mean]} (execute-tests func iterations)]
- (when verbose
- (println (str \newline "Running Performance tests for:"))
- (println description)
- (println "==========================================")
- (println (str "Mean: " mean "ms"))
- (println (str "Standard Deviation: " std "ms" \newline)))
- (is (mean-assertion mean)
- (str "Actual mean value: " mean))
- {:mean mean
- :std std}))
-
-#?(:clj
- (defn ex-data-search
- ([^Exception e edata]
- (ex-data-search e nil edata))
- ([^Exception e emsg edata]
- (loop [non-matches []
- e e]
- (cond
- ;; Found match.
- (and (= edata
- (select-keys (ex-data e)
- (keys edata)))
- (or (= emsg
- (.getMessage e))
- (nil? emsg)))
- :success
-
- ;; Keep searching, record any non-matching ex-data.
- (.getCause e)
- (recur (if-let [ed (ex-data e)]
- (conj non-matches {(.getMessage e) ed})
- non-matches)
- (.getCause e))
-
- ;; Can't find a match.
- :else
- non-matches)))))
-
-#?(:clj
- (defn get-all-ex-data
- "Walk a Throwable chain and return a sequence of all data maps
- from any ExceptionInfo instances in that chain."
- [e]
- (let [get-ex-chain (fn get-ex-chain [e]
- (if-let [cause (.getCause e)]
- (conj (get-ex-chain cause) e)
- [e]))]
-
- (map ex-data
- (filter (partial instance? clojure.lang.IExceptionInfo)
- (get-ex-chain e))))))
-
-#?(:clj
- (defmacro assert-ex-data
- ([expected-ex-data form]
- `(assert-ex-data nil ~expected-ex-data ~form))
- ([expected-ex-message expected-ex-data form]
- `(try
- ~form
- (is false
- (str "Exception expected to be thrown when evaluating: " \newline
- '~form))
- (catch Exception e#
- (let [res# (ex-data-search e# ~expected-ex-message ~expected-ex-data)]
- (is (= :success res#)
- (str "Exception msg found: " \newline
- e# \newline
- "Non matches found: " \newline
- res#))))))))
-
-#?(:clj
- (defn ex-data-maps
- "Given a throwable/exception/error `t`, return all `ex-data` maps from the stack trace cause chain in
- the order they occur traversing the chain from this `t` through the rest of the call stack."
- [t]
- (let [append-self (fn append-self
- [prior t1]
- (if t1
- (append-self (conj prior t1) (.getCause ^Throwable t1))
- prior))
- throwables (append-self [] t)]
- (into []
- (comp (map ex-data))
- throwables))))
diff --git a/src/main/clojure/clara/tools/tracing.clj b/src/main/clojure/clara/tools/tracing.clj
new file mode 100644
index 00000000..4cc6d955
--- /dev/null
+++ b/src/main/clojure/clara/tools/tracing.clj
@@ -0,0 +1,175 @@
+(ns clara.tools.tracing
+ "Support for tracing state changes in a Clara session."
+ (:require [clara.rules.listener :as l]
+ [clara.rules.engine :as eng]))
+
+(declare to-tracing-listener)
+
+(deftype PersistentTracingListener [trace]
+ l/IPersistentEventListener
+ (to-transient [listener]
+ (to-tracing-listener listener)))
+
+(declare append-trace)
+
+(deftype TracingListener [trace]
+ l/ITransientEventListener
+ (left-activate! [listener node tokens]
+ (append-trace listener {:type :left-activate :node-id (:id node) :tokens tokens}))
+
+ (left-retract! [listener node tokens]
+ (append-trace listener {:type :left-retract :node-id (:id node) :tokens tokens}))
+
+ (right-activate! [listener node elements]
+ (append-trace listener {:type :right-activate :node-id (:id node) :elements elements}))
+
+ (right-retract! [listener node elements]
+ (append-trace listener {:type :right-retract :node-id (:id node) :elements elements}))
+
+ (insert-facts! [listener node token facts]
+ (append-trace listener {:type :add-facts :node node :token token :facts facts}))
+
+ (alpha-activate! [listener node facts]
+ (append-trace listener {:type :alpha-activate :facts facts}))
+
+ (insert-facts-logical! [listener node token facts]
+ (append-trace listener {:type :add-facts-logical :node node :token token :facts facts}))
+
+ (retract-facts! [listener node token facts]
+ (append-trace listener {:type :retract-facts :node node :token token :facts facts}))
+
+ (alpha-retract! [listener node facts]
+ (append-trace listener {:type :alpha-retract :facts facts}))
+
+ (retract-facts-logical! [listener node token facts]
+ (append-trace listener {:type :retract-facts-logical :node node :token token :facts facts}))
+
+ (add-accum-reduced! [listener node join-bindings result fact-bindings]
+ (append-trace listener {:type :accum-reduced
+ :node-id (:id node)
+ :join-bindings join-bindings
+ :result result
+ :fact-bindings fact-bindings}))
+
+ (remove-accum-reduced! [listener node join-bindings fact-bindings]
+ (append-trace listener {:type :remove-accum-reduced
+ :node-id (:id node)
+ :join-bindings join-bindings
+ :fact-bindings fact-bindings}))
+
+ (add-activations! [listener node activations]
+ (append-trace listener {:type :add-activations :node-id (:id node) :tokens (map :token activations)}))
+
+ (remove-activations! [listener node activations]
+ (append-trace listener {:type :remove-activations :node-id (:id node) :activations activations}))
+
+ (fire-activation! [listener activation resulting-operations]
+ (append-trace listener {:type :fire-activation :activation activation :resulting-operations resulting-operations}))
+
+ (fire-rules! [listener node]
+ (append-trace listener {:type :fire-rules :node-id (:id node)}))
+
+ (activation-group-transition! [listener previous-group new-group]
+ (append-trace listener {:type :activation-group-transition :new-group new-group :previous-group previous-group}))
+
+ (to-persistent! [listener]
+ (PersistentTracingListener. @trace)))
+
+(defn- to-tracing-listener [^PersistentTracingListener listener]
+ (TracingListener. (atom (.-trace listener))))
+
+(defn- append-trace
+ "Appends a trace event and returns a new listener with it."
+ [^TracingListener listener event]
+ (reset! (.-trace listener) (conj @(.-trace listener) event)))
+
+(defn tracing-listener
+ "Creates a persistent tracing event listener"
+ []
+ (PersistentTracingListener. []))
+
+(defn is-tracing?
+ "Returns true if the given session has tracing enabled, false otherwise."
+ [session]
+ (let [{:keys [listeners]} (eng/components session)]
+ (boolean (some #(instance? PersistentTracingListener %) listeners))))
+
+(defn with-tracing
+ "Returns a new session identical to the given one, but with tracing enabled.
+ The given session is returned unmodified if tracing is already enabled."
+ [session]
+ (if (is-tracing? session)
+ session
+ (eng/with-listener session (PersistentTracingListener. []))))
+
+(defn without-tracing
+ "Returns a new session identical to the given one, but with tracing disabled
+ The given session is returned unmodified if tracing is already disabled."
+ [session]
+ (eng/remove-listeners session (partial instance? PersistentTracingListener)))
+
+(defn get-trace
+ "Returns the trace from the given session."
+ [session]
+ (if-let [listener (->> (eng/components session)
+ :listeners
+ (filter #(instance? PersistentTracingListener %))
+ (first))]
+ (.-trace ^PersistentTracingListener listener)
+ (throw (ex-info "No tracing listener attached to session." {:session session}))))
+
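+;; Illustrative usage sketch: the session and fact constructor names below are hypothetical.
+;;
+;;   (-> my-session
+;;       with-tracing
+;;       (clara.rules/insert (->SomeFact 1))
+;;       clara.rules/fire-rules
+;;       get-trace)
+;;
+;; The result is a vector of trace event maps such as {:type :add-activations :node-id 7 ...}.
+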
+(defn listener->trace
+ [listener]
+ (let [tracing-listener (cond
+ (instance? PersistentTracingListener listener)
+ listener
+
+ (some (partial instance? PersistentTracingListener) (l/flatten-listener listener))
+ (first (filter (partial instance? PersistentTracingListener) (l/flatten-listener listener))))]
+ (when tracing-listener
+ (.-trace ^PersistentTracingListener tracing-listener))))
+
+(defn ^:private node-id->productions
+ "Given a session and a node ID return a list of the rule and query names associated
+ with the node."
+ [session id]
+ (let [node (-> session eng/components :rulebase :id-to-node (get id))]
+ (into []
+ (comp
+ (map second)
+ cat
+ (map second))
+ (eng/get-conditions-and-rule-names node))))
+
+(defn ranked-productions
+ "Given a session with tracing enabled, return a map of rule and query names
+   to a numerical score that approximates the relative amount of processing
+   Clara performed in relation to each rule. This score is not intended
+   to have a precise meaning; it serves solely as a rough guide
+   to which rules and queries should be considered
+   the first suspects when diagnosing performance problems in rules sessions.
+ It is possible for a relatively small number of interactions to take a long
+ time if those interactions are particularly costly. It is expected that
+ the results may change between different versions when Clara's internals change,
+ for example to optimize the rules network. Nevertheless, it is anticipated
+ that this will provide useful information for a first pass at rules
+ performance problem debugging. This should not be used to drive user logic.
+
+ This currently returns a Clojure array map in order to conveniently have the rules
+ with the most interactions printed first in the string representation of the map."
+ [session]
+ (let [node-ids (->> session
+ get-trace
+ (map :node-id))
+
+ production-names (into []
+ (comp
+ (map (partial node-id->productions session))
+ cat)
+ node-ids)
+
+ production-name->interactions (frequencies production-names)
+
+ ranked-tuples (reverse (sort-by second production-name->interactions))]
+
+ (apply array-map (into [] cat ranked-tuples))))
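+
+;; Illustrative usage sketch (requires a session with tracing enabled; names and counts
+;; below are hypothetical):
+;;
+;;   (ranked-productions traced-session)
+;;   ;; => {"my.ns/busy-rule" 120, "my.ns/quiet-query" 3}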
diff --git a/src/test/clojure/clara/generative/generators.clj b/src/test/clojure/clara/generative/generators.clj
index cb3d2486..a8ecb9e8 100644
--- a/src/test/clojure/clara/generative/generators.clj
+++ b/src/test/clojure/clara/generative/generators.clj
@@ -4,13 +4,13 @@
[schema.core :as s]))
(s/defschema FactSessionOperation {:type (s/enum :insert :retract)
- :facts [s/Any]})
+ :facts [s/Any]})
(s/defschema FireSessionOperation {:type (s/enum :fire)})
(s/defschema SessionOperation (s/conditional
- #(= (:type %) :fire) FireSessionOperation
- :else FactSessionOperation))
+ #(= (:type %) :fire) FireSessionOperation
+ :else FactSessionOperation))
(defn session-run-ops
"Run the provided sequence of operations on the provide session and return the final session."
@@ -38,7 +38,7 @@
any-count-negative? (fn [fc]
(boolean (some neg? (vals fc))))]
-
+
(= ::premature-retract (reduce (fn [fact-count op]
(let [new-count (condp = (:type op)
:insert (inc-fact-count fact-count (:facts op))
@@ -87,7 +87,7 @@
{:keys [dup-level] :or {dup-level 0}}]
(let [dup-ops-seqs (ops->add-insert-retract ops dup-level)
permutations (mapcat combo/permutations dup-ops-seqs)]
-
+
;; The permutation creation allows for a retraction to occur before insertion, which
;; effectively removes the retraction from the seq of operations since retractions of facts
;; that are not present do not cause alteration of the session state. The idea of these helpers
@@ -99,4 +99,4 @@
;;
;; For now, we can just find all permutations and remove the ones with invalid ordering.
;; This is inefficient and there may be a more efficient algorithm or implementation.
- (remove retract-before-insertion? permutations)))
+ (remove retract-before-insertion? permutations)))
diff --git a/src/test/clojure/clara/generative/test_accum.clj b/src/test/clojure/clara/generative/test_accum.clj
index 66f48ef0..cda79545 100644
--- a/src/test/clojure/clara/generative/test_accum.clj
+++ b/src/test/clojure/clara/generative/test_accum.clj
@@ -15,7 +15,7 @@
(use-fixtures :once schema.test/validate-schemas)
-(deftest test-simple-all-condition-binding-groups
+(deftest ^:generative test-simple-all-condition-binding-groups
(let [r (dsl/parse-rule [[?ts <- (acc/all) :from [Temperature (= ?loc location)]]]
;; The all accumulator can return facts in different orders, so we sort
;; the temperatures to make asserting on the output easier.
@@ -50,7 +50,7 @@
expected-temp-hist (frequencies [{:?history (->TemperatureHistory ["MCI" [11 19]])}
{:?history (->TemperatureHistory ["ORD" [1]])}])]
(= actual-temp-hist expected-temp-hist)))]
-
+
(doseq [permutation (map #(concat % [{:type :fire}]) operation-permutations)
:let [session (gen/session-run-ops empty-session permutation)]]
(is (expected-output? session permutation)
@@ -75,7 +75,7 @@
(->Temperature 25 "LGA")])
operation-permutations (gen/ops->permutations operations {})]
-
+
(doseq [permutation (map #(concat % [{:type :fire}]) operation-permutations)
:let [session (gen/session-run-ops empty-session permutation)
output (query session q)]]
@@ -86,7 +86,7 @@
"Output was: "
(into [] output)))))))
-(deftest test-min-accum-with-binding-groups
+(deftest ^:generative test-min-accum-with-binding-groups
(let [coldest-rule (dsl/parse-rule [[?coldest-temp <- (acc/min :temperature :returns-fact true)
:from [ColdAndWindy (= ?w windspeed)]]]
(insert! (->Cold (:temperature ?coldest-temp))))
@@ -121,7 +121,7 @@
"The output was: "
(into [] output))))))
-(deftest test-min-accum-without-binding-groups
+(deftest ^:generative test-min-accum-without-binding-groups
(let [coldest-rule (dsl/parse-rule [[?coldest <- (acc/min :temperature) :from [Cold]]]
(insert! (->Temperature ?coldest "MCI")))
temp-query (dsl/parse-query [] [[Temperature (= ?t temperature)]])
@@ -147,7 +147,7 @@
:let [session (gen/session-run-ops empty-session permutation)
output (query session temp-query)]]
-
+
(is (= output
[{:?t (min temp-1 temp-2)}])
(str "Did not find the correct minimum temperature for permutation: "
diff --git a/src/test/clojure/clara/generative/test_generators.clj b/src/test/clojure/clara/generative/test_generators.clj
index f28e48ae..41ab9f41 100644
--- a/src/test/clojure/clara/generative/test_generators.clj
+++ b/src/test/clojure/clara/generative/test_generators.clj
@@ -7,7 +7,7 @@
(use-fixtures :once schema.test/validate-schemas)
;; Basic sanity test of the insert/retract/fire permutation generation.
-(deftest test-basic-permutations
+(deftest ^:generative test-basic-permutations
(let [base-ops [{:type :insert
:facts [:a]}]
permuted-ops (ops->permutations base-ops {:dup-level 1})]
diff --git a/src/test/clojure/clara/other_ruleset.clj b/src/test/clojure/clara/other_ruleset.clj
index a2de3e75..afabe846 100644
--- a/src/test/clojure/clara/other_ruleset.clj
+++ b/src/test/clojure/clara/other_ruleset.clj
@@ -23,7 +23,7 @@
(defquery temp-by-location
"Query temperatures by location."
[:?loc]
- (Temperature (== ?temp temperature)
+ (Temperature (== ?temp temperature)
(== ?loc location)))
diff --git a/src/test/clojure/clara/performance/test_compilation.clj b/src/test/clojure/clara/performance/test_compilation.clj
index 33ad4bed..803b614d 100644
--- a/src/test/clojure/clara/performance/test_compilation.clj
+++ b/src/test/clojure/clara/performance/test_compilation.clj
@@ -21,11 +21,11 @@
(for [seed-sym seed-syms
:let [next-fact (symbol (str seed-sym "prime"))
production (assoc base-production
- :lhs [{:type (keyword seed-sym)
- :constraints [`(= ~'this ~'?binding) `(filter-fn ~'this)]}]
- :rhs `(r/insert! (with-meta ~(set (repeatedly 10 #(rand-nth (range 100))))
- {:type ~(keyword next-fact)
- :val ~'?binding})))]]
+ :lhs [{:type (keyword seed-sym)
+ :constraints [`(= ~'this ~'?binding) `(filter-fn ~'this)]}]
+ :rhs `(r/insert! (with-meta ~(set (repeatedly 10 #(rand-nth (range 100))))
+ {:type ~(keyword next-fact)
+ :val ~'?binding})))]]
[next-fact production])))
(defn generate-compose-productions
@@ -34,12 +34,12 @@
([l] (template l l))
([l r] (let [next-fact (symbol (str l r "prime"))
production (assoc base-production
- :lhs [{:type (keyword l)
- :constraints [`(= ~'this ~'?binding-l)]}
- {:type (keyword r)
- :constraints [`(= ~'?binding-l ~'this)]}]
- :rhs `(r/insert! (with-meta ~(set (repeatedly 10 #(rand-nth (range 100))))
- {:type ~(keyword next-fact)})))]
+ :lhs [{:type (keyword l)
+ :constraints [`(= ~'this ~'?binding-l)]}
+ {:type (keyword r)
+ :constraints [`(= ~'?binding-l ~'this)]}]
+ :rhs `(r/insert! (with-meta ~(set (repeatedly 10 #(rand-nth (range 100))))
+ {:type ~(keyword next-fact)})))]
[next-fact production])))]
(into {}
(for [combo (partition-all 2 (shuffle seed-syms))]
@@ -51,12 +51,12 @@
(for [seed-sym seed-syms
:let [next-fact (symbol (str seed-sym "prime"))
production (assoc base-production
- :lhs [{:accumulator `(acc/all)
- :from {:type (keyword seed-sym),
- :constraints [`(filter-fn ~'this)]}
- :result-binding :?binding}]
- :rhs `(r/insert! (with-meta ~'?binding
- {:type ~(keyword next-fact)})))]]
+ :lhs [{:accumulator `(acc/all)
+ :from {:type (keyword seed-sym),
+ :constraints [`(filter-fn ~'this)]}
+ :result-binding :?binding}]
+ :rhs `(r/insert! (with-meta ~'?binding
+ {:type ~(keyword next-fact)})))]]
[next-fact production])))
(defn generate-queries
@@ -64,10 +64,10 @@
(into {}
(for [seed-sym seed-syms
:let [production (assoc base-production
- :lhs [{:type (keyword seed-sym)
- :constraints []
- :fact-binding :?binding}]
- :params #{})]]
+ :lhs [{:type (keyword seed-sym)
+ :constraints []
+ :fact-binding :?binding}]
+ :params #{})]]
[seed-sym production])))
(defn generate-rules-and-opts
@@ -91,31 +91,31 @@
(let [rules (generate-rules-and-opts 500)]
(testing "Session creation performance"
(utils/run-performance-test
- {:description "Generated Session Compilation"
- :func #(com/mk-session rules)
- :iterations 50
- :mean-assertion (partial > 5000)}))
+ {:description "Generated Session Compilation"
+ :func #(com/mk-session rules)
+ :iterations 50
+ :mean-assertion (partial > 5000)}))
(let [session (com/mk-session rules)
os (ByteArrayOutputStream.)]
(testing "Session rulebase serialization performance"
(utils/run-performance-test
- {:description "Session rulebase serialization"
- :func #(dura/serialize-rulebase
- session
- (fres/create-session-serializer (ByteArrayOutputStream.)))
- :iterations 50
- :mean-assertion (partial > 1000)}))
+ {:description "Session rulebase serialization"
+ :func #(dura/serialize-rulebase
+ session
+ (fres/create-session-serializer (ByteArrayOutputStream.)))
+ :iterations 50
+ :mean-assertion (partial > 1000)}))
(testing "Session rulebase deserialization performance"
(dura/serialize-rulebase
- session
- (fres/create-session-serializer os))
+ session
+ (fres/create-session-serializer os))
(let [session-bytes (.toByteArray os)]
(utils/run-performance-test
- {:description "Session rulebase deserialization"
- :func #(dura/deserialize-rulebase
- (fres/create-session-serializer (ByteArrayInputStream. session-bytes)))
- :iterations 50
- :mean-assertion (partial > 5000)}))))))
+ {:description "Session rulebase deserialization"
+ :func #(dura/deserialize-rulebase
+ (fres/create-session-serializer (ByteArrayInputStream. session-bytes)))
+ :iterations 50
+ :mean-assertion (partial > 5000)}))))))
diff --git a/src/test/common/clara/performance/test_rule_execution.cljc b/src/test/clojure/clara/performance/test_rule_execution.clj
similarity index 82%
rename from src/test/common/clara/performance/test_rule_execution.cljc
rename to src/test/clojure/clara/performance/test_rule_execution.clj
index 44707311..ed2c35de 100644
--- a/src/test/common/clara/performance/test_rule_execution.cljc
+++ b/src/test/clojure/clara/performance/test_rule_execution.clj
@@ -1,13 +1,8 @@
(ns clara.performance.test-rule-execution
(:require [clara.rules.accumulators :as acc]
[clara.rules :as r]
- #?(:clj
[clojure.test :refer :all]
- :cljs [cljs.test :refer-macros [is deftest]])
- #?(:clj
- [clara.tools.testing-utils :refer [def-rules-test run-performance-test]]
- :cljs [clara.tools.testing-utils :refer [run-performance-test]]))
- #?(:cljs (:require-macros [clara.tools.testing-utils :refer [def-rules-test]])))
+ [clara.tools.testing-utils :refer [def-rules-test run-performance-test]]))
(defrecord AFact [id])
(defrecord BFact [id])
@@ -16,7 +11,7 @@
(def counter (atom {:a-count 0
:b-count 0}))
-(def number-of-facts #?(:clj 1500 :cljs 150))
+(def number-of-facts 1500)
(def-rules-test test-get-in-perf
{:rules [rule [[[?parent <- ParentFact]
@@ -39,4 +34,4 @@
(r/insert-all facts)
r/fire-rules)
:iterations 5
- :mean-assertion (partial > 10000)})))
\ No newline at end of file
+ :mean-assertion (partial > 10000)})))
diff --git a/src/test/clojure/clara/rule_defs.clj b/src/test/clojure/clara/rule_defs.clj
new file mode 100644
index 00000000..bb9f3af5
--- /dev/null
+++ b/src/test/clojure/clara/rule_defs.clj
@@ -0,0 +1,37 @@
+(ns clara.rule-defs
+ (:require [clara.rules.accumulators :as acc]
+ [clara.rules.testfacts :as tf]
+ [clara.tools.testing-utils :as tu]
+ [clara.rules :refer [defrule defquery insert!]])
+ (:import [clara.rules.testfacts Temperature WindSpeed ColdAndWindy]))
+
+;; Rule definitions used for tests in clara.test-rules-require.
+
+(defrule test-rule
+ [?t <- Temperature (< temperature 20)]
+ =>
+ (reset! tu/side-effect-holder ?t))
+
+(defquery cold-query
+ []
+ [Temperature (< temperature 20) (== ?t temperature)])
+
+;; Accumulator for getting the lowest temperature.
+(def lowest-temp (acc/min :temperature))
+
+(defquery coldest-query
+ []
+ [?t <- lowest-temp :from [Temperature]])
+
+(defrule is-cold-and-windy
+ "Rule to determine whether it is indeed cold and windy."
+
+ [Temperature (< temperature 20) (== ?t temperature)]
+ [WindSpeed (> windspeed 30) (== ?w windspeed)]
+ =>
+ (insert! (tf/->ColdAndWindy ?t ?w)))
+
+(defquery find-cold-and-windy
+ []
+ [?fact <- ColdAndWindy])
+
diff --git a/src/test/clojure/clara/sample_ruleset.clj b/src/test/clojure/clara/sample_ruleset.clj
index 1cb6b13f..ceac8728 100644
--- a/src/test/clojure/clara/sample_ruleset.clj
+++ b/src/test/clojure/clara/sample_ruleset.clj
@@ -11,7 +11,6 @@
ColdAndWindy
LousyWeather]))
-
;;; These rules are used for unit testing loading from a namespace.
(defquery freezing-locations
"Query the freezing locations."
diff --git a/src/test/common/clara/test_accumulation.cljc b/src/test/clojure/clara/test_accumulation.clj
similarity index 78%
rename from src/test/common/clara/test_accumulation.cljc
rename to src/test/clojure/clara/test_accumulation.clj
index 196009ee..bf74cd6c 100644
--- a/src/test/common/clara/test_accumulation.cljc
+++ b/src/test/clojure/clara/test_accumulation.clj
@@ -1,65 +1,35 @@
-#?(:clj
- (ns clara.test-accumulation
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-all!
- insert!
- retract
- query
- accumulate]]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy map->FlexibleFields ->TemperatureHistory
- ->First ->Second ->Hot ->LousyWeather]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy
- FlexibleFields
- TemperatureHistory
- First
- Second
- Hot
- LousyWeather]))
-
- :cljs
- (ns clara.test-accumulation
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-all!
- retract
- query
- accumulate]]
- [clara.rules.testfacts :refer [map->FlexibleFields FlexibleFields
- ->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy
- ->TemperatureHistory TemperatureHistory
- ->First First
- ->Second Second
- ->Hot Hot
- ->LousyWeather LousyWeather]]
- [clara.rules.accumulators :as acc]
- [clara.tools.testing-utils :as tu]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
+(ns clara.test-accumulation
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert-all!
+ insert!
+ retract
+ query]]
+ [clara.rules.accumulators :as acc]
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->ColdAndWindy map->FlexibleFields ->TemperatureHistory
+ ->First ->Second ->Hot ->LousyWeather]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold
+ WindSpeed
+ ColdAndWindy
+ FlexibleFields
+ TemperatureHistory
+ First
+ Second
+ Hot
+ LousyWeather]))
;; Tests focused on DSL functionality such as binding visibility in edge cases, fact field access, etc.
;; The distinction between tests here and tests in files focused on the aspects of Clara that the DSL represents
;; may not be clear and there are borderline cases.
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(def side-effect-holder (atom nil))
@@ -94,7 +64,7 @@
(testing "Unrelated constraint on the field and then a binding with the variable first"
(reset! side-effect-holder nil)
-
+
(-> rule1-session
(insert (->Temperature 10 "MCI"))
fire-rules)
@@ -109,22 +79,22 @@
(defn min-fact
"Function to create a new accumulator for a test."
[field]
- (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (if (or (= value nil)
- (< (field item) (field value) ))
- item
- value))))
+ (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (if (or (= value nil)
+ (< (field item) (field value)))
+ item
+ value))}))
(def-rules-test test-simple-accumulator
- {:queries [coldest-query [[] [[?t <- (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (if (or (= value nil)
- (< (:temperature item) (:temperature value) ))
- item
- value)))
+ {:queries [coldest-query [[] [[?t <- (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (if (or (= value nil)
+ (< (:temperature item) (:temperature value)))
+ item
+ value))})
from [Temperature]]]]
coldest-query-defined [[] [[?t <- (min-fact :temperature) from [Temperature]]]]]
@@ -164,16 +134,16 @@
(defn average-value
"Test accumulator that returns the average of a field"
[field]
- (accumulate
- :initial-value [0 0]
- :reduce-fn (fn [[value count] item]
- [(+ value (field item)) (inc count)])
- :combine-fn (fn [[value1 count1] [value2 count2]]
- [(+ value1 value2) (+ count1 count2)])
- :retract-fn identity-retract
- :convert-return-fn (fn [[value count]] (if (= 0 count)
- nil
- (/ value count)))))
+ (acc/accum
+ {:initial-value [0 0]
+ :reduce-fn (fn [[value count] item]
+ [(+ value (field item)) (inc count)])
+ :combine-fn (fn [[value1 count1] [value2 count2]]
+ [(+ value1 value2) (+ count1 count2)])
+ :retract-fn identity-retract
+ :convert-return-fn (fn [[value count]] (if (= 0 count)
+ nil
+ (/ value count)))}))
(def-rules-test test-accumulator-with-result
@@ -196,22 +166,22 @@
(def-rules-test test-accumulate-with-retract
- {:queries [coldest-query [[] [[?t <- (accumulate
- :initial-value []
- :reduce-fn conj
- :combine-fn concat
+ {:queries [coldest-query [[] [[?t <- (acc/accum
+ {:initial-value []
+ :reduce-fn conj
+ :combine-fn concat
- ;; Retract by removing the retracted item.
- ;; In general, this would need to remove
- ;; only the first matching item to achieve expected semantics.
- :retract-fn (fn [reduced item]
- (remove #{item} reduced))
+ ;; Retract by removing the retracted item.
+ ;; In general, this would need to remove
+ ;; only the first matching item to achieve expected semantics.
+ :retract-fn (fn [reduced item]
+ (remove #{item} reduced))
- ;; Sort here and return the smallest.
- :convert-return-fn (fn [reduced]
- (first
- (sort #(< (:temperature %1) (:temperature %2))
- reduced))))
+ ;; Sort here and return the smallest.
+ :convert-return-fn (fn [reduced]
+ (first
+ (sort #(< (:temperature %1) (:temperature %2))
+ reduced)))})
:from (Temperature (< temperature 20))]]]]
@@ -233,24 +203,24 @@
{:queries [coldest-query-simple-join [[]
[(WindSpeed (= ?loc location))
- [?t <- (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (if (or (= value nil)
- (< (:temperature item) (:temperature value) ))
- item
- value)))
+ [?t <- (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (if (or (= value nil)
+ (< (:temperature item) (:temperature value)))
+ item
+ value))})
:from (Temperature (= ?loc location))]]]
coldest-query-complex-join [[]
[(WindSpeed (= ?loc location))
- [?t <- (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (if (or (= value nil)
- (< (:temperature item) (:temperature value) ))
- item
- value)))
+ [?t <- (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (if (or (= value nil)
+ (< (:temperature item) (:temperature value)))
+ item
+ value))})
:from (Temperature (tu/join-filter-equals ?loc location))]]]]
:sessions [simple-join-session [coldest-query-simple-join] {}
@@ -296,13 +266,13 @@
(def-rules-test test-bound-accumulator-var
{:queries [coldest-query [[:?loc]
- [[?t <- (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (if (or (= value nil)
- (< (:temperature item) (:temperature value) ))
- item
- value)))
+ [[?t <- (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (if (or (= value nil)
+ (< (:temperature item) (:temperature value)))
+ item
+ value))})
:from [Temperature (= ?loc location)]]]]]
:sessions [empty-session [coldest-query] {}]}
@@ -321,17 +291,17 @@
(def-rules-test test-accumulator-rule-with-no-fact-binding
- {:rules [rule [[[(accumulate :initial-value []
- :reduce-fn conj
- :retract-fn identity-retract
- :combine-fn into)
- from [WindSpeed]]]
+ {:rules [rule [[[(acc/accum {:initial-value []
+ :reduce-fn conj
+ :retract-fn identity-retract
+ :combine-fn into})
+ from [WindSpeed]]]
(reset! side-effect-holder true)]]
:sessions [empty-session [rule] {}]}
(reset! side-effect-holder nil)
-
+
(-> empty-session
(insert (->WindSpeed 20 "MCI")
(->WindSpeed 20 "SFO"))
@@ -363,8 +333,7 @@
[:?colder-temps]
set)))))]
-
- ;; Simple insertion of facts at once.
+;; Simple insertion of facts at once.
(check-result (-> empty-session
(insert (->Temperature 15 "MCI")
(->Temperature 10 "ORD")
@@ -372,8 +341,7 @@
(->Temperature 30 "SFO"))
(fire-rules)))
-
- ;; Insert facts separately to ensure they are combined.
+;; Insert facts separately to ensure they are combined.
(check-result (-> empty-session
(insert (->Temperature 15 "MCI")
(->Temperature 5 "LGA")
@@ -631,7 +599,7 @@
(retract (->Temperature 10 "MCI")) ; retracted under same bindings as inserted, but never actually inserted
fire-rules
(query temp-sum-no-join))]
-
+
(is (= (frequencies [{:?s 10 :?t 10}])
(frequencies res))))
@@ -812,7 +780,6 @@
(is (= [{:?his (->TemperatureHistory [temp-10-mci])}]
temp-history))))
-
(def-rules-test test-retract-initial-value-filtered
{:queries [get-temp-history [[] [[?his <- TemperatureHistory]]]]
@@ -873,20 +840,20 @@
fire-rules
(query same-wind-and-temp)))))
-(def maybe-nil-min-temp (accumulate
- :retract-fn identity-retract
- :reduce-fn (fn [value item]
- (let [t (:temperature item)]
- ;; When no :temperature return `value`.
- ;; Note: `value` could be nil.
- (if (and t
- (or (= value nil)
- (< t (:temperature value))))
- item
- value)))))
+(def maybe-nil-min-temp (acc/accum
+ {:retract-fn identity-retract
+ :reduce-fn (fn [value item]
+ (let [t (:temperature item)]
+ ;; When no :temperature return `value`.
+ ;; Note: `value` could be nil.
+ (if (and t
+ (or (= value nil)
+ (< t (:temperature value))))
+ item
+ value)))}))
(def-rules-test test-nil-accum-reduced-has-tokens-retracted-when-new-item-inserted
-
+
{:rules [coldest-temp-rule-no-join [[[?coldest <- maybe-nil-min-temp :from [Temperature]]]
(insert! (->Cold (:temperature ?coldest)))]
@@ -957,14 +924,14 @@
(insert! (->Cold (:temperature ?coldest)))]
coldest-temp-rule-join [[[:max-threshold [{:keys [temperature]}]
- (= ?max-temp temperature)]
+ (= ?max-temp temperature)]
;; Note a non-equality based unification.
;; Gets max temp under a given max threshold.
- [?coldest <- maybe-nil-min-temp :from [Temperature
+ [?coldest <- maybe-nil-min-temp :from [Temperature
;; Gracefully handle nil.
- (< (or temperature 0)
- ?max-temp)]]]
+ (< (or temperature 0)
+ ?max-temp)]]]
(insert! (->Cold (:temperature ?coldest)))]]
@@ -993,12 +960,12 @@
"Failed expected empty query results for AccumulateWithJoinNode.")))
(defn constant-accum [v]
- (accumulate
- :initial-value v
- :reduce-fn (constantly v)
- :combine-fn (constantly v)
- :retract-fn (constantly v)
- :convert-return-fn identity))
+ (acc/accum
+ {:initial-value v
+ :reduce-fn (constantly v)
+ :combine-fn (constantly v)
+ :retract-fn (constantly v)
+ :convert-return-fn identity}))
(def-rules-test test-constant-accum-bindings-downstream-accumulate-node
@@ -1006,8 +973,8 @@
(insert! (->TemperatureHistory [?result ?temperature]))]
r2 [[[Hot (= ?hot-temp temperature)]
- [?result <- (constant-accum []) :from [Cold (< temperature ?hot-temp)
- (= ?temperature temperature)]]]
+ [?result <- (constant-accum []) :from [Cold (< temperature ?hot-temp)
+ (= ?temperature temperature)]]]
(insert! (->TemperatureHistory [?result ?temperature]))]]
:queries [q1 [[] [[TemperatureHistory (= ?temps temperatures)]]]]
@@ -1021,52 +988,52 @@
"AccumulateWithJoinFilterNode"]]]
(is (= (-> empty-session
- fire-rules
- (insert (Cold. 10) (Hot. 100))
- fire-rules
- (query q1))
- [{:?temps [[] 10]}])
- (str "Single Cold fact with double firing for node type " node-type))
-
- (is (= (-> empty-session
- (insert (Cold. 10) (Hot. 100))
- fire-rules
- (query q1))
- [{:?temps [[] 10]}])
- (str "Single Cold fact with single firing for node type " node-type))
-
- (is (= (-> empty-session
- fire-rules
- (insert (->Cold 10) (->Cold 20) (Hot. 100))
- fire-rules
- (query q1)
- frequencies)
- {{:?temps [[] 10]} 1
- {:?temps [[] 20]} 1})
- (str "Two Cold facts with double firing for node type " node-type))
-
- (is (= (-> empty-session
- fire-rules
- (insert (->Cold 10) (->Cold 20) (Hot. 100))
- fire-rules
- (query q1)
- frequencies)
- {{:?temps [[] 10]} 1
- {:?temps [[] 20]} 1})
- (str "Two Cold facts with single firing for node type " node-type))
-
- (is (= (-> empty-session
- (insert (->Cold 10) (->Hot 100))
- (fire-rules)
- (retract (->Cold 10))
- (fire-rules)
- (query q1))
- [])
- (str "Retracting all elements that matched an accumulator with an initial value "
- \newline
- "should cause all facts downstream from the accumulator to be retracted when the accumulator creates binding groups."
- \newline
- "Accumulator node type: " node-type))))
+ fire-rules
+ (insert (Cold. 10) (Hot. 100))
+ fire-rules
+ (query q1))
+ [{:?temps [[] 10]}])
+ (str "Single Cold fact with double firing for node type " node-type))
+
+ (is (= (-> empty-session
+ (insert (Cold. 10) (Hot. 100))
+ fire-rules
+ (query q1))
+ [{:?temps [[] 10]}])
+ (str "Single Cold fact with single firing for node type " node-type))
+
+ (is (= (-> empty-session
+ fire-rules
+ (insert (->Cold 10) (->Cold 20) (Hot. 100))
+ fire-rules
+ (query q1)
+ frequencies)
+ {{:?temps [[] 10]} 1
+ {:?temps [[] 20]} 1})
+ (str "Two Cold facts with double firing for node type " node-type))
+
+ (is (= (-> empty-session
+ fire-rules
+ (insert (->Cold 10) (->Cold 20) (Hot. 100))
+ fire-rules
+ (query q1)
+ frequencies)
+ {{:?temps [[] 10]} 1
+ {:?temps [[] 20]} 1})
+ (str "Two Cold facts with single firing for node type " node-type))
+
+ (is (= (-> empty-session
+ (insert (->Cold 10) (->Hot 100))
+ (fire-rules)
+ (retract (->Cold 10))
+ (fire-rules)
+ (query q1))
+ [])
+ (str "Retracting all elements that matched an accumulator with an initial value "
+ \newline
+ "should cause all facts downstream from the accumulator to be retracted when the accumulator creates binding groups."
+ \newline
+ "Accumulator node type: " node-type))))
(def-rules-test nil-accumulate-node-test
@@ -1105,7 +1072,7 @@
(insert! (->Cold (:temperature ?coldest-temp)))]
coldest-rule-2 [[[Hot (= ?max-temp temperature)]
- [?coldest-temp <- (acc/min :temperature :returns-fact true) :from [ColdAndWindy (< temperature ?max-temp)]]]
+ [?coldest-temp <- (acc/min :temperature :returns-fact true) :from [ColdAndWindy (< temperature ?max-temp)]]]
(insert! (->Cold (:temperature ?coldest-temp)))]]
:queries [cold-query [[] [[Cold (= ?t temperature)]]]]
@@ -1115,46 +1082,46 @@
(doseq [[empty-session node-type]
- [[session-no-join "AccumulateNode"]
- [session-join "AccumulateWithJoinFilterNode"]]]
+ [[session-no-join "AccumulateNode"]
+ [session-join "AccumulateWithJoinFilterNode"]]]
;; Note: in the tests below we deliberately insert or retract one fact at a time and then fire the rules. The idea
;; is to verify that even accumulator activations and retractions that do not immediately change the tokens propagated
;; downstream are registered appropriately for the purpose of truth maintenance later. This pattern ensures that we have
;; distinct activations and retractions.
- (let [all-temps-session (-> empty-session
- (insert (->ColdAndWindy 10 10) (->Hot 100))
- fire-rules
+ (let [all-temps-session (-> empty-session
+ (insert (->ColdAndWindy 10 10) (->Hot 100))
+ fire-rules
;; Even though the minimum won't change, the addition of an additional
;; fact with the same minimum changes how many of these facts can be retracted
;; before the minimum propagated downstream changes.
- (insert (->ColdAndWindy 10 10))
- fire-rules
- (insert (->ColdAndWindy 20 20))
- fire-rules)
+ (insert (->ColdAndWindy 10 10))
+ fire-rules
+ (insert (->ColdAndWindy 20 20))
+ fire-rules)
- one-min-retracted-session (-> all-temps-session
+ one-min-retracted-session (-> all-temps-session
;; Even though the minimum won't change after a single retraction, we now only need
;; one retraction to change the minimum rather than two.
- (retract (->ColdAndWindy 10 10))
- fire-rules)
+ (retract (->ColdAndWindy 10 10))
+ fire-rules)
- all-min-retracted (-> one-min-retracted-session
- (retract (->ColdAndWindy 10 10))
- fire-rules)]
+ all-min-retracted (-> one-min-retracted-session
+ (retract (->ColdAndWindy 10 10))
+ fire-rules)]
- (is (= (query all-temps-session cold-query)
- [{:?t 10}])
- (str "With all 3 ColdAndWindy facts in the session the minimum of 10 should be chosen for node type " node-type))
+ (is (= (query all-temps-session cold-query)
+ [{:?t 10}])
+ (str "With all 3 ColdAndWindy facts in the session the minimum of 10 should be chosen for node type " node-type))
- (is (= (query one-min-retracted-session cold-query)
- [{:?t 10}])
- (str "With only one of the ColdAndWindy facts of a temperature of 10 retracted the minimum is still 10 for node type " node-type))
+ (is (= (query one-min-retracted-session cold-query)
+ [{:?t 10}])
+ (str "With only one of the ColdAndWindy facts of a temperature of 10 retracted the minimum is still 10 for node type " node-type))
- (is (= (query all-min-retracted cold-query)
- [{:?t 20}])
- (str "With both ColdAndWindy facts with a temperature of 10, the new minimum is 20 for node type " node-type)))))
+ (is (= (query all-min-retracted cold-query)
+ [{:?t 20}])
+               (str "With both ColdAndWindy facts with a temperature of 10 retracted, the new minimum is 20 for node type " node-type)))))
(def-rules-test test-no-retractions-of-nil-initial-value-accumulator-results
@@ -1231,7 +1198,7 @@
(insert! (->Cold ?coldest-temp))]
filter-join [[[Hot (= ?max-temp temperature)]
- [?coldest-temp <- nil-unsafe-accum-init-value-accum :from [ColdAndWindy (< temperature ?max-temp)]]]
+ [?coldest-temp <- nil-unsafe-accum-init-value-accum :from [ColdAndWindy (< temperature ?max-temp)]]]
(insert! (->Cold ?coldest-temp))]]
:sessions [hash-join-session [hash-join cold-query] {}
@@ -1316,16 +1283,16 @@
(def-rules-test test-accumulate-left-retract-initial-value-new-bindings-token-add-and-remove
{:rules [r1 [[[?w <- WindSpeed (= ?loc location)]
- [?t <- (assoc (acc/all) :convert-return-fn (constantly []))
- :from [Temperature (= ?loc location) (= ?degrees temperature)]]]
+ [?t <- (assoc (acc/all) :convert-return-fn (constantly []))
+ :from [Temperature (= ?loc location) (= ?degrees temperature)]]]
(insert! (->TemperatureHistory [?loc ?degrees]))]
r2 [[[?w <- WindSpeed (= ?loc location)]
- [?t <- (assoc (acc/all) :convert-return-fn (constantly []))
+ [?t <- (assoc (acc/all) :convert-return-fn (constantly []))
;; Note that only the binding that comes from a previous condition can use a filter function
;; other than equality. The = symbol is special-cased to potentially create a new binding;
;; if we used (tu/join-filter-equals ?degrees temperature) here we would have an invalid rule constraint.
- :from [Temperature (tu/join-filter-equals ?loc location) (= ?degrees temperature)]]]
+ :from [Temperature (tu/join-filter-equals ?loc location) (= ?degrees temperature)]]]
(insert! (->TemperatureHistory [?loc ?degrees]))]]
:queries [q [[] [[TemperatureHistory (= ?ts temperatures)]]]]
@@ -1356,11 +1323,11 @@
(def-rules-test test-accumulate-with-bindings-from-parent
{:rules [r1 [[[?w <- WindSpeed (= ?loc location)]
- [?ts <- (acc/all) :from [Temperature (= ?loc location)]]]
+ [?ts <- (acc/all) :from [Temperature (= ?loc location)]]]
(insert! (->TemperatureHistory [?loc (map :temperature ?ts)]))]
r2 [[[?w <- WindSpeed (= ?loc location)]
- [?ts <- (acc/all) :from [Temperature (tu/join-filter-equals ?loc location)]]]
+ [?ts <- (acc/all) :from [Temperature (tu/join-filter-equals ?loc location)]]]
(insert! (->TemperatureHistory [?loc (map :temperature ?ts)]))]]
:queries [q [[] [[TemperatureHistory (= ?ts temperatures)]]]]
@@ -1371,37 +1338,37 @@
(doseq [[empty-session join-type] [[s1 "simple hash join"]
[s2 "filter join"]]]
- (is (= (-> empty-session
- (insert (->WindSpeed 10 "MCI"))
- fire-rules
- (query q))
- [{:?ts ["MCI" []]}])
- (str "Simple case of joining with an empty accumulator with binding from a parent" \newline
- "for a " join-type))
-
- (is (= (-> empty-session
- (insert (->WindSpeed 10 "MCI") (->Temperature 20 "MCI"))
- fire-rules
- (query q))
- [{:?ts ["MCI" [20]]}])
- (str "Simple case of joining with a non-empty accumulator with a binding from a parent" \newline
- "for a " join-type))
-
- (is (= (-> empty-session
- (insert (->WindSpeed 10 "MCI") (->Temperature 30 "MCI") (->Temperature 20 "LAX"))
- fire-rules
- (query q))
- [{:?ts ["MCI" [30]]}])
- (str "One value can join with parent node, but the other value has no matching parent " \newline
- "for a " join-type))
-
- (is (= (-> empty-session
- (insert (->WindSpeed 10 "MCI") (->Temperature 20 "LAX"))
- fire-rules
- (query q))
- [{:?ts ["MCI" []]}])
- (str "Creation of a non-equal binding from a parent node " \newline
- "should not allow an accumulator to fire for another binding value for a " join-type))))
+ (is (= (-> empty-session
+ (insert (->WindSpeed 10 "MCI"))
+ fire-rules
+ (query q))
+ [{:?ts ["MCI" []]}])
+ (str "Simple case of joining with an empty accumulator with binding from a parent" \newline
+ "for a " join-type))
+
+ (is (= (-> empty-session
+ (insert (->WindSpeed 10 "MCI") (->Temperature 20 "MCI"))
+ fire-rules
+ (query q))
+ [{:?ts ["MCI" [20]]}])
+ (str "Simple case of joining with a non-empty accumulator with a binding from a parent" \newline
+ "for a " join-type))
+
+ (is (= (-> empty-session
+ (insert (->WindSpeed 10 "MCI") (->Temperature 30 "MCI") (->Temperature 20 "LAX"))
+ fire-rules
+ (query q))
+ [{:?ts ["MCI" [30]]}])
+ (str "One value can join with parent node, but the other value has no matching parent " \newline
+ "for a " join-type))
+
+ (is (= (-> empty-session
+ (insert (->WindSpeed 10 "MCI") (->Temperature 20 "LAX"))
+ fire-rules
+ (query q))
+ [{:?ts ["MCI" []]}])
+ (str "Creation of a non-equal binding from a parent node " \newline
+ "should not allow an accumulator to fire for another binding value for a " join-type))))
(def-rules-test test-accumulate-with-explicit-nil-binding-value
@@ -1409,11 +1376,11 @@
(insert! (->Temperature [?t []] "MCI"))]
binding-from-parent [[[Cold (= ?t temperature)]
- [?hot-facts <- (acc/all) :from [Hot (= ?t temperature)]]]
+ [?hot-facts <- (acc/all) :from [Hot (= ?t temperature)]]]
(insert! (->Temperature [?t []] "MCI"))]
binding-from-parent-non-equals [[[Cold (= ?t temperature)]
- [?hot-facts <- (acc/all) :from [Hot (tu/join-filter-equals ?t temperature)]]]
+ [?hot-facts <- (acc/all) :from [Hot (tu/join-filter-equals ?t temperature)]]]
(insert! (->Temperature [?t []] "MCI"))]]
:queries [q [[] [[Temperature (= ?t temperature)]]]]
@@ -1423,10 +1390,10 @@
s3 [binding-from-parent-non-equals q] {}]}
(is (= [{:?t [nil []]}]
- (-> s1
- (insert (->Hot nil))
- fire-rules
- (query q)))
+ (-> s1
+ (insert (->Hot nil))
+ fire-rules
+ (query q)))
"An explicit value of nil in a field used to create a binding group should allow the binding to be created.")
(doseq [[empty-session constraint-type] [[s2 "simple hash join"]
@@ -1606,14 +1573,14 @@
{:queries [q [[] [[TemperatureHistory (= ?ws temperatures)]]]]
:rules [r1 [[[Cold (= ?t temperature)]
- [?ws <- (acc/all) :from [ColdAndWindy
- (and (tu/join-filter-equals ?t temperature)
- (even? windspeed))]]]
+ [?ws <- (acc/all) :from [ColdAndWindy
+ (and (tu/join-filter-equals ?t temperature)
+ (even? windspeed))]]]
(insert! (->TemperatureHistory (map :windspeed ?ws)))]
r2 [[[Cold (= ?t temperature)]
- [?ws <- (acc/all) :from [ColdAndWindy (= ?t temperature)
- (even? windspeed)]]]
+ [?ws <- (acc/all) :from [ColdAndWindy (= ?t temperature)
+ (even? windspeed)]]]
(insert! (->TemperatureHistory (map :windspeed ?ws)))]]
:sessions [s1 [r1 q] {}
@@ -1622,58 +1589,58 @@
(doseq [[empty-session join-type] [[s1 "filter join"]
[s2 "simple hash join"]]
- :let [non-matching-first (-> empty-session
- (insert (->ColdAndWindy 10 23))
- (insert (->ColdAndWindy 10 20))
- (insert (->ColdAndWindy 10 22))
- (insert (->Cold 10))
- fire-rules)
+ :let [non-matching-first (-> empty-session
+ (insert (->ColdAndWindy 10 23))
+ (insert (->ColdAndWindy 10 20))
+ (insert (->ColdAndWindy 10 22))
+ (insert (->Cold 10))
+ fire-rules)
- non-matching-middle (-> empty-session
- (insert (->ColdAndWindy 10 20))
- (insert (->ColdAndWindy 10 23))
- (insert (->ColdAndWindy 10 22))
- (insert (->Cold 10))
- fire-rules)
-
- non-matching-last (-> empty-session
+ non-matching-middle (-> empty-session
(insert (->ColdAndWindy 10 20))
- (insert (->ColdAndWindy 10 22))
(insert (->ColdAndWindy 10 23))
+ (insert (->ColdAndWindy 10 22))
(insert (->Cold 10))
fire-rules)
- [first-removed middle-removed last-removed] (for [init-session [non-matching-first
- non-matching-middle
- non-matching-last]]
- (-> init-session
- (retract (->ColdAndWindy 10 23))
+ non-matching-last (-> empty-session
+ (insert (->ColdAndWindy 10 20))
+ (insert (->ColdAndWindy 10 22))
+ (insert (->ColdAndWindy 10 23))
+ (insert (->Cold 10))
+ fire-rules)
+
+ [first-removed middle-removed last-removed] (for [init-session [non-matching-first
+ non-matching-middle
+ non-matching-last]]
+ (-> init-session
+ (retract (->ColdAndWindy 10 23))
;; Perform an additional operation to reveal any discrepancy between the element ordering
;; in the candidates and the resulting ordering in downstream tokens.
- (insert (->ColdAndWindy 10 24))
- fire-rules))
+ (insert (->ColdAndWindy 10 24))
+ fire-rules))
- without-rhs-ordering (fn [query-results]
- (map (fn [result]
- (update result :?ws set))
- query-results))]]
+ without-rhs-ordering (fn [query-results]
+ (map (fn [result]
+ (update result :?ws set))
+ query-results))]]
- (is (= [{:?ws #{20 22}}]
+ (is (= [{:?ws #{20 22}}]
;; Put elements in a set to be independent of the ordering of acc/all in the assertions.
;; All we care about is that the cardinality of the query results is correct, which may
;; not be the case if the ordering is inconsistent between operations. User code, however, needs
;; to be OK with getting any ordering in the RHS, so we just test that contract here.
- (without-rhs-ordering (query non-matching-first q))
- (without-rhs-ordering (query non-matching-middle q))
- (without-rhs-ordering (query non-matching-last q)))
- (str "Sanity test of our rule that does not exercise right-retract, the main purpose of this test, for a " join-type))
-
- (doseq [[final-session message] [[first-removed "first right activation"]
- [middle-removed "middle right activation"]
- [last-removed "last right activation"]]]
- (is (= [{:?ws #{20 22 24}}]
- (without-rhs-ordering (query final-session q)))
- (str "Retracting non-matching " message "for a " join-type)))))
+ (without-rhs-ordering (query non-matching-first q))
+ (without-rhs-ordering (query non-matching-middle q))
+ (without-rhs-ordering (query non-matching-last q)))
+ (str "Sanity test of our rule that does not exercise right-retract, the main purpose of this test, for a " join-type))
+
+ (doseq [[final-session message] [[first-removed "first right activation"]
+ [middle-removed "middle right activation"]
+ [last-removed "last right activation"]]]
+ (is (= [{:?ws #{20 22 24}}]
+ (without-rhs-ordering (query final-session q)))
+          (str "Retracting non-matching " message " for a " join-type)))))
(def min-accum-convert-return-fn-nil-to-zero (acc/min :temperature))
@@ -1682,32 +1649,32 @@
{:queries [lousy-weather-query [[] [[LousyWeather]]]]
:rules [min-non-cold [[[?min-cold <- (acc/min :temperature) :from [Cold]]
- [?min-temp <- (acc/min :temperature) :from [Temperature (< temperature ?min-cold)]]]
+ [?min-temp <- (acc/min :temperature) :from [Temperature (< temperature ?min-cold)]]]
(insert! (->LousyWeather))]]
:sessions [empty-session [lousy-weather-query min-non-cold] {}]}
- (is (empty? (-> empty-session
- (insert (->Temperature 20 "MCI"))
- (insert (->Cold 10))
- fire-rules
- (query lousy-weather-query)))
- "Insert non-matching element first then the token.")
+ (is (empty? (-> empty-session
+ (insert (->Temperature 20 "MCI"))
+ (insert (->Cold 10))
+ fire-rules
+ (query lousy-weather-query)))
+ "Insert non-matching element first then the token.")
- (is (empty? (-> empty-session
- (insert (->Cold 10))
- (insert (->Temperature 20 "MCI"))
- fire-rules
- (query lousy-weather-query)))
- "Insert token first then non-matching element.")
+ (is (empty? (-> empty-session
+ (insert (->Cold 10))
+ (insert (->Temperature 20 "MCI"))
+ fire-rules
+ (query lousy-weather-query)))
+ "Insert token first then non-matching element.")
- (is (empty? (-> empty-session
- (insert (->Cold 10) (->Temperature 20 "MCI"))
- fire-rules
- (retract (->Temperature 20 "MCI"))
- fire-rules
- (query lousy-weather-query)))
- "Insert facts matching both conditions first and then remove a Temperature fact, thus causing the accumulator on Temperature to
+ (is (empty? (-> empty-session
+ (insert (->Cold 10) (->Temperature 20 "MCI"))
+ fire-rules
+ (retract (->Temperature 20 "MCI"))
+ fire-rules
+ (query lousy-weather-query)))
+ "Insert facts matching both conditions first and then remove a Temperature fact, thus causing the accumulator on Temperature to
right-retract back to its initial value. Since the initial value is nil the activation of the rule should be removed."))
(def-rules-test test-initial-value-used-when-non-nil-and-new-binding-group-created
@@ -1718,7 +1685,7 @@
{:queries [q [[] [[TemperatureHistory (= ?ts temperatures)]]]]
:rules [hash-join-rule [[[WindSpeed (= ?loc location)]
- [?temp-count <- (acc/count) :from [Temperature (= ?loc location) (= ?temp temperature)]]]
+ [?temp-count <- (acc/count) :from [Temperature (= ?loc location) (= ?temp temperature)]]]
(insert! (->TemperatureHistory [?loc ?temp ?temp-count]))]
filter-join-rule [[[WindSpeed (= ?loc location)]
@@ -1733,20 +1700,20 @@
[empty-session-filter-join
"join filter"]]]
- (is (= (-> empty-session
- (insert (->WindSpeed 10 "MCI"))
- fire-rules
- (insert (->Temperature 20 "MCI"))
- fire-rules
- (query q))
- [{:?ts ["MCI" 20 1]}])
- (str "Inserting a WindSpeed and then later a matching Temperature for join type " join-type))))
+ (is (= (-> empty-session
+ (insert (->WindSpeed 10 "MCI"))
+ fire-rules
+ (insert (->Temperature 20 "MCI"))
+ fire-rules
+ (query q))
+ [{:?ts ["MCI" 20 1]}])
+ (str "Inserting a WindSpeed and then later a matching Temperature for join type " join-type))))
(def-rules-test test-retract-of-fact-matching-accumulator-causes-downstream-retraction
{:rules [create-cold [[[?cs <- (acc/all) :from [ColdAndWindy]]]
- (doseq [c ?cs]
- (insert! (->Cold (:temperature c))))]
+ (doseq [c ?cs]
+ (insert! (->Cold (:temperature c))))]
temp-from-cold [[[Cold (= ?t temperature)]]
(insert! (->Temperature ?t "MCI"))]]
diff --git a/src/test/common/clara/test_accumulators.cljc b/src/test/clojure/clara/test_accumulators.clj
similarity index 89%
rename from src/test/common/clara/test_accumulators.cljc
rename to src/test/clojure/clara/test_accumulators.clj
index 6459e18d..92f73753 100644
--- a/src/test/common/clara/test_accumulators.cljc
+++ b/src/test/clojure/clara/test_accumulators.clj
@@ -1,46 +1,26 @@
-#?(:clj
- (ns clara.test-accumulators
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->TemperatureHistory]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators :as acc]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- TemperatureHistory
- Cold
- WindSpeed]))
-
- :cljs
- (ns clara.test-accumulators
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- retract
- query]]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->TemperatureHistory TemperatureHistory
- ->Cold Cold
- ->WindSpeed WindSpeed]]
- [cljs.test :as test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
-
+(ns clara.test-accumulators
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ retract
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->TemperatureHistory]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators :as acc]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ TemperatureHistory
+ Cold
+ WindSpeed]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
+
(def-rules-test test-max
-
{:queries [hottest [[]
[[?t <- (acc/max :temperature) from [Temperature]]]]]
@@ -128,7 +108,6 @@
   (is (empty? (query empty-session hottest)))
(is (= [{:?t 80}] (query session hottest)))
-
(is (= #{{:?t (->Temperature 80 "MCI")}}
(set (query session hottest-fact))))
@@ -140,7 +119,7 @@
{:queries [sum-query [[] [[?t <- (acc/sum :temperature) from [Temperature]]]]]
:sessions [empty-session [sum-query] {}]}
-
+
(let [session (-> empty-session
(insert (->Temperature 30 "MCI"))
(insert (->Temperature 10 "MCI"))
@@ -169,7 +148,7 @@
(retract (->Temperature 30 "MCI"))
fire-rules)]
- (is (= [{:?c 0}] (query empty-session count-query)))
+ (is (= [{:?c 0}] (query empty-session count-query)))
(is (= [{:?c 3}] (query session count-query)))
(is (= [{:?c 2}] (query retracted count-query)))))
@@ -189,17 +168,17 @@
(retract (->Temperature 90 "MCI"))
fire-rules)]
- (is (= #{{:?t #{ (->Temperature 80 "MCI")
+ (is (= #{{:?t #{(->Temperature 80 "MCI")
(->Temperature 90 "MCI")}}}
(set (query session distinct-query))))
- (is (= #{{:?t #{ (->Temperature 80 "MCI")}}}
+ (is (= #{{:?t #{(->Temperature 80 "MCI")}}}
(set (query retracted distinct-query))))
- (is (= #{{:?t #{ 80 90}}}
+ (is (= #{{:?t #{80 90}}}
(set (query session distinct-field-query))))
- (is (= #{{:?t #{ 80}}}
+ (is (= #{{:?t #{80}}}
(set (query retracted distinct-field-query))))
;; Tests for https://github.com/cerner/clara-rules/issues/325 without the field argument.
@@ -414,12 +393,12 @@
(def reduced-max-accum
(acc/reduce-to-accum
- (fn [previous value]
- (if previous
- (if (> (:temperature value) (:temperature previous))
- value
- previous)
- value))))
+ (fn [previous value]
+ (if previous
+ (if (> (:temperature value) (:temperature previous))
+ value
+ previous)
+ value))))
(def-rules-test test-reduce-to-accum-max
@@ -550,4 +529,4 @@
(testing "Custom initial value structure is maintained"
(is (and (= (count (query session wind-query)) 1)
(= [(->WindSpeed 75 "KCI")] wind-facts)
- (seq? wind-facts))))))
\ No newline at end of file
+ (seq? wind-facts))))))
diff --git a/src/test/common/clara/test_bindings.cljc b/src/test/clojure/clara/test_bindings.clj
similarity index 86%
rename from src/test/common/clara/test_bindings.cljc
rename to src/test/clojure/clara/test_bindings.clj
index c1f6766c..a868dd49 100644
--- a/src/test/common/clara/test_bindings.cljc
+++ b/src/test/clojure/clara/test_bindings.clj
@@ -1,47 +1,27 @@
-#?(:clj
- (ns clara.test-bindings
- "Tests focused on the creation of binding variables and their use
- in joins between rule and query condition. Binding variables
- begin with ?."
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators :as acc]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy]))
-
- :cljs
- (ns clara.test-bindings
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy]]
- [clara.rules.accumulators :as acc]
- [cljs.test]
- [schema.test :as st]
- [clara.tools.testing-utils :as tu])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-bindings
+ "Tests focused on the creation of binding variables and their use
+   in joins between rule and query conditions. Binding variables
+ begin with ?."
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ retract
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->ColdAndWindy]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators :as acc]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold
+ WindSpeed
+ ColdAndWindy]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(use-fixtures :each tu/side-effect-holder-fixture)
(def side-effect-atom (atom nil))
@@ -190,7 +170,7 @@
{:queries [temps-with-addition [[]
[[Temperature (= ?t1 temperature)
- (= "MCI" location )]
+ (= "MCI" location)]
[Temperature (= ?t2 temperature)
(= ?foo (+ 20 ?t1))
(= "SFO" location)]
@@ -200,7 +180,7 @@
temps-with-negation [[]
[[Temperature (= ?t1 temperature)
- (= "MCI" location )]
+ (= "MCI" location)]
[Temperature (= ?t2 temperature)
(= ?foo (+ 20 ?t1))
(= "SFO" location)]
@@ -220,8 +200,7 @@
(fire-rules)
(query temps-with-addition))))
-
- ;; Test if not all conditions are satisfied.
+;; Test if not all conditions are satisfied.
(is (empty? (-> session
(insert (->Temperature 10 "MCI")
(->Temperature 21 "SFO")
@@ -344,7 +323,6 @@
(is (= [{:?x nil}] (query session test-query)))))
-
;; https://github.com/cerner/clara-rules/issues/357
(def-rules-test test-accumulator-before-equality-test-in-test-node
diff --git a/src/test/clojure/clara/test_clear_ns_productions.clj b/src/test/clojure/clara/test_clear_ns_productions.clj
new file mode 100644
index 00000000..fa970f7a
--- /dev/null
+++ b/src/test/clojure/clara/test_clear_ns_productions.clj
@@ -0,0 +1,69 @@
+;;; Tests that clear-ns-productions! correctly clears all vars marked as productions from the namespace.
+(ns clara.test-clear-ns-productions
+ (:require
+ [clara.rules :refer [clear-ns-productions! defquery defrule defsession
+ fire-rules insert insert! query]]
+ [clara.tools.testing-utils :as tu]
+ [clojure.test :refer [deftest is testing use-fixtures]])
+ (:import
+ [java.lang IllegalArgumentException]))
+
+(use-fixtures :each tu/side-effect-holder-fixture)
+
+(defrule rule-to-be-cleared
+ [:a]
+ =>
+ (reset! tu/side-effect-holder :before-clearing)
+ (insert! :before-clearing))
+
+(defquery query-to-be-cleared [] [?f <- :before-clearing])
+
+(def ^:production-seq ns-production-seq-to-be-cleared
+ [{:doc "Before clearing"
+ :name "clara.test-clear-ns-productions/production-seq-to-be-cleared"
+ :lhs '[{:type :a
+ :constraints []}]
+ :rhs '(clara.rules/insert! :before-clearing-seq)}])
+
+(defsession uncleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
+
+(clear-ns-productions!)
+
+(defrule rule-after-clearing
+ [:a]
+ =>
+ (insert! :after-clearing))
+
+(defquery query-before-clearing [] [?f <- :before-clearing])
+(defquery query-after-clearing [] [?f <- :after-clearing])
+(defquery query-before-clearing-seq [] [?f <- :before-clearing-seq])
+(defquery query-after-clearing-seq [] [?f <- :after-clearing-seq])
+
+(def ^:production-seq production-seq-after-clearing
+ [{:doc "After clearing"
+ :name "clara.test-clear-ns-productions/production-seq-after-clearing"
+ :lhs '[{:type :a
+ :constraints []}]
+ :rhs '(clara.rules/insert! :after-clearing-seq)}])
+
+(defsession cleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
+
+;;; Then tests validating what productions the respective sessions have.
+(deftest cleared?
+ (let [uncleared (-> uncleared-session (insert :a) (fire-rules))]
+ (is (= :before-clearing @tu/side-effect-holder))
+ (reset! tu/side-effect-holder nil))
+ (let [cleared (-> cleared-session (insert :a) (fire-rules))]
+    (testing "cleared-session should not contain any productions defined before (clear-ns-productions!)"
+ (is (= nil @tu/side-effect-holder))
+ (is (empty? (query cleared query-before-clearing)))
+ (is (not-empty (query cleared query-after-clearing))))
+ (is (empty? (query cleared query-before-clearing-seq)))
+ (is (not-empty (query cleared query-after-clearing-seq)))))
+
+(deftest query-cleared?
+ (let [uncleared (-> uncleared-session (insert :a) (fire-rules))
+ cleared (-> cleared-session (insert :a) (fire-rules))]
+ (is (not-empty (query uncleared "clara.test-clear-ns-productions/query-to-be-cleared")))
+ (is (thrown-with-msg? IllegalArgumentException #"clara.test-clear-ns-productions/query-to-be-cleared"
+ (query cleared "clara.test-clear-ns-productions/query-to-be-cleared")))))
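A ^:production-seq var holds raw production maps that defsession picks up alongside defrule and defquery vars when it is given a namespace symbol, as in the sessions above. For orientation, a rough defrule equivalent of the quoted map in ns-production-seq-to-be-cleared (a sketch for illustration only):

    (defrule production-seq-to-be-cleared
      "Before clearing"
      [:a]
      =>
      (insert! :before-clearing-seq))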
diff --git a/src/test/common/clara/test_common.cljc b/src/test/clojure/clara/test_common.clj
similarity index 81%
rename from src/test/common/clara/test_common.cljc
rename to src/test/clojure/clara/test_common.clj
index 386d9906..83ed03ad 100644
--- a/src/test/common/clara/test_common.cljc
+++ b/src/test/clojure/clara/test_common.clj
@@ -1,16 +1,12 @@
(ns clara.test-common
"Common tests for Clara in Clojure and ClojureScript."
- (:require #?(:clj [clojure.test :refer :all]
- :cljs [cljs.test :refer-macros [is deftest testing]])
-
- #?(:clj [clara.rules :refer [defrule defsession defquery
- insert fire-rules query]]
- :cljs [clara.rules :refer [insert fire-rules query]
- :refer-macros [defrule defsession defquery]])
-
- [clara.rules.accumulators :as acc]
-
- [clara.rules.platform :as platform]))
+ (:require
+ [clara.rules :refer [defquery defrule defsession fire-rules insert query]]
+ [clara.rules.accumulators :as acc]
+ [clara.rules.platform :as platform]
+ [clojure.test :refer :all])
+ (:import
+ [java.lang IllegalArgumentException]))
(defn- has-fact? [token fact]
(some #{fact} (map first (:matches token))))
@@ -59,7 +55,6 @@
(is (= #{{:?t 15} {:?t 10}}
(set (query session cold-query))))))
-
(defquery temps-below-threshold
[]
[:threshold [{value :value}] (= ?threshold value)]
@@ -70,12 +65,12 @@
(deftest test-accum-with-filter
(is (= [{:?threshold 0, :?low-temps []}]
- (-> accum-with-filter-session
- (insert {:type :temperature :value 20})
- (insert {:type :threshold :value 0})
- (insert {:type :temperature :value 10})
- fire-rules
- (query temps-below-threshold))))
+ (-> accum-with-filter-session
+ (insert {:type :temperature :value 20})
+ (insert {:type :threshold :value 0})
+ (insert {:type :temperature :value 10})
+ fire-rules
+ (query temps-below-threshold))))
(let [results (-> accum-with-filter-session
(insert {:type :temperature :value 20})
@@ -85,7 +80,7 @@
fire-rules
(query temps-below-threshold))
- [{threshold :?threshold low-temps :?low-temps }] results]
+ [{threshold :?threshold low-temps :?low-temps}] results]
(is (= 1 (count results)))
@@ -120,11 +115,9 @@
(try
(platform/query-param "?value")
(is false "Running the rules in this test should cause an exception.")
- (catch #?(:clj java.lang.IllegalArgumentException
- :cljs js/Error) e
+ (catch IllegalArgumentException e
(is (= "Query bindings must be specified as a keyword or symbol: ?value"
- #?(:clj (.getMessage e)
- :cljs (.-message e))))))))
+ (.getMessage e)))))))
(defsession negation-with-filter-session [none-below-threshold] :fact-type-fn :type)
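The migrated test-common namespace builds its sessions from plain Clojure maps, using :fact-type-fn :type so conditions dispatch on each fact's :type key rather than a record class. A minimal sketch of that pattern, reusing the temps-below-threshold query above (the session name here is hypothetical):

    (defsession map-fact-session [temps-below-threshold] :fact-type-fn :type)

    (-> map-fact-session
        (insert {:type :threshold :value 0})
        (insert {:type :temperature :value 10})
        fire-rules
        (query temps-below-threshold))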
diff --git a/src/test/common/clara/test_dsl.cljc b/src/test/clojure/clara/test_dsl.clj
similarity index 52%
rename from src/test/common/clara/test_dsl.cljc
rename to src/test/clojure/clara/test_dsl.clj
index 3d5a3c74..5145ed94 100644
--- a/src/test/common/clara/test_dsl.cljc
+++ b/src/test/clojure/clara/test_dsl.clj
@@ -1,61 +1,30 @@
-#?(:clj
- (ns clara.test-dsl
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-all!
- insert!
- retract
- query]]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy map->FlexibleFields
- ->First ->Second ->Third ->Fourth]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy
- FlexibleFields
- First
- Second
- Third
- Fourth]))
-
- :cljs
- (ns clara.test-dsl
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-all!
- retract
- query]]
- [clara.rules.testfacts :refer [map->FlexibleFields FlexibleFields
- ->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy
- ->First First
- ->Second Second
- ->Third Third
- ->Fourth Fourth]]
- [clara.rules.accumulators :as acc]
- [clara.tools.testing-utils :as tu]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
+(ns clara.test-dsl
+ (:require
+ [clara.rules :refer :all]
+ [clara.rules.testfacts :refer [->Cold ->First ->Fourth ->Second
+ ->Temperature ->Third ->WindSpeed map->FlexibleFields]]
+ [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clojure.test :refer [is testing use-fixtures]]
+ [schema.test :as st])
+ (:import
+ [java.util TimeZone])
+ (:import [clara.rules.testfacts
+ Temperature
+ WindSpeed
+ Cold
+ ColdAndWindy
+ LousyWeather
+ First
+ Second
+ Third
+ Fourth
+ FlexibleFields]))
;; Tests focused on DSL functionality such as binding visibility in edge cases, fact field access, etc.
;; The distinction between tests here and tests in files focused on the aspects of Clara that the DSL represents
;; may not be clear and there are borderline cases.
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(def side-effect-holder (atom nil))
@@ -80,7 +49,7 @@
(testing "Unrelated constraint on the field and then a binding with the variable first"
(reset! side-effect-holder nil)
-
+
(-> rule1-session
(insert (->Temperature 10 "MCI"))
fire-rules)
@@ -89,7 +58,7 @@
(testing "A binding with the variable first and then an unrelated constraint on the field"
(reset! side-effect-holder nil)
-
+
(-> rule2-session
(insert (->Temperature 10 "MCI"))
fire-rules)
@@ -98,7 +67,7 @@
(testing "Unrelated constraint on the field and then a binding with the variable second"
(reset! side-effect-holder nil)
-
+
(-> rule3-session
(insert (->Temperature 10 "MCI"))
fire-rules)
@@ -107,7 +76,7 @@
(testing "A binding with the variable second and then an unrelated constraint on the field"
(reset! side-effect-holder nil)
-
+
(-> rule4-session
(insert (->Temperature 10 "MCI"))
fire-rules)
@@ -133,24 +102,23 @@
:?u 10
:?v 10})))
-#?(:clj
- (def-rules-test test-bean-test
- {:queries [tz-offset-query [[:?offset]
- [[java.util.TimeZone (= ?offset rawOffset)
- (= ?id ID)]]]]
+(def-rules-test test-bean-test
+ {:queries [tz-offset-query [[:?offset]
+ [[java.util.TimeZone (= ?offset rawOffset)
+ (= ?id ID)]]]]
- :sessions [empty-session [tz-offset-query] {}]}
+ :sessions [empty-session [tz-offset-query] {}]}
- (let [session (-> empty-session
- (insert (java.util.TimeZone/getTimeZone "America/Chicago")
- (java.util.TimeZone/getTimeZone "UTC"))
- fire-rules)]
+ (let [session (-> empty-session
+ (insert (TimeZone/getTimeZone "America/Chicago")
+ (TimeZone/getTimeZone "UTC"))
+ fire-rules)]
- (is (= #{{:?id "America/Chicago" :?offset -21600000}}
- (set (query session tz-offset-query :?offset -21600000))))
+ (is (= #{{:?id "America/Chicago" :?offset -21600000}}
+ (set (query session tz-offset-query :?offset -21600000))))
- (is (= #{{:?id "UTC" :?offset 0}}
- (set (query session tz-offset-query :?offset 0)))))))
+ (is (= #{{:?id "UTC" :?offset 0}}
+ (set (query session tz-offset-query :?offset 0))))))
(def-rules-test test-destructured-args
{:queries [cold-query [[]
@@ -211,34 +179,31 @@
(is (= {{:?f 15} 1}
(frequencies (query session q))))))
-;; FIXME: This doesn't pass in ClojureScript and it should.
-#?(:clj
- (def-rules-test test-variable-visibility
+(def-rules-test test-variable-visibility
+ {:rules [temps-for-locations [[[:location (= ?loc (:loc this))]
- {:rules [temps-for-locations [[[:location (= ?loc (:loc this))]
+ [Temperature
+ (= ?temp temperature)
+ ;; TODO: This comment was copied from clara.test-rules, is it still accurate?
+ ;;
+ ;; This can only have one binding right
+ ;; now due to work that needs to be done
+ ;; still in clara.rules.compiler/extract-from-constraint
+                                  ;; around support for multiple bindings in a condition.
+ (contains? #{?loc} location)]]
+ (insert! (->Cold ?temp))]]
- [Temperature
- (= ?temp temperature)
- ;; TODO: This comment was copied from clara.test-rules, is it still accurate?
- ;;
- ;; This can only have one binding right
- ;; now due to work that needs to be done
- ;; still in clara.rules.compiler/extract-from-constraint
- ;; around support multiple bindings in a condition.
- (contains? #{?loc} location)]]
- (insert! (->Cold ?temp))]]
+ :queries [find-cold [[] [[?c <- Cold]]]]
- :queries [find-cold [[] [[?c <- Cold]]]]
+ :sessions [empty-session [temps-for-locations find-cold] {}]}
- :sessions [empty-session [temps-for-locations find-cold] {}]}
-
- (let [session (-> empty-session
- (insert ^{:type :location} {:loc "MCI"})
- (insert (->Temperature 10 "MCI"))
- fire-rules)]
+ (let [session (-> empty-session
+ (insert ^{:type :location} {:loc "MCI"})
+ (insert (->Temperature 10 "MCI"))
+ fire-rules)]
- (is (= {{:?c (->Cold 10)} 1}
- (frequencies (query session find-cold)))))))
+ (is (= {{:?c (->Cold 10)} 1}
+ (frequencies (query session find-cold))))))
(def-rules-test test-nested-binding
@@ -272,101 +237,96 @@
(fire-rules)
(query same-wind-and-temp)))))
-;; FIXME: Make this pass in ClojureScript
-#?(:clj
- ;; Test for: https://github.com/cerner/clara-rules/issues/97
- (def-rules-test test-nested-binding-with-disjunction
+;; Test for: https://github.com/cerner/clara-rules/issues/97
+(def-rules-test test-nested-binding-with-disjunction
- {:rules [any-cold [[[Temperature (= ?t temperature)]
- [:or
- [Cold (< temperature ?t)]
- [Cold (< temperature 5)]]]
+ {:rules [any-cold [[[Temperature (= ?t temperature)]
+ [:or
+ [Cold (< temperature ?t)]
+ [Cold (< temperature 5)]]]
- (insert! ^{:type :found-cold} {:found true})]]
+ (insert! ^{:type :found-cold} {:found true})]]
- :queries [found-cold [[] [[?f <- :found-cold]]]]
+ :queries [found-cold [[] [[?f <- :found-cold]]]]
- :sessions [empty-session [any-cold found-cold] {}]}
+ :sessions [empty-session [any-cold found-cold] {}]}
- (let [session (-> empty-session
- (insert (->Temperature 10 "MCI")
- (->Cold 5))
- fire-rules)
+ (let [session (-> empty-session
+ (insert (->Temperature 10 "MCI")
+ (->Cold 5))
+ fire-rules)
- results (query session found-cold)]
+ results (query session found-cold)]
- (is (= 1 (count results)))
+ (is (= 1 (count results)))
- (is (= ^{:type found-cold} {:?f {:found true}}
- (first results))))))
+ (is (= ^{:type found-cold} {:?f {:found true}}
+ (first results)))))
(def locals-shadowing-tester
- "Used to demonstrate local shadowing works in `test-rhs-locals-shadowing-vars` below."
- :bad)
-
-;; FIXME: This doesn't work in ClojureScript and it should; it gives a compilation error.
-#?(:clj
- (def-rules-test test-rhs-locals-shadowing-vars
-
- {:rules [r1 [[[Temperature]]
- (let [{:keys [locals-shadowing-tester]} {:locals-shadowing-tester :good}]
- (insert! ^{:type :result}
- {:r :r1
- :v locals-shadowing-tester}))]
-
- r2 [[[Temperature]]
- (let [locals-shadowing-tester :good]
- (insert! ^{:type :result}
- {:r :r2
- :v locals-shadowing-tester}))]
-
- r3 [[[Temperature]]
- (let [[locals-shadowing-tester] [:good]]
- (insert! ^{:type :result}
- {:r :r3
- :v locals-shadowing-tester}))]
-
- r4 [[[Temperature]]
- (insert-all! (for [_ (range 1)
- :let [locals-shadowing-tester :good]]
- ^{:type :result}
- {:r :r4
- :v locals-shadowing-tester}))]]
-
- :queries [q [[] [[?r <- :result]]]]
-
- :sessions [empty-session [r1 r2 r3 r4 q] {}]}
-
- (is (= (frequencies [{:r :r1
- :v :good}
- {:r :r2
- :v :good}
- {:r :r3
- :v :good}
- {:r :r4
- :v :good}])
- (->> (-> empty-session
- (insert (->Temperature 10 "MCI"))
- fire-rules
- (query q))
- (map :?r)
- frequencies)))))
-
-#?(:clj
- (def-rules-test test-qualified-java-introp
+ "Used to demonstrate local shadowing works in `test-rhs-locals-shadowing-vars` below."
+ :bad)
+
+(def-rules-test test-rhs-locals-shadowing-vars
+
+ {:rules [r1 [[[Temperature]]
+ (let [{:keys [locals-shadowing-tester]} {:locals-shadowing-tester :good}]
+ (insert! ^{:type :result}
+ {:r :r1
+ :v locals-shadowing-tester}))]
+
+ r2 [[[Temperature]]
+ (let [locals-shadowing-tester :good]
+ (insert! ^{:type :result}
+ {:r :r2
+ :v locals-shadowing-tester}))]
+
+ r3 [[[Temperature]]
+ (let [[locals-shadowing-tester] [:good]]
+ (insert! ^{:type :result}
+ {:r :r3
+ :v locals-shadowing-tester}))]
+
+ r4 [[[Temperature]]
+ (insert-all! (for [_ (range 1)
+ :let [locals-shadowing-tester :good]]
+ ^{:type :result}
+ {:r :r4
+ :v locals-shadowing-tester}))]]
+
+ :queries [q [[] [[?r <- :result]]]]
+
+ :sessions [empty-session [r1 r2 r3 r4 q] {}]}
+
+ (is (= (frequencies [{:r :r1
+ :v :good}
+ {:r :r2
+ :v :good}
+ {:r :r3
+ :v :good}
+ {:r :r4
+ :v :good}])
+ (->> (-> empty-session
+ (insert (->Temperature 10 "MCI"))
+ fire-rules
+ (query q))
+ (map :?r)
+ frequencies))))
+
+(def-rules-test test-qualified-java-introp
+
+ {:queries [find-string-substring [[]
+ [[?s <- String (and (<= 2 (count this))
+ (.. this (substring 2) toString))]]]]
+
+ :sessions [empty-session [find-string-substring] {}]}
- {:queries [find-string-substring [[]
- [[?s <- String (and (<= 2 (count this))
- (.. this (substring 2) toString))]]]]
-
- :sessions [empty-session [find-string-substring] {}]}
-
- (let [session (-> empty-session
- (insert "abc")
- fire-rules)]
+ (let [session (-> empty-session
+ (insert "abc")
+ fire-rules)]
- (is (= [{:?s "abc"}]
- (query session find-string-substring))))))
+ (is (= [{:?s "abc"}]
+ (query session find-string-substring)))))
(def-rules-test record-fields-with-munged-names
{:queries [q [[]
diff --git a/src/test/clojure/clara/test_durability.clj b/src/test/clojure/clara/test_durability.clj
index eaa0e166..bb14e21d 100644
--- a/src/test/clojure/clara/test_durability.clj
+++ b/src/test/clojure/clara/test_durability.clj
@@ -14,7 +14,6 @@
[clara.tools.testing-utils :as tu])
(:import [clara.rules.testfacts Temperature]
[clara.rules.engine TestNode]))
-
(use-fixtures :once st/validate-schemas)
@@ -34,7 +33,7 @@
;; cases.
(when (is (= expected-fact fact)
"The expected and actual must be equal")
-
+
(or (identical? expected-fact fact)
(and (is (= (coll? expected-fact)
(coll? fact))
@@ -88,7 +87,7 @@
chi "CHI"
irk "IRK"
ten 10
- twenty 20
+ twenty 20
fifty 50
forty 40
thirty 30
@@ -167,13 +166,13 @@
ws50
ws40
ws10] (:all-objs results)
-
+
fired (:fired-session results)
{:keys [unpaired-res
cold-res
hot-res
- temp-his-res
+ temp-his-res
temps-under-thresh-res]} (:query-results results)
create-serializer (fn [stream]
@@ -191,7 +190,7 @@
mem-serializer (->LocalMemorySerializer holder)]
     ;; Serialize the data. Store the rulebase separately. This is likely to be the most common usage.
-
+
(d/serialize-rulebase fired
rulebase-serializer)
(d/serialize-session-state fired
@@ -210,7 +209,7 @@
restored (d/deserialize-session-state session-serializer
mem-serializer
{:base-rulebase restored-rulebase})
-
+
r-unpaired-res (query restored dr/unpaired-wind-speed)
r-cold-res (query restored dr/cold-temp)
r-hot-res (query restored dr/hot-temp)
@@ -295,14 +294,14 @@
hot30
hot50
[temp40 temp30 temp20]]]
-
+
(is (= (count expected-facts)
(count facts))
(str "expected facts:" \newline
(vec expected-facts) \newline
"actual facts:" \newline
(vec facts)))
-
+
(doseq [i (range (count expected-facts))
:let [expected-fact (nth expected-facts i)
fact (nth facts i)]]
@@ -317,7 +316,7 @@
(if deserialize-opts
(d/deserialize-rulebase (df/create-session-serializer bais) deserialize-opts)
(d/deserialize-rulebase (df/create-session-serializer bais)))))))
-
+
(deftest test-durability-fressian-serde
(testing "SerDe of the rulebase along with working memory"
(durability-test :fressian))
@@ -334,7 +333,7 @@
init-qresults (:query-results (session-test s))
restored-qresults1 (:query-results (session-test restored1))
restored-qresults2 (:query-results (session-test restored2))]
-
+
(is (= init-qresults
restored-qresults1
restored-qresults2)))))
@@ -378,7 +377,7 @@
activation-group-sort-fn-called? (volatile! false)
fact-type-fn-called? (volatile! false)
ancestors-fn-called? (volatile! false)
-
+
opts {:activation-group-fn (fn [x]
(vreset! activation-group-fn-called? true)
(or (some-> x :props :salience)
@@ -394,7 +393,7 @@
(ancestors x))}
rulebase (rb-serde orig opts)
-
+
restored (if assemble-with-memory?
(d/assemble-restored-session rulebase memory opts)
(d/assemble-restored-session rulebase opts))]
diff --git a/src/test/common/clara/test_exists.cljc b/src/test/clojure/clara/test_exists.clj
similarity index 73%
rename from src/test/common/clara/test_exists.cljc
rename to src/test/clojure/clara/test_exists.clj
index 7611d70f..e8b8b659 100644
--- a/src/test/common/clara/test_exists.cljc
+++ b/src/test/clojure/clara/test_exists.clj
@@ -1,45 +1,25 @@
-#?(:clj
- (ns clara.test-exists
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-unconditional!
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy]))
-
- :cljs
- (ns clara.test-exists
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-unconditional!
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-exists
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert-unconditional!
+ insert!
+ retract
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->ColdAndWindy]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold
+ WindSpeed
+ ColdAndWindy]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(def-rules-test test-simple-exists
{:queries [has-windspeed [[] [[:exists [WindSpeed (= ?location location)]]]]]
@@ -174,7 +154,7 @@
(def-rules-test test-additional-item-noop
{:rules [exists-rule [[[:exists [Temperature (< temperature 0)]]]
- (insert-unconditional! (->Cold :freezing))]]
+ (insert-unconditional! (->Cold :freezing))]]
:queries [cold-query [[] [[Cold (= ?t temperature)]]]]
diff --git a/src/test/clojure/clara/test_fressian.clj b/src/test/clojure/clara/test_fressian.clj
index 994bcceb..6416f753 100644
--- a/src/test/clojure/clara/test_fressian.clj
+++ b/src/test/clojure/clara/test_fressian.clj
@@ -57,7 +57,7 @@
(testing "set"
(test-serde-with-meta #{:x :y} #{:x :y}))
-
+
(testing "vec"
(test-serde-with-meta [1 2 3] [1 2 3]))
@@ -78,13 +78,13 @@
(test-serde [:x 1] e)
(is (instance? clojure.lang.MapEntry (serde e))
"preserves map entry type to be sure to still work with `key` and `val`")))
-
+
(testing "sym"
(test-serde-with-meta 't 't))
(testing "record"
(test-serde-with-meta (->Tester 10) (->Tester 10)))
-
+
(testing "sorted collections"
(let [ss (sorted-set 1 10)
ss-custom (with-meta (sorted-set-by custom-comparator 1 10)
diff --git a/src/test/clojure/clara/test_infinite_loops.clj b/src/test/clojure/clara/test_infinite_loops.clj
index 67d53a74..05d0150b 100644
--- a/src/test/clojure/clara/test_infinite_loops.clj
+++ b/src/test/clojure/clara/test_infinite_loops.clj
@@ -65,7 +65,6 @@
;; two rules that form the loop have salience and are thus parts of different
;; activation groups.
-
{:rules [hot-rule [[[:not [Hot]]]
(insert! (->Cold nil))
{:salience 1}]
@@ -83,7 +82,6 @@
(assert-ex-data {:clara-rules/infinite-loop-suspected true} (fire-rules session))))
-
(def-rules-test test-recursive-insertion
;; Test of an infinite loop due to runaway insertions without retractions.
@@ -257,8 +255,8 @@
(insert! (->Cold nil))]]
:sessions [empty-session [hot-rule] {}]}
-
+
(assert-ex-data
{:clara-rules/max-cycles-exceeded-fn "NOT A FUNCTION"}
(ld/with-loop-detection empty-session 3000 "NOT A FUNCTION")))
-
+
diff --git a/src/test/clojure/clara/test_java.clj b/src/test/clojure/clara/test_java.clj
index fa1185ed..c26d8c98 100644
--- a/src/test/clojure/clara/test_java.clj
+++ b/src/test/clojure/clara/test_java.clj
@@ -6,7 +6,7 @@
(:import [clara.rules.testfacts Temperature WindSpeed Cold ColdAndWindy LousyWeather First Second Third Fourth]
[clara.rules QueryResult RuleLoader WorkingMemory]))
-(defn- java-namespace-args
+(defn- java-namespace-args
"The java API expects an arra of strings containing namespace names, so create that."
[]
(doto (make-array String 2)
@@ -24,13 +24,13 @@
(.add (->Temperature -10 "CHI")))
;; Testing Java interop, so session is a clara.rules.WorkingMemory object.
- session (-> (RuleLoader/loadRules (java-namespace-args))
+ session (-> (RuleLoader/loadRules (java-namespace-args))
(.insert facts)
(.fireRules))
subzero-locs (.query session "clara.other-ruleset/subzero-locations" {})
freezing-locs (.query session "clara.sample-ruleset/freezing-locations" {})]
-
+
(is (= #{"CHI"}
(set (map #(.getResult % "?loc") subzero-locs))))
@@ -40,9 +40,9 @@
(deftest query-with-args
(let [session
(-> (RuleLoader/loadRules (java-namespace-args))
- (.insert [(->Temperature 15 "MCI")
- (->Temperature 10 "BOS")
- (->Temperature 50 "SFO")
+ (.insert [(->Temperature 15 "MCI")
+ (->Temperature 10 "BOS")
+ (->Temperature 50 "SFO")
(->Temperature -10 "CHI")])
(.fireRules))
@@ -51,6 +51,6 @@
(.put "?loc" "CHI"))
chicago-temp (.query session "clara.other-ruleset/temp-by-location" java-args)]
-
+
(is (= #{-10}
(set (map #(.getResult % "?temp") chicago-temp))))))
diff --git a/src/test/common/clara/test_memory.cljc b/src/test/clojure/clara/test_memory.clj
similarity index 76%
rename from src/test/common/clara/test_memory.cljc
rename to src/test/clojure/clara/test_memory.clj
index dc47251d..eb2250b0 100644
--- a/src/test/common/clara/test_memory.cljc
+++ b/src/test/clojure/clara/test_memory.clj
@@ -1,49 +1,26 @@
-#?(:clj
- (ns clara.test-memory
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed ->Hot
- ->ColdAndWindy ->First ->Second]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators :as acc]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Hot
- Cold
- WindSpeed
- ColdAndWindy
- First
- Second]))
-
- :cljs
- (ns clara.test-memory
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->Hot Hot
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy
- ->First First
- ->Second Second]]
- [clara.rules.accumulators :as acc]
- [clara.tools.testing-utils :as tu]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-memory
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ retract
+ query]]
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed ->Hot
+ ->ColdAndWindy ->First ->Second]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators :as acc]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Hot
+ Cold
+ WindSpeed
+ ColdAndWindy
+ First
+ Second]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
;; While the memory is tested through rules and queries, rather than direct unit tests on the memory,
;; the intent of these tests is to create patterns in the engine that cover edge cases and other paths
@@ -113,7 +90,6 @@
(is (= n
(count (query session cold-query))))))
-
(def-rules-test test-many-retract-accumulated-for-same-accumulate-with-join-filter-node
{:rules [count-cold-temps [[[Cold (= ?cold-temp temperature)]
diff --git a/src/test/common/clara/test_negation.cljc b/src/test/clojure/clara/test_negation.clj
similarity index 83%
rename from src/test/common/clara/test_negation.cljc
rename to src/test/clojure/clara/test_negation.clj
index 2bf4eb0e..5bf1851c 100644
--- a/src/test/common/clara/test_negation.cljc
+++ b/src/test/clojure/clara/test_negation.clj
@@ -1,49 +1,28 @@
-#?(:clj
- (ns clara.test-negation
- (:require [clara.tools.testing-utils :refer [def-rules-test
- side-effect-holder] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-all!
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy ->LousyWeather]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy
- LousyWeather]))
-
- :cljs
- (ns clara.test-negation
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-all!
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy
- ->LousyWeather LousyWeather]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st]
- [clara.tools.testing-utils :refer [side-effect-holder] :as tu])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-negation
+ (:require [clara.tools.testing-utils :refer [def-rules-test
+ side-effect-holder] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert-all!
+ insert!
+ retract
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->ColdAndWindy ->LousyWeather]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators]
+ [schema.test :as st])
+ (:import [clara.rules.engine ISystemFact]
+ [clara.rules.testfacts
+ Temperature
+ Cold
+ WindSpeed
+ ColdAndWindy
+ LousyWeather]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(def-rules-test test-exists-inside-boolean-conjunction-and-disjunction
@@ -142,25 +121,25 @@
;; The cold fact should be retracted because inserting this
;; triggers an insert that matches the negation.
- (is (empty? (-> empty-session
- (insert (->WindSpeed 100 "MCI"))
- (fire-rules)
- (query cold-query))))
+ (is (empty? (-> empty-session
+ (insert (->WindSpeed 100 "MCI"))
+ (fire-rules)
+ (query cold-query))))
;; The cold fact should exist again because we are indirectly retracting
       ;; the fact that matched the negation originally
- (is (= [{:?c (->Cold 0)}]
- (-> empty-session
- (insert (->WindSpeed 100 "MCI"))
- (fire-rules)
- (retract (->WindSpeed 100 "MCI"))
- (fire-rules)
- (query cold-query))))))
+ (is (= [{:?c (->Cold 0)}]
+ (-> empty-session
+ (insert (->WindSpeed 100 "MCI"))
+ (fire-rules)
+ (retract (->WindSpeed 100 "MCI"))
+ (fire-rules)
+ (query cold-query))))))
(def-rules-test test-negation-with-other-conditions
{:queries [windy-but-not-cold-query [[] [[WindSpeed (> windspeed 30) (= ?w windspeed)]
- [:not [ Temperature (< temperature 20)]]]]]
+ [:not [Temperature (< temperature 20)]]]]]
:sessions [empty-session [windy-but-not-cold-query] {}]}
@@ -265,8 +244,7 @@
(not facts-expected?)
(not-empty (query session object-query)))
"If the object query returns nothing the setup of this test is invalid")
- (not-any? (fn [fact] #?(:clj (instance? clara.rules.engine.ISystemFact fact)
- :cljs (isa? :clara.rules.engine/system-type fact)))
+ (not-any? (fn [fact] (instance? ISystemFact fact))
(as-> session x
(query x object-query)
(map :?o x))))]
diff --git a/src/test/common/clara/test_node_sharing.cljc b/src/test/clojure/clara/test_node_sharing.clj
similarity index 81%
rename from src/test/common/clara/test_node_sharing.cljc
rename to src/test/clojure/clara/test_node_sharing.clj
index 1a192c7f..a4022b89 100644
--- a/src/test/common/clara/test_node_sharing.cljc
+++ b/src/test/clojure/clara/test_node_sharing.clj
@@ -1,47 +1,26 @@
-#?(:clj
- (ns clara.test-node-sharing
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-unconditional!
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy
- WindSpeed]))
-
- :cljs
- (ns clara.test-node-sharing
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-unconditional!
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st]
- [clara.tools.testing-utils :as tu])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture) tu/side-effect-holder-fixture)
+(ns clara.test-node-sharing
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert-unconditional!
+ insert!
+ retract
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->ColdAndWindy]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold
+ WindSpeed
+            ColdAndWindy]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture tu/side-effect-holder-fixture)
;; See issue 433 for more information
(def-rules-test test-or-sharing-same-condition
@@ -95,7 +74,6 @@
(is (= (query session c-query)
[{:?c {:fact-type ::c}}]))))
-
;; FIXME: Log an issue for this bug and uncomment when it is resolved. Since an :or
;; condition is essentially creating two rules I'd expect this to insert twice.
(comment
@@ -104,8 +82,7 @@
[:and [:or
[::a]
[::b]]
- [::d]
- ]
+ [::d]]
[:and
[::a]
[::d]]]]
@@ -215,7 +192,6 @@
(is (= (set (query session e-query))
#{}))))
-
(def-rules-test test-sharing-simple-condition-followed-by-or
{:rules [cold-and-windy-rule [[[WindSpeed (> windspeed 20) (do (swap! tu/side-effect-holder inc)
diff --git a/src/test/clojure/clara/test_performance_optimizations.clj b/src/test/clojure/clara/test_performance_optimizations.clj
new file mode 100644
index 00000000..927db1e8
--- /dev/null
+++ b/src/test/clojure/clara/test_performance_optimizations.clj
@@ -0,0 +1,45 @@
+;; These tests validate that operations that the rules engine should optimize
+;; away are in fact optimized away. The target here is not the actual execution time,
+;; which will vary per system, but verification that the operations in question are not performed.
+(ns clara.test-performance-optimizations
+ (:require [clara.tools.testing-utils :refer [def-rules-test
+ side-effect-holder] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert!
+ query]]
+
+ [clara.rules.testfacts :refer [->Cold ->ColdAndWindy]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Cold
+ ColdAndWindy]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
+(use-fixtures :each tu/side-effect-holder-fixture)
+
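+;; &env is the map of local bindings visible at macro-expansion time, so this
+;; macro expands to a compile-time boolean: true when no let binding was
+;; created for ?unused-binding in the rule's right-hand side.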
+(defmacro true-if-binding-absent
+ []
+ (not (contains? &env '?unused-binding)))
+
+;; See issue https://github.com/cerner/clara-rules/issues/383
+;; This validates that we don't create let bindings for binding
+;; variables that aren't used. Doing so both imposes runtime costs
+;; and increases the size of the generated code that must be evaluated.
+(def-rules-test test-unused-rhs-binding-not-bound
+
+ {:rules [cold-windy-rule [[[ColdAndWindy (= ?used-binding temperature) (= ?unused-binding windspeed)]]
+ (when (true-if-binding-absent)
+ (insert! (->Cold ?used-binding)))]]
+
+ :queries [cold-query [[] [[Cold (= ?c temperature)]]]]
+
+ :sessions [empty-session [cold-windy-rule cold-query] {}]}
+
+ (is (= [{:?c 0}]
+ (-> empty-session
+ (insert (->ColdAndWindy 0 0))
+ fire-rules
+ (query cold-query)))))
diff --git a/src/test/common/clara/test_queries.cljc b/src/test/clojure/clara/test_queries.clj
similarity index 58%
rename from src/test/common/clara/test_queries.cljc
rename to src/test/clojure/clara/test_queries.clj
index a181febc..f10b7622 100644
--- a/src/test/common/clara/test_queries.cljc
+++ b/src/test/clojure/clara/test_queries.clj
@@ -1,29 +1,17 @@
-#?(:clj
- (ns clara.test-queries
- (:require [clara.tools.testing-utils :refer [def-rules-test
- side-effect-holder] :as tu]
- [clara.rules :refer [fire-rules
- insert
- query]]
-
- [clara.rules.testfacts :refer [->Temperature]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature]))
-
- :cljs
- (ns clara.test-queries
- (:require [clara.rules :refer [fire-rules
- insert
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature]]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-queries
+ (:require [clara.tools.testing-utils :refer [def-rules-test
+ side-effect-holder] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ query]]
+
+ [clara.rules.testfacts :refer [->Temperature]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(def-rules-test test-simple-query
@@ -69,5 +57,5 @@
(is (= #{{:?l "ORD" :?t 10}}
(set (query session cold-query :?l "ORD"))))))
-
+
diff --git a/src/test/clojure/clara/test_rhs_retract.clj b/src/test/clojure/clara/test_rhs_retract.clj
new file mode 100644
index 00000000..74e9d326
--- /dev/null
+++ b/src/test/clojure/clara/test_rhs_retract.clj
@@ -0,0 +1,44 @@
+(ns clara.test-rhs-retract
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ retract
+ query
+ retract!]]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
+
+(def-rules-test test-retract!
+
+ {:rules [not-cold-rule [[[Temperature (> temperature 50)]]
+ (retract! (->Cold 20))]]
+
+ :queries [cold-query [[]
+ [[Cold (= ?t temperature)]]]]
+
+ :sessions [empty-session [not-cold-rule cold-query] {}]}
+
+ (let [session (-> empty-session
+ (insert (->Cold 20))
+ (fire-rules))]
+
+ ;; The session should contain our initial cold reading.
+ (is (= #{{:?t 20}}
+ (set (query session cold-query))))
+
+ ;; Insert a higher temperature and ensure the cold fact was retracted.
+ (is (= #{}
+ (set (query (-> session
+ (insert (->Temperature 80 "MCI"))
+ (fire-rules))
+ cold-query))))))
diff --git a/src/test/clojure/clara/test_rules.clj b/src/test/clojure/clara/test_rules.clj
index 1d2ea810..3e99af4d 100644
--- a/src/test/clojure/clara/test_rules.clj
+++ b/src/test/clojure/clara/test_rules.clj
@@ -58,16 +58,16 @@
(defn average-value
"Test accumulator that returns the average of a field"
[field]
- (accumulate
- :initial-value [0 0]
- :reduce-fn (fn [[value count] item]
- [(+ value (field item)) (inc count)])
- :combine-fn (fn [[value1 count1] [value2 count2]]
- [(+ value1 value2) (+ count1 count2)])
- :retract-fn identity-retract
- :convert-return-fn (fn [[value count]] (if (= 0 count)
- nil
- (/ value count)))))
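+  ;; acc/accum builds an accumulator from a map of :initial-value, :reduce-fn,
+  ;; :combine-fn, :retract-fn and :convert-return-fn entries.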
+ (acc/accum
+ {:initial-value [0 0]
+ :reduce-fn (fn [[value count] item]
+ [(+ value (field item)) (inc count)])
+ :combine-fn (fn [[value1 count1] [value2 count2]]
+ [(+ value1 value2) (+ count1 count2)])
+ :retract-fn identity-retract
+ :convert-return-fn (fn [[value count]] (if (= 0 count)
+ nil
+ (/ value count)))}))
(deftest test-negation-with-complex-retractions
(let [;; Non-blocked rule, where "blocked" means there is a
@@ -139,8 +139,7 @@
(let [all-temps-are-max (dsl/parse-query
[]
[[?t <- (acc/max :temperature) :from [Temperature]]
- [:not [Temperature (< temperature ?t)]]
- ])
+ [:not [Temperature (< temperature ?t)]]])
session (mk-session [all-temps-are-max])]
@@ -276,11 +275,10 @@
(deftest test-disjunction-with-nested-and
-
(let [really-cold-or-cold-and-windy
(dsl/parse-query [] [[:or [Temperature (< temperature 0) (= ?t temperature)]
- [:and [Temperature (< temperature 20) (= ?t temperature)]
- [WindSpeed (> windspeed 30) (= ?w windspeed)]]]])
+ [:and [Temperature (< temperature 20) (= ?t temperature)]
+ [WindSpeed (> windspeed 30) (= ?w windspeed)]]]])
rulebase [really-cold-or-cold-and-windy]
@@ -361,14 +359,13 @@
{:type Temperature :constraints ['(> 2 1)]}
{:type Temperature :constraints ['(> 3 2)]}
{:type Temperature :constraints ['(> 4 3)]}]
- (com/to-dnf
- [:or
- {:type Temperature :constraints ['(> 2 1)]}
- {:type Temperature :constraints ['(> 3 2)]}
- {:type Temperature :constraints ['(> 4 3)]}])))
-
+ (com/to-dnf
+ [:or
+ {:type Temperature :constraints ['(> 2 1)]}
+ {:type Temperature :constraints ['(> 3 2)]}
+ {:type Temperature :constraints ['(> 4 3)]}])))
- ;; Test simple disjunction with nested conjunction.
+;; Test simple disjunction with nested conjunction.
(is (= [:or
{:type Temperature :constraints ['(> 2 1)]}
[:and
@@ -441,10 +438,10 @@
{:type Temperature :constraints ['(> 4 3)]}]]])))
(is (= [:or
- {:type Temperature :constraints ['(> 2 1)]}
- [:and
- {:type Temperature :constraints ['(> 3 2)]}
- {:type Temperature :constraints ['(> 4 3)]}]]
+ {:type Temperature :constraints ['(> 2 1)]}
+ [:and
+ {:type Temperature :constraints ['(> 3 2)]}
+ {:type Temperature :constraints ['(> 4 3)]}]]
(com/to-dnf
[:and
@@ -519,14 +516,14 @@
(defquery cold-query
[:?l]
[Temperature (< temperature 50)
- (= ?t temperature)
- (= ?l location)])
+ (= ?t temperature)
+ (= ?l location)])
(defquery hot-query
[?l]
[Temperature (>= temperature 50)
- (= ?t temperature)
- (= ?l location)])
+ (= ?t temperature)
+ (= ?l location)])
(deftest test-defquery
(let [session (-> (mk-session [cold-query hot-query])
@@ -617,12 +614,12 @@
(insert (->WindSpeed 45 "MCI"))
(fire-rules)
(query sample/find-lousy-weather))))
- (str "Failed to find LousyWeather using sample-ruleset namespace " sample-ruleset-ns))))
+ (str "Failed to find LousyWeather using sample-ruleset namespace " sample-ruleset-ns))))
(deftest test-mark-as-fired
(let [rule-output (atom nil)
cold-rule (dsl/parse-rule [[Temperature (< temperature 20)]]
- (reset! rule-output ?__token__) )
+ (reset! rule-output ?__token__))
session (-> (mk-session [cold-rule])
(insert (->Temperature 10 "MCI"))
@@ -644,7 +641,6 @@
(is (has-fact? @rule-output (->Temperature 10 "MCI")))))
-
(deftest test-chained-inference
(let [item-query (dsl/parse-query [] [[?item <- Fourth]])
@@ -695,7 +691,7 @@
fire-rules)]
;; Finds two temperatures such that t1 is less than t2.
- (is (= #{ {:?t1 10, :?t2 15}}
+ (is (= #{{:?t1 10, :?t2 15}}
(set (query session distinct-temps-query))))))
(deftest test-test-in-negation
@@ -724,12 +720,12 @@
(deftest test-empty-test-condition
(let [exception-data {:line 123 :column 456}]
(is (assert-ex-data
- exception-data
- (dsl/parse-query*
- []
- [[:test]]
- {}
- exception-data)))))
+ exception-data
+ (dsl/parse-query*
+ []
+ [[:test]]
+ {}
+ exception-data)))))
(deftest test-multi-insert-retract
@@ -753,7 +749,6 @@
(fire-rules)
(query sample/freezing-locations)))))
-
(let [session (-> (mk-session 'clara.sample-ruleset)
(insert (->Temperature 15 "MCI"))
(insert (->WindSpeed 45 "MCI"))
@@ -769,14 +764,13 @@
(deftest test-no-loop
(let [reduce-temp (dsl/parse-rule [[?t <- Temperature (> temperature 0) (= ?v temperature)]]
- (do
- (retract! ?t)
- (insert! (->Temperature (- ?v 1) "MCI")))
- {:no-loop true})
+ (do
+ (retract! ?t)
+ (insert! (->Temperature (- ?v 1) "MCI")))
+ {:no-loop true})
temp-query (dsl/parse-query [] [[Temperature (= ?t temperature)]])
-
session (-> (mk-session [reduce-temp temp-query])
(insert (->Temperature 10 "MCI"))
(fire-rules))]
@@ -829,7 +823,6 @@
;; Only one reduced temperature should be present.
(is (= [{:?t 9}] (query session temp-query)))))
-
;; Test behavior discussed in https://github.com/cerner/clara-rules/issues/35
(deftest test-identical-facts
(let [ident-query (dsl/parse-query [] [[?t1 <- Temperature (= ?loc location)]
@@ -850,8 +843,7 @@
    ;; The inserted facts are identical, so there cannot be a non-identical match.
(is (empty? (query session ident-query)))
-
- ;; Duplications should have two matches, since either fact can bind to either condition.
+;; Duplications should have two matches, since either fact can bind to either condition.
(is (= [{:?t1 #clara.rules.testfacts.Temperature{:temperature 15, :location "MCI"}
:?t2 #clara.rules.testfacts.Temperature{:temperature 15, :location "MCI"}
:?loc "MCI"}
@@ -860,7 +852,6 @@
:?loc "MCI"}]
(query session-with-dups ident-query)))))
-
;; An EDN string for testing. This would normally be stored in an external file. The structure simply needs to be a
;; sequence of maps matching the clara.rules.schema/Production schema.
(def external-rules "[{:name \"cold-query\", :params #{:?l}, :lhs [{:type clara.rules.testfacts.Temperature, :constraints [(< temperature 50) (= ?t temperature) (= ?l location)]}]}]")
@@ -884,18 +875,17 @@
(is (= #{{:?l "ORD" :?t 10}}
(set (query session "cold-query" :?l "ORD"))))))
-
(defsession my-session 'clara.sample-ruleset)
(deftest test-defsession
(is (= #{{:?loc "MCI"} {:?loc "BOS"}}
- (set (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->Temperature 22 "BOS"))
- (insert (->Temperature 50 "SFO"))
- fire-rules
- (query sample/freezing-locations)))))
+ (set (-> my-session
+ (insert (->Temperature 15 "MCI"))
+ (insert (->Temperature 22 "BOS"))
+ (insert (->Temperature 50 "SFO"))
+ fire-rules
+ (query sample/freezing-locations)))))
(let [session (-> (mk-session 'clara.sample-ruleset)
(insert (->Temperature 15 "MCI"))
@@ -932,9 +922,9 @@
;; The special ancestor query should match everything since our trivial
;; ancestry function treats :my-ancestor as an ancestor of everything.
- (is (= #{{:?result (->Temperature 15 "MCI") }
- {:?result (->Temperature 10 "MCI") }
- {:?result (->Temperature 80 "MCI") }}
+ (is (= #{{:?result (->Temperature 15 "MCI")}
+ {:?result (->Temperature 10 "MCI")}
+ {:?result (->Temperature 80 "MCI")}}
(set (query session special-ancestor-query))))
;; There shouldn't be anything that matches our typical ancestor here.
@@ -974,10 +964,10 @@
(deftest test-extract-simple-test
(let [distinct-temps-query (dsl/parse-query [] [[Temperature (< temperature 20)
- (= ?t1 temperature)]
+ (= ?t1 temperature)]
[Temperature (< temperature 20)
- (= ?t2 temperature)
+ (= ?t2 temperature)
(< ?t1 temperature)]])
session (-> (mk-session [distinct-temps-query])
@@ -987,15 +977,15 @@
fire-rules)]
;; Finds two temperatures such that t1 is less than t2.
- (is (= #{ {:?t1 10, :?t2 15}}
+ (is (= #{{:?t1 10, :?t2 15}}
(set (query session distinct-temps-query))))))
(deftest test-extract-nested-test
(let [distinct-temps-query (dsl/parse-query [] [[Temperature (< temperature 20)
- (= ?t1 temperature)]
+ (= ?t1 temperature)]
[Temperature (< temperature 20)
- (= ?t2 temperature)
+ (= ?t2 temperature)
(< (- ?t1 0) temperature)]])
session (-> (mk-session [distinct-temps-query])
@@ -1005,10 +995,9 @@
fire-rules)]
;; Finds two temperatures such that t1 is less than t2.
- (is (= #{ {:?t1 10, :?t2 15}}
+ (is (= #{{:?t1 10, :?t2 15}}
(set (query session distinct-temps-query))))))
-
;; External structure to ensure that salience works with defrule as well.
(def salience-rule-output (atom []))
@@ -1031,12 +1020,10 @@
[:numeric-greatest-sort :salience-group :forward-order]
[:numeric-greatest-sort :neg-salience-group :backward-order]
-
[:boolean-greatest-sort :default-group :forward-order]
[:boolean-greatest-sort :salience-group :forward-order]
[:boolean-greatest-sort :neg-salience-group :backward-order]
-
[:numeric-least-sort :default-group :backward-order]
[:numeric-least-sort :salience-group :backward-order]
[:numeric-least-sort :neg-salience-group :forward-order]
@@ -1051,12 +1038,12 @@
:props {:salience 100})
rule2 (assoc
- (dsl/parse-rule [[Temperature ]]
+ (dsl/parse-rule [[Temperature]]
(swap! salience-rule-output conj 50))
:props {:salience 50})
rule3 (assoc
- (dsl/parse-rule [[Temperature ]]
+ (dsl/parse-rule [[Temperature]]
(swap! salience-rule-output conj 0))
:props {:salience 0})
@@ -1109,61 +1096,61 @@
test-fail-str)))))))
(deftest test-negation-with-extracted-test
- (let [colder-temp (dsl/parse-rule [[Temperature (= ?t temperature)]
- [:not [Cold (or (< temperature ?t)
- (< temperature 0))]]]
+ (let [colder-temp (dsl/parse-rule [[Temperature (= ?t temperature)]
+ [:not [Cold (or (< temperature ?t)
+ (< temperature 0))]]]
(insert! ^{:type :found-colder} {:found true}))
- find-colder (dsl/parse-query [] [[?f <- :found-colder]])
+ find-colder (dsl/parse-query [] [[?f <- :found-colder]])
- session (-> (mk-session [colder-temp find-colder] :cache false))]
+ session (-> (mk-session [colder-temp find-colder] :cache false))]
;; Test no token.
- (is (empty? (-> session
- (insert (->Cold 11))
- (fire-rules)
- (query find-colder))))
+ (is (empty? (-> session
+ (insert (->Cold 11))
+ (fire-rules)
+ (query find-colder))))
;; Test simple insertion.
- (is (= [{:?f {:found true}}]
- (-> session
- (insert (->Temperature 10 "MCI"))
- (insert (->Cold 11))
- (fire-rules)
- (query find-colder))))
+ (is (= [{:?f {:found true}}]
+ (-> session
+ (insert (->Temperature 10 "MCI"))
+ (insert (->Cold 11))
+ (fire-rules)
+ (query find-colder))))
;; Test insertion with right-hand match first.
- (is (= [{:?f {:found true}}]
- (-> session
- (insert (->Cold 11))
- (insert (->Temperature 10 "MCI"))
- (fire-rules)
- (query find-colder))))
+ (is (= [{:?f {:found true}}]
+ (-> session
+ (insert (->Cold 11))
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules)
+ (query find-colder))))
;; Test no fact matching not.
- (is (= [{:?f {:found true}}]
- (-> session
- (insert (->Temperature 10 "MCI"))
- (fire-rules)
- (query find-colder))))
+ (is (= [{:?f {:found true}}]
+ (-> session
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules)
+ (query find-colder))))
;; Test violate negation.
- (is (empty? (-> session
- (insert (->Cold 9))
- (insert (->Temperature 10 "MCI"))
- (fire-rules)
- (query find-colder))))
+ (is (empty? (-> session
+ (insert (->Cold 9))
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules)
+ (query find-colder))))
;; Test violate negation alternate order.
- (is (empty? (-> session
- (insert (->Temperature 10 "MCI"))
- (insert (->Cold 9))
- (fire-rules)
- (query find-colder))))
+ (is (empty? (-> session
+ (insert (->Temperature 10 "MCI"))
+ (insert (->Cold 9))
+ (fire-rules)
+ (query find-colder))))
;; Test retract violation.
- (is (= [{:?f {:found true}}]
+ (is (= [{:?f {:found true}}]
(-> session
(insert (->Cold 9))
(insert (->Temperature 10 "MCI"))
@@ -1174,23 +1161,23 @@
;; Test only partial retraction of violation,
;; ensuring the remaining violation holds.
- (is (empty? (-> session
- (insert (->Cold 9))
- (insert (->Cold 9))
- (insert (->Temperature 10 "MCI"))
- (fire-rules)
- (retract (->Cold 9))
- (fire-rules)
- (query find-colder))))
+ (is (empty? (-> session
+ (insert (->Cold 9))
+ (insert (->Cold 9))
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules)
+ (retract (->Cold 9))
+ (fire-rules)
+ (query find-colder))))
;; Test violate negation after success.
- (is (empty? (-> session
- (insert (->Cold 11))
- (insert (->Temperature 10 "MCI"))
- (fire-rules)
- (insert (->Cold 9))
- (fire-rules)
- (query find-colder))))))
+ (is (empty? (-> session
+ (insert (->Cold 11))
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules)
+ (insert (->Cold 9))
+ (fire-rules)
+ (query find-colder))))))
;; TODO: Move this once it succeeds with def-rules-test. The def-rules-test macro may
;; be stripping the metadata somewhere.
@@ -1257,29 +1244,29 @@
(let [accum-condition (dsl/parse-query []
[[?ts <- (acc/all) :from [Temperature (and ?bogus (< ?bogus temperature))]]])
negation-condition (dsl/parse-query []
- [[:not [WindSpeed (not= ?invalid location)]]])
+ [[:not [WindSpeed (not= ?invalid location)]]])
test-condition (dsl/parse-query []
- [[:test (< ?missing 10)]])
+ [[:test (< ?missing 10)]])
multi-conditions (dsl/parse-query []
- [[Temperature (= ?temp temperature)
- (= ?loc location)]
- [Temperature (= ?loc location)
- (< ?temp temperature)]
- [Cold (< ?extra1 ?temp ?extra2)]])
+ [[Temperature (= ?temp temperature)
+ (= ?loc location)]
+ [Temperature (= ?loc location)
+ (< ?temp temperature)]
+ [Cold (< ?extra1 ?temp ?extra2)]])
nested-conditions (dsl/parse-query []
[[?t <- Temperature (= ?temp temperature)
- (= ?loc location)]
- [Cold (= ?temp temperature)
+ (= ?loc location)]
+ [Cold (= ?temp temperature)
;; Demonstrating using an available :fact-binding
- (some? (:location ?t))
- (and (< ?unbound temperature 10))]])
+ (some? (:location ?t))
+ (and (< ?unbound temperature 10))]])
nested-accum-conditions (dsl/parse-query []
[[Temperature (= ?loc location)]
[?ts <- (acc/all) :from [Temperature (= ?loc location) (< ?invalid temperature)]]])
bool-conditions (dsl/parse-query []
[[?t <- Temperature (= ?temp temperature)
- (= ?loc location)]
+ (= ?loc location)]
[:or
[Cold (= ?temp temperature)
@@ -1322,8 +1309,8 @@
rule {:name "clara.test-destructured-binding/test-destructured-binding"
:env {:rule-output rule-output-env} ; Rule environment so we can check its output.
:lhs '[{:args [[e a v]]
- :type :foo
- :constraints [(= e 1) (= v ?value)]}]
+ :type :foo
+ :constraints [(= e 1) (= v ?value)]}]
:rhs '(reset! rule-output ?value)}]
(-> (mk-session [rule] :fact-type-fn second)
@@ -1361,8 +1348,8 @@
:lhs '[{:type :test
:constraints []}]
:rhs '(clara.rules/insert! ^{:type :result}
- {:r :r1
- :v all-rules})}
+ {:r :r1
+ :v all-rules})}
           ;; However, rule structures are not required to specify a :ns-name if
;; they do not need them. This would be safe if the :rhs form already
;; had qualified symbols used, which may typically be the case for
@@ -1371,8 +1358,8 @@
:lhs '[{:type :test
:constraints []}]
:rhs `(insert! ^{:type :result}
- {:r :r2
- :v locals-shadowing-tester})}
+ {:r :r2
+ :v locals-shadowing-tester})}
q (dsl/parse-query [] [[?r <- :result]])]
(is (= #{{:r :r1
:v srs/all-rules}
@@ -1433,8 +1420,8 @@
(let [q (dsl/parse-query []
;; Make two conditions that are very similar, but differ
;; where a nil will be compared to something else.
- [[(accumulate :retract-fn identity-retract :reduce-fn (fn [x y] nil)) :from [Temperature]]
- [(accumulate :retract-fn identity-retract :reduce-fn (fn [x y] 10)) :from [Temperature]]])
+ [[(acc/accum {:retract-fn identity-retract :reduce-fn (fn [x y] nil)}) :from [Temperature]]
+ [(acc/accum {:retract-fn identity-retract :reduce-fn (fn [x y] 10)}) :from [Temperature]]])
s (mk-session [q])]
;; Mostly just ensuring the rulebase was compiled successfully.
@@ -1620,7 +1607,7 @@
[s1 s2 s3 s4 s5 s6 s7 s8])]
(is (= distinct-sessions
[s1 s2 s5 s6 s8]))
- (reset! @#'com/session-cache original-cache)))
+ (reset! @#'com/session-cache original-cache)))
#_{:clj-kondo/ignore [:unresolved-symbol]}
(deftest test-try-eval-failures-includes-compile-ctx
@@ -1658,7 +1645,6 @@
:else 1))))
[]))))
-
(def some-rules [(assoc (dsl/parse-query [] [[Temperature (< temperature 40) (= ?t temperature) (= ?l location)]])
:name "cold")
(assoc (dsl/parse-query [] [[Temperature (> temperature 80) (= ?t temperature) (= ?l location)]])
@@ -1689,7 +1675,7 @@
;; Test bogus symbol
(is (thrown? clojure.lang.ExceptionInfo
- (mk-session 'clara.test-rules/bogus-symbol))))
+ (mk-session 'clara.test-rules/bogus-symbol))))
(defprotocol Getter
(getX [this])
@@ -1941,7 +1927,7 @@
(let [q (dsl/parse-query [] [[Temperature (= ?t (+ 5 temperature)) (< ?t 10)]])
;; Test that a value must be bound before use.
- invalid (dsl/parse-query [] [[Temperature (< ?t 10) (= ?t (+ 5 temperature))]] )
+ invalid (dsl/parse-query [] [[Temperature (< ?t 10) (= ?t (+ 5 temperature))]])
s (mk-session [q] :cache false)]
@@ -1954,10 +1940,10 @@
;; Item that does satisfy second criterion should match.
(is (= [{:?t 5}]
- (-> s
- (insert (->Temperature 0 "MCI"))
- (fire-rules)
- (query q))))
+ (-> s
+ (insert (->Temperature 0 "MCI"))
+ (fire-rules)
+ (query q))))
;; The variable used out of order should be marked as unbound.
(assert-ex-data {:variables #{'?t}}
@@ -2016,13 +2002,13 @@
(let [accum-state (atom [])
stateful-accum (acc/accum
- {:initial-value []
- :reduce-fn conj
- :retract-fn (fn [items retracted] (remove #{retracted} items))
- :convert-return-fn (fn [items]
- (do
- (swap! accum-state conj items)
- items))})
+ {:initial-value []
+ :reduce-fn conj
+ :retract-fn (fn [items retracted] (remove #{retracted} items))
+ :convert-return-fn (fn [items]
+ (do
+ (swap! accum-state conj items)
+ items))})
common-ancestor-rule (dsl/parse-rule [[?lists <- stateful-accum :from [List]]]
;; don't care about whats inserted
@@ -2032,8 +2018,8 @@
linked-list (LinkedList.)
ses (-> (mk-session [common-ancestor-rule])
- (insert-all [array-list linked-list])
- (fire-rules))]
+ (insert-all [array-list linked-list])
+ (fire-rules))]
(is (not-any? #(= 1 (count %)) @accum-state)
"Facts with common ancestors should be batched together, expected either the initial accumulator value or a vector containing both lists but never a vector containing one list.")))
@@ -2419,12 +2405,13 @@
(deftest test-duplicate-name
(assert-ex-data {:names #{::rules-data/is-cold-and-windy-data}}
(com/mk-session*
- (set (com/add-production-load-order (conj (rules-data/weather-rules-with-keyword-names)
- {:doc "An extra rule to test for duplicate names."
- :name :clara.rules.test-rules-data/is-cold-and-windy-data
- :lhs []
- :rhs '(println "I have no meaning outside of this test")}))) {})))
+ (set (com/add-production-load-order (conj (rules-data/weather-rules-with-keyword-names)
+ {:doc "An extra rule to test for duplicate names."
+ :name :clara.rules.test-rules-data/is-cold-and-windy-data
+ :lhs []
+ :rhs '(println "I have no meaning outside of this test")}))) {})))
+#_{:clj-kondo/ignore [:unresolved-symbol]}
(deftest test-negation-multiple-children-exception
(let [not-rule (dsl/parse-rule [[:not
[Hot (= ?t temperature)]
diff --git a/src/test/common/clara/test_rules_require.cljc b/src/test/clojure/clara/test_rules_require.clj
similarity index 82%
rename from src/test/common/clara/test_rules_require.cljc
rename to src/test/clojure/clara/test_rules_require.clj
index f98e60d8..22f588f1 100644
--- a/src/test/common/clara/test_rules_require.cljc
+++ b/src/test/clojure/clara/test_rules_require.clj
@@ -2,10 +2,8 @@
(:require [clara.tools.testing-utils :as tu]
[clara.rule-defs :as rd]
[clara.rules.testfacts :as facts]
- #?(:clj [clojure.test :refer [is deftest run-tests testing use-fixtures]])
- #?(:cljs [cljs.test :refer-macros [is deftest run-tests testing use-fixtures]])
- #?(:clj [clara.rules :refer [insert fire-rules query defsession]])
- #?(:cljs [clara.rules :refer [insert fire-rules query] :refer-macros [defsession]])))
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules :refer [insert fire-rules query defsession]]))
;; Tests the case where rules/facts are required from a different namespace where the session is defined,
;; without an explicit :refer.
@@ -51,5 +49,5 @@
(is (= #{{:?fact (facts/->ColdAndWindy 15 45)}}
(set
- (query session rd/find-cold-and-windy))))))
+ (query session rd/find-cold-and-windy))))))
diff --git a/src/test/clojure/clara/test_setup.clj b/src/test/clojure/clara/test_setup.clj
new file mode 100644
index 00000000..498cb939
--- /dev/null
+++ b/src/test/clojure/clara/test_setup.clj
@@ -0,0 +1,18 @@
+(ns clara.test-setup
+ "Things we need to do in order to run tests with a smile"
+ (:require [kaocha.hierarchy :as hierarchy]
+ [pjstadig.humane-test-output :as hto]))
+
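+;; Event types that derive from :kaocha/known-key are recognized by kaocha's
+;; reporters, so events re-typed to ::ignore below are not flagged as unknown.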
+(hierarchy/derive! ::ignore :kaocha/known-key)
+
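+;; humane-test-output pretty-prints expected/actual diffs for failing `is` assertions.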
+(hto/activate!)
+
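+;; Intended to be registered as a kaocha pre-report hook (see the doc linked in
+;; the docstring below) so zero-assertion events are ignored rather than failing the run.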
+(defn defuse-zero-assertions
+ "Don't fail the test suite if we hide an `is` within a `doseq`.
+
+ See also https://cljdoc.org/d/lambdaisland/kaocha/1.80.1274/doc/-clojure-test-assertion-extensions#detecting-missing-assertions"
+ [event]
+ (if (= (:type event) :kaocha.type.var/zero-assertions)
+ (assoc event :type ::ignore)
+ event))
+
diff --git a/src/test/common/clara/test_simple_rules.cljc b/src/test/clojure/clara/test_simple_rules.clj
similarity index 70%
rename from src/test/common/clara/test_simple_rules.cljc
rename to src/test/clojure/clara/test_simple_rules.clj
index f72d0d5e..6560c027 100644
--- a/src/test/common/clara/test_simple_rules.cljc
+++ b/src/test/clojure/clara/test_simple_rules.clj
@@ -3,52 +3,24 @@
;; test namespaces due to this simplicity. This functionality is transitively
;; tested by numerous other tests, but there is some value in having direct tests
;; in case the complexity of those tests obscured a simpler issue.
-#?(:clj
- (ns clara.test-simple-rules
- (:require [clara.tools.testing-utils :refer [def-rules-test
- side-effect-holder] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert-all!
- insert!
- retract
- query]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->ColdAndWindy ->LousyWeather]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold
- WindSpeed
- ColdAndWindy
- LousyWeather]))
-
- :cljs
- (ns clara.test-simple-rules
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- insert-all!
- retract
- query]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy
- ->LousyWeather LousyWeather]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st]
- [clara.tools.testing-utils :refer [side-effect-holder] :as tu])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-simple-rules
+ (:require
+ [clara.rules :refer [fire-rules insert query retract insert! retract! insert-all!]]
+ [clara.rules.accumulators]
+ [clara.rules.testfacts :refer [->Temperature ->WindSpeed ->Cold ->LousyWeather]]
+ [clara.tools.testing-utils :refer [def-rules-test side-effect-holder] :as tu]
+ [clojure.test :refer [is use-fixtures]]
+ [schema.test :as st])
+ (:import
+ [java.lang IllegalArgumentException]
+ [clara.rules.testfacts
+ Temperature
+ WindSpeed
+ Cold
+ ColdAndWindy
+ LousyWeather]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
(use-fixtures :each tu/side-effect-holder-fixture)
(defn- has-fact? [token fact]
@@ -191,7 +163,7 @@
(insert (->Temperature -10 "MCI"))
fire-rules)]
- (is (has-fact? (:cold @side-effect-holder) (->Temperature -10 "MCI") ))
+ (is (has-fact? (:cold @side-effect-holder) (->Temperature -10 "MCI")))
(is (has-fact? (:subzero @side-effect-holder) (->Temperature -10 "MCI")))))
@@ -213,14 +185,11 @@
(is (= #{{:?t 10}}
(set (query session temp-query :?t 10))))
- (is (thrown-with-msg? #?(:clj IllegalArgumentException :cljs js/Error)
- expected-msg
+ (is (thrown-with-msg? IllegalArgumentException expected-msg
(query session temp-query :?another-param 42)))
- (is (thrown-with-msg? #?(:clj IllegalArgumentException :cljs js/Error)
- expected-msg
+ (is (thrown-with-msg? IllegalArgumentException expected-msg
(query session temp-query)))
- (is (thrown-with-msg? #?(:clj IllegalArgumentException :cljs js/Error)
- expected-msg
+ (is (thrown-with-msg? IllegalArgumentException expected-msg
(query session temp-query :?t 42 :?another-param 42)))))
diff --git a/src/test/common/clara/test_testing_utils.cljc b/src/test/clojure/clara/test_testing_utils.clj
similarity index 68%
rename from src/test/common/clara/test_testing_utils.cljc
rename to src/test/clojure/clara/test_testing_utils.clj
index 00179cea..98126676 100644
--- a/src/test/common/clara/test_testing_utils.cljc
+++ b/src/test/clojure/clara/test_testing_utils.clj
@@ -1,24 +1,13 @@
-#?(:clj
- (ns clara.test-testing-utils
- (:require [clara.tools.testing-utils :refer [def-rules-test
- run-performance-test]]
- [clara.rules :as r]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold]]
- [clojure.test :refer [is deftest run-tests] :as t])
- (:import [clara.rules.testfacts
- Temperature
- Cold]))
-
- :cljs
- (ns clara.test-testing-utils
- (:require [clara.rules :as r]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold]]
- [cljs.test :as t]
- [clara.tools.testing-utils :refer [run-performance-test]])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer (is deftest run-tests)])))
+(ns clara.test-testing-utils
+ (:require [clara.tools.testing-utils :refer [def-rules-test
+ run-performance-test]]
+ [clara.rules :as r]
+
+ [clara.rules.testfacts :refer [->Temperature ->Cold]]
+ [clojure.test :refer [is deftest run-tests] :as t])
+ (:import [clara.rules.testfacts
+ Temperature
+ Cold]))
(def test-ran-atom (atom false))
diff --git a/src/test/common/clara/test_truth_maintenance.cljc b/src/test/clojure/clara/test_truth_maintenance.clj
similarity index 83%
rename from src/test/common/clara/test_truth_maintenance.cljc
rename to src/test/clojure/clara/test_truth_maintenance.clj
index 41dfe6f2..d2a1d00c 100644
--- a/src/test/common/clara/test_truth_maintenance.cljc
+++ b/src/test/clojure/clara/test_truth_maintenance.clj
@@ -1,58 +1,32 @@
-#?(:clj
- (ns clara.test-truth-maintenance
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- insert-unconditional!
- insert-all-unconditional!
- retract
- retract!
- query]]
- [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
- ->TemperatureHistory ->LousyWeather
- ->ColdAndWindy ->First ->Second ->Third]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators :as acc]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- TemperatureHistory
- Cold
- ColdAndWindy
- WindSpeed
- LousyWeather
- First
- Second
- Third]))
-
- :cljs
- (ns clara.test-truth-maintenance
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-unconditional!
- insert-all-unconditional!
- insert-all
- retract
- retract!
- query]]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->TemperatureHistory TemperatureHistory
- ->Cold Cold
- ->ColdAndWindy ColdAndWindy
- ->WindSpeed WindSpeed
- ->LousyWeather LousyWeather
- ->First First
- ->Second Second
- ->Third Third]]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
+(ns clara.test-truth-maintenance
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ insert-unconditional!
+ insert-all-unconditional!
+ retract
+ retract!
+ query]]
+ [clara.rules.testfacts :refer [->Temperature ->Cold ->WindSpeed
+ ->TemperatureHistory ->LousyWeather
+ ->ColdAndWindy ->First ->Second ->Third]]
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.accumulators :as acc]
+ [schema.test :as st])
+ (:import [clara.rules.testfacts
+ Temperature
+ TemperatureHistory
+ Cold
+ ColdAndWindy
+ WindSpeed
+ LousyWeather
+ First
+ Second
+ Third]))
+
+(use-fixtures :once st/validate-schemas tu/opts-fixture)
;; Any tests using this atom are expected to reset its value during initialization
;; of the test rather than requiring tests to reset after completing.
@@ -308,12 +282,12 @@
(is (= [{:?t (->TemperatureHistory #{(->Temperature 20 "MCI")
(->Temperature 25 "MCI")
(->Temperature 30 "SFO")})}]
-
+
(-> session1 fire-rules (query temp-query))
(-> session2 fire-rules (query temp-query))
(-> session3 fire-rules (query temp-query))
(-> session4 fire-rules (query temp-query))
-
+
(-> session5 fire-rules (query temp-query))
(-> session6 fire-rules (query temp-query))
(-> session7 fire-rules (query temp-query))
@@ -421,10 +395,10 @@
{:rules [r1 [[[First]]
;; As of writing the engine rearranges
;; this so that the retraction comes last.
- (do (retract! (->Cold 5))
- (insert! (->Third)))]
+ (do (retract! (->Cold 5))
+ (insert! (->Third)))]
r2 [[[Second]
- [:not [Third]]]
+ [:not [Third]]]
(insert! (->Cold 5))]]
:queries [q [[] [[Cold (= ?t temperature)]]]]
@@ -432,13 +406,13 @@
:sessions [base-session [r1 r2 q] {}]}
(is (= (-> base-session
- (insert (->Second))
- (fire-rules)
- (insert (->First))
- (fire-rules)
- (query q))
- [])
- "A retraction that becomes redundant after reordering of insertions
+ (insert (->Second))
+ (fire-rules)
+ (insert (->First))
+ (fire-rules)
+ (query q))
+ [])
+ "A retraction that becomes redundant after reordering of insertions
and retractions due to batching should not cause failure."))
(def-rules-test test-remove-pending-activation-with-equal-previous-insertion
diff --git a/src/test/common/clara/tools/test_fact_graph.cljc b/src/test/clojure/clara/tools/test_fact_graph.clj
similarity index 95%
rename from src/test/common/clara/tools/test_fact_graph.cljc
rename to src/test/clojure/clara/tools/test_fact_graph.clj
index b3eeeebc..b6cb979b 100644
--- a/src/test/common/clara/tools/test_fact_graph.cljc
+++ b/src/test/clojure/clara/tools/test_fact_graph.clj
@@ -4,12 +4,9 @@
[clara.rules :as cr]
[clara.rules.accumulators :as acc]
[schema.test :as st]
- #?(:clj [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- :cljs [cljs.test :refer-macros [is deftest run-tests testing use-fixtures]])
- #?(:clj [clara.rules.testfacts :as tf]
- :cljs [clara.rules.testfacts :refer [Cold WindSpeed ColdAndWindy] :as tf]))
- #?(:clj
- (:import [clara.rules.testfacts Cold WindSpeed ColdAndWindy])))
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.testfacts :as tf])
+ (:import [clara.rules.testfacts Cold WindSpeed ColdAndWindy]))
(use-fixtures :once st/validate-schemas)
diff --git a/src/test/common/clara/tools/test_inspect.cljc b/src/test/clojure/clara/tools/test_inspect.clj
similarity index 91%
rename from src/test/common/clara/tools/test_inspect.cljc
rename to src/test/clojure/clara/tools/test_inspect.clj
index 1a847b6b..e245c7df 100644
--- a/src/test/common/clara/tools/test_inspect.cljc
+++ b/src/test/clojure/clara/tools/test_inspect.clj
@@ -9,24 +9,13 @@
[clara.rules.accumulators :as acc]
[schema.test :as st]
[clojure.walk :as w]
- #?(:cljs [goog.string :as gstr])
- #?(:cljs [cljs.core :refer [ExceptionInfo]])
- #?(:clj [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- :cljs [cljs.test :refer-macros [is deftest run-tests testing use-fixtures]])
- #?(:clj [clara.rules.testfacts :refer :all]
- :cljs [clara.rules.testfacts :refer [Temperature TemperatureHistory
- WindSpeed Cold Hot ColdAndWindy LousyWeather
- First Second Third Fourth
- map->Cold map->Hot
- ->Temperature ->TemperatureHistory
- ->WindSpeed ->Cold ->Hot ->ColdAndWindy ->LousyWeather
- ->First ->Second ->Third ->Fourth]]))
- #?(:clj
- (:import [clara.rules.testfacts Temperature TemperatureHistory
- WindSpeed Cold Hot ColdAndWindy LousyWeather
- First Second Third Fourth]
- [clojure.lang
- ExceptionInfo])))
+ [clojure.test :refer [is deftest run-tests testing use-fixtures]]
+ [clara.rules.testfacts :refer :all])
+ (:import [clara.rules.testfacts Temperature TemperatureHistory
+ WindSpeed Cold Hot ColdAndWindy LousyWeather
+ First Second Third Fourth]
+ [clojure.lang
+ ExceptionInfo]))
(use-fixtures :once schema.test/validate-schemas)
@@ -191,8 +180,7 @@
constraints (map (comp :constraints :condition) matches)
term (flatten constraints)]
(is (not (and (symbol? term)
- #?(:clj (.startsWith (name term) "?__gen"))
- #?(:cljs (gstr/startsWith (name term) "?__gen"))))))))
+ (.startsWith (name term) "?__gen")))))))
(defn session->accumulated-facts-map
"Given a session, return a map of logically inserted facts to any accumulated-over facts
@@ -225,21 +213,21 @@
matches))]
(as-> session x
- (inspect x)
- (:fact->explanations x)
- (into {}
- (map (fn [[k v]]
- [k (->> v
- fail-on-multiple-insertions
- first
- :explanation
- :matches
- (filter (comp :accumulator :condition))
- fail-on-multiple-accum-conditions
- first
- :facts-accumulated
- frequencies)]))
- x))))
+ (inspect x)
+ (:fact->explanations x)
+ (into {}
+ (map (fn [[k v]]
+ [k (->> v
+ fail-on-multiple-insertions
+ first
+ :explanation
+ :matches
+ (filter (comp :accumulator :condition))
+ fail-on-multiple-accum-conditions
+ first
+ :facts-accumulated
+ frequencies)]))
+ x))))
(tu/def-rules-test test-get-matching-accum-facts-with-no-previous-conditions-and-new-binding
{:rules [min-freezing-at-loc-rule [[[?min <- (acc/min :temperature) from [Temperature (< temperature 0) (= ?loc location)]]]
@@ -332,7 +320,7 @@
empty-with-temps-complex-join-unused-previous-binding [windspeed-with-temps-complex-join-unused-previous-binding
temp-history-query] {:cache false}
empty-with-temps-complex-join-subsequent-binding [windspeed-with-temps-complex-join-subsequent-binding
- temp-history-query] {:cache false}]}
+ temp-history-query] {:cache false}]}
(doseq [[empty-session
join-type
unused-previous-binding
@@ -435,9 +423,9 @@
[condition-matches]
(let [rename-fn (fn [form]
(if
- (and
- (map? form)
- (contains? form :original-constraints))
+ (and
+ (map? form)
+ (contains? form :original-constraints))
(-> form
(dissoc :original-constraints)
(assoc :constraints (:original-constraints form)))
@@ -530,18 +518,18 @@
(query complex-successful-join cold-windy-query)
[{:?t 0 :?w 50}]))
- (is (= (get-condition-match simple-successful-join #?(:clj Temperature :cljs `Temperature))
- (get-condition-match complex-successful-join #?(:clj Temperature :cljs `Temperature))
+ (is (= (get-condition-match simple-successful-join Temperature)
+ (get-condition-match complex-successful-join Temperature)
[(->Temperature 0 "MCI")]))
- (is (= (get-condition-match simple-failed-join #?(:clj Temperature :cljs `Temperature))
- (get-condition-match complex-failed-join #?(:clj Temperature :cljs `Temperature))
+ (is (= (get-condition-match simple-failed-join Temperature)
+ (get-condition-match complex-failed-join Temperature)
[(->Temperature 0 "ORD")]))
- (is (= (get-condition-match simple-successful-join #?(:clj WindSpeed :cljs `WindSpeed))
- (get-condition-match complex-successful-join #?(:clj WindSpeed :cljs `WindSpeed))
- (get-condition-match simple-failed-join #?(:clj WindSpeed :cljs `WindSpeed))
- (get-condition-match complex-failed-join #?(:clj WindSpeed :cljs `WindSpeed))
+ (is (= (get-condition-match simple-successful-join WindSpeed)
+ (get-condition-match complex-successful-join WindSpeed)
+ (get-condition-match simple-failed-join WindSpeed)
+ (get-condition-match complex-failed-join WindSpeed)
[(->WindSpeed 50 "MCI")]))))
(tu/def-rules-test test-explain-activations-does-not-crash
@@ -554,7 +542,7 @@
(is (with-out-str (explain-activations session)))))
(tu/def-rules-test test-unconditional-rule-matches
-
+
{:rules [cold-rule [[[Cold]]
(insert-unconditional! (->LousyWeather))]]
:sessions [base-session [cold-rule] {}]}
@@ -635,4 +623,4 @@
(catch ExceptionInfo exc
(is (= (ex-data exc)
{:node-id "9001"
- :simple-name "AN-9001-AE"}))))))
\ No newline at end of file
+ :simple-name "AN-9001-AE"}))))))
diff --git a/src/test/common/clara/tools/test_tracing.cljc b/src/test/clojure/clara/tools/test_tracing.clj
similarity index 70%
rename from src/test/common/clara/tools/test_tracing.cljc
rename to src/test/clojure/clara/tools/test_tracing.clj
index d560b8f9..ece16714 100644
--- a/src/test/common/clara/tools/test_tracing.cljc
+++ b/src/test/clojure/clara/tools/test_tracing.clj
@@ -1,39 +1,18 @@
-#?(:clj
- (ns clara.tools.test-tracing
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- retract
- retract!
- query]]
- [clara.tools.tracing :as t]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer :all]
- [clojure.test :refer :all])
- (:import [clara.rules.testfacts Temperature WindSpeed Cold Hot TemperatureHistory
- ColdAndWindy LousyWeather First Second Third Fourth]))
-
- :cljs
- (ns clara.tools.test-tracing
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- retract!
- insert-all
- retract
- query]]
- [clara.tools.tracing :as t]
- [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->TemperatureHistory TemperatureHistory
- ->Hot Hot
- ->Cold Cold
- ->ColdAndWindy ColdAndWindy
- ->WindSpeed WindSpeed]])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
+(ns clara.tools.test-tracing
+ (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
+ [clara.rules :refer [fire-rules
+ insert
+ insert-all
+ insert!
+ retract
+ retract!
+ query]]
+ [clara.tools.tracing :as t]
+ [clara.rules.accumulators :as acc]
+ [clara.rules.testfacts :refer :all]
+ [clojure.test :refer :all])
+ (:import [clara.rules.testfacts Temperature WindSpeed Cold Hot TemperatureHistory
+ ColdAndWindy LousyWeather First Second Third Fourth]))
(def-rules-test test-tracing-toggle
{:rules [cold-rule [[[Temperature (< temperature 20)]]
@@ -85,7 +64,7 @@
:alpha-retract :right-retract :left-retract])
"Validate that a retract! call in the RHS side of a rule appears in the trace
before the :right-retract")))
-
+
(def-rules-test test-accumulate-trace
{:queries [coldest-query [[]
[[?t <- (acc/min :temperature :returns-fact true) from [Temperature]]]]]
@@ -125,14 +104,14 @@
(insert! (->Cold ?temperature))]]
:sessions [empty-session [cold-rule] {}]}
- (let [session (-> empty-session
- (t/with-tracing)
- (insert (->Temperature 10 "MCI"))
- (fire-rules))]
+ (let [session (-> empty-session
+ (t/with-tracing)
+ (insert (->Temperature 10 "MCI"))
+ (fire-rules))]
;; Ensure expected events occur in order.
- (is (= [:add-facts :alpha-activate :right-activate :left-activate
- :add-activations :fire-activation :add-facts-logical :activation-group-transition]
+ (is (= [:add-facts :alpha-activate :right-activate :left-activate
+ :add-activations :fire-activation :add-facts-logical :activation-group-transition]
(map :type (t/get-trace session))))))
(def-rules-test test-insert-and-retract-trace
@@ -140,7 +119,7 @@
(insert! (->Cold ?temperature))]]
:sessions [empty-session [cold-rule] {:cache false}]}
- (let [session (-> empty-session
+ (let [session (-> empty-session
(t/with-tracing)
(insert (->Temperature 10 "MCI")
(->Temperature 20 "MCI"))
@@ -148,17 +127,17 @@
(retract (->Temperature 10 "MCI"))
(fire-rules))
- session-trace (t/get-trace session)]
+ session-trace (t/get-trace session)]
;; Ensure expected events occur in order.
- (is (= [:add-facts :alpha-activate :right-activate :left-activate :add-activations :fire-activation
- :add-facts-logical :activation-group-transition :retract-facts :alpha-retract :right-retract
- :left-retract :remove-activations :retract-facts-logical]
- (map :type session-trace)))
+ (is (= [:add-facts :alpha-activate :right-activate :left-activate :add-activations :fire-activation
+ :add-facts-logical :activation-group-transition :retract-facts :alpha-retract :right-retract
+ :left-retract :remove-activations :retract-facts-logical]
+ (map :type session-trace)))
;; Ensure only the expected fact was indicated as retracted.
- (let [retraction (first (filter #(= :retract-facts-logical (:type %)) session-trace))]
- (is (= [(->Cold 10)] (:facts retraction))))))
+ (let [retraction (first (filter #(= :retract-facts-logical (:type %)) session-trace))]
+ (is (= [(->Cold 10)] (:facts retraction))))))
(def-rules-test test-ranked-productions
{:rules [temperature-rule [[[Temperature (= ?temperature temperature) (< temperature 20)]]
diff --git a/src/test/clojurescript/clara/test.cljs b/src/test/clojurescript/clara/test.cljs
deleted file mode 100644
index 4f54f66d..00000000
--- a/src/test/clojurescript/clara/test.cljs
+++ /dev/null
@@ -1,66 +0,0 @@
-(ns clara.test
- (:require-macros [cljs.test :as test])
- (:require [clara.test-rules]
- [clara.test-rules-require]
- [cljs.test]
- [clara.test-salience]
- [clara.test-complex-negation]
- [clara.test-common]
- [clara.test-testing-utils]
- [clara.test-accumulators]
- [clara.test-exists]
- [clara.tools.test-tracing]
- [clara.tools.test-fact-graph]
- [clara.tools.test-inspect]
- [clara.test-truth-maintenance]
- [clara.test-dsl]
- [clara.test-accumulation]
- [clara.test-memory]
- [clara.test-simple-rules]
- [clara.test-rhs-retract]
- [clara.test-bindings]
- [clara.test-clear-ns-productions]
- [clara.test-negation]
- [clara.performance.test-rule-execution]
- [clara.test-node-sharing]
- [clara.test-queries]))
-
-(enable-console-print!)
-
-(def ^:dynamic *successful?* nil)
-
-(defmethod cljs.test/report [:cljs.test/default :end-run-tests] [m]
- (if (cljs.test/successful? m)
- (do
- (println "Success!")
- (reset! *successful?* true))
- (do
- (println "FAIL")
- (reset! *successful?* false))))
-
-(defn ^:export run []
- (binding [*successful?* (atom nil)]
- (test/run-tests 'clara.test-rules
- 'clara.test-rules-require
- 'clara.test-common
- 'clara.test-salience
- 'clara.test-testing-utils
- 'clara.test-complex-negation
- 'clara.test-accumulators
- 'clara.test-exists
- 'clara.tools.test-tracing
- 'clara.tools.test-fact-graph
- 'clara.tools.test-inspect
- 'clara.test-truth-maintenance
- 'clara.test-dsl
- 'clara.test-accumulation
- 'clara.test-memory
- 'clara.test-simple-rules
- 'clara.test-rhs-retract
- 'clara.test-bindings
- 'clara.test-clear-ns-productions
- 'clara.test-negation
- 'clara.performance.test-rule-execution
- 'clara.test-node-sharing
- 'clara.test-queries)
- @*successful?*))
diff --git a/src/test/clojurescript/clara/test_complex_negation.cljs b/src/test/clojurescript/clara/test_complex_negation.cljs
deleted file mode 100644
index 8f0a4a5f..00000000
--- a/src/test/clojurescript/clara/test_complex_negation.cljs
+++ /dev/null
@@ -1,80 +0,0 @@
-(ns clara.test-complex-negation
- "Tests that validate that we wrap the fact-type-fn and ancestors-fn so that Clara's internal
- facts, for example NegationResult facts (added to fix issue 149) are not provided to user-provided
- custom fact-type-fn or ancestors-fn functions."
- (:require [clara.rules
- :refer-macros [defquery
- defsession]
- :refer [query
- insert
- fire-rules]]
- [clara.rules.testfacts :refer [Temperature ->Temperature
- WindSpeed ->WindSpeed
- Cold ->Cold]]
- [cljs.test :as t]
- [cljs.test :refer-macros [run-tests
- deftest
- is]
- :include-macros true]))
-
-(defquery negation-inside-negation-query
- []
- [:windspeed (= ?l (:location this))]
- [:not [:and
- [?t <- :temperature (= ?l (:location this))]
- [:not [:cold (= (:temperature this) (:temperature ?t))]]]])
-
-;; Use ancestors of the fact types to ensure that the custom ancestors-fn
-;; is used and that its arguments are the types from the custom fact-type-fn
-(defquery negation-inside-negation-ancestors-query
- []
- [:windspeed-ancestor (= ?l (:location this))]
- [:not [:and
- [?t <- :temperature-ancestor (= ?l (:location this))]
- [:not [:cold-ancestor (= (:temperature this) (:temperature ?t))]]]])
-
-(defn type->keyword
- [fact]
- (cond
- (instance? WindSpeed fact) :windspeed
- (instance? Temperature fact) :temperature
- (instance? Cold fact) :cold
- ;; If we reach the :else case then we are probably calling the user-provided :fact-type-fn
- ;; on an internal NegationResult fact which we should not do; see issue 241.
- :else (throw (ex-info "A fact that is not a WindSpeed, Temperature, or Cold was provided."
- {:fact fact}))))
-
-(defn keyword->ancestors
- [type-key]
- (condp = type-key
- :windspeed #{:windspeed-ancestor}
- :temperature #{:temperature-ancestor}
- :cold #{:cold-ancestor}
-
- (throw "A type that is not :windspeed, :temperature, or :cold was provided"
- {:type type})))
-
-(defsession test-session 'clara.test-complex-negation
- :fact-type-fn type->keyword
- :ancestors-fn keyword->ancestors)
-
-(deftest test-complex-negation
- (let [different-temps (-> test-session
- (insert (->WindSpeed 10 "MCI")
- (->Temperature 10 "MCI")
- (->Cold 20))
- (fire-rules))
-
- same-temps (-> test-session
- (insert (->WindSpeed 10 "MCI")
- (->Temperature 10 "MCI")
- (->Cold 10))
- (fire-rules))]
- (is (empty?
- (query different-temps negation-inside-negation-query)))
- (is (empty?
- (query different-temps negation-inside-negation-ancestors-query)))
-
- (is (= [{:?l "MCI"}]
- (query same-temps negation-inside-negation-query)
- (query same-temps negation-inside-negation-ancestors-query)))))
diff --git a/src/test/clojurescript/clara/test_rules.cljs b/src/test/clojurescript/clara/test_rules.cljs
deleted file mode 100644
index aa302263..00000000
--- a/src/test/clojurescript/clara/test_rules.cljs
+++ /dev/null
@@ -1,240 +0,0 @@
-(ns clara.test-rules
- (:require-macros [cljs.test :refer (is deftest run-tests testing)]
- [clara.rules.test-rules-data])
- (:require [cljs.test :as t]
- [clara.rules.engine :as eng]
- [clara.rules.accumulators :as acc]
- [clara.rules :refer [insert retract fire-rules query insert!]
- :refer-macros [defrule defsession defquery]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy]]))
-
-(comment
-;; Launch browser repl.
- (cemerick.piggieback/cljs-repl :repl-env (cemerick.austin/exec-env))
-)
-
-(defn- has-fact? [token fact]
- (some #{fact} (map first (:matches token))))
-
-(def simple-defrule-side-effect (atom nil))
-(def other-defrule-side-effect (atom nil))
-
-(defrule test-rule
- [Temperature (< temperature 20)]
- =>
- (reset! other-defrule-side-effect ?__token__)
- (reset! simple-defrule-side-effect ?__token__))
-
-(defquery cold-query
- []
- [Temperature (< temperature 20) (== ?t temperature)])
-
-;; Accumulator for getting the lowest temperature.
-(def lowest-temp (acc/min :temperature))
-
-(defquery coldest-query
- []
- [?t <- lowest-temp :from [Temperature]])
-
-(defrule is-cold-and-windy
- "Rule to determine whether it is indeed cold and windy."
-
- (Temperature (< temperature 20) (== ?t temperature))
- (WindSpeed (> windspeed 30) (== ?w windspeed))
- =>
- (insert! (->ColdAndWindy ?t ?w)))
-
-(defrule is-cold-and-windy-map
- "A rule which uses a custom type on a map, to determine whether it
- is indeed cold and windy"
-
- [:temp [{degrees :degrees}] (< degrees 20) (== ?t degrees)]
- [:wind [{mph :mph}] (> mph 30) (== ?w mph)]
- =>
- (insert! {:type :cold-and-windy
- :temp ?t
- :wind ?w}))
-
-(defrule throw-on-bad-temp
- "Rule to test exception flow."
- [Temperature (> temperature 10000) (= ?t temperature)]
- =>
- (throw (ex-info "Bad temperature!" {:temp ?t})))
-
-(defquery find-cold-and-windy
- []
- [?fact <- ColdAndWindy])
-
-(defquery find-cold-and-windy-map
- []
- [?fact <- :cold-and-windy])
-
-(defquery wind-without-temperature
- []
- [WindSpeed (== ?w windspeed)]
- [:not [Temperature]])
-
-(defquery wind-with-temperature
- []
- [WindSpeed (== ?w windspeed) (== ?loc location)]
- [Temperature (== ?t temperature) (== ?loc location)])
-
-;; The idea here is that Number will resolve to java.lang.Number in a Clojure environment,
-;; so this validates that we correctly handle symbols in a ClojureScript rule that happen
-;; to resolve to something in a Clojure environment. Since ClojureScript's compiler
-;; is in Clojure failing to handle this correctly can cause us to attempt to embed
-;; Java objects in ClojureScript code, which won't work. See issue 300.
-(defrecord Number [value])
-
-(defquery num-query
- []
- [?n <- Number])
-
-(defsession my-session 'clara.test-rules)
-(defsession my-session-map 'clara.test-rules :fact-type-fn :type)
-(defsession my-session-data (clara.rules.test-rules-data/weather-rules))
-
-(deftest test-number-query
- (is (= (-> my-session
- (insert (->Number 5))
- fire-rules
- (query num-query))
- [{:?n (->Number 5)}])))
-
-(deftest test-simple-defrule
- (let [session (insert my-session (->Temperature 10 "MCI"))]
-
- (fire-rules session)
-
- (is (has-fact? @simple-defrule-side-effect (->Temperature 10 "MCI")))
- (is (has-fact? @other-defrule-side-effect (->Temperature 10 "MCI")))))
-
-(deftest test-simple-query
- (let [session (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->Temperature 10 "MCI"))
- (insert (->Temperature 80 "MCI"))
- fire-rules)]
-
-    ;; The query should identify all items that were inserted and matched the
- ;; expected criteria.
- (is (= #{{:?t 15} {:?t 10}}
- (set (query session cold-query))))))
-
-(deftest test-simple-accumulator
- (let [session (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->Temperature 10 "MCI"))
- (insert (->Temperature 80 "MCI"))
- fire-rules)]
-
- ;; Accumulator returns the lowest value.
- (is (= #{{:?t 10}}
- (set (query session coldest-query))))))
-
-(deftest test-simple-insert
- (let [session (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
-
- (is (= #{{:?fact (->ColdAndWindy 15 45)}}
- (set
- (query session find-cold-and-windy))))))
-
-(deftest test-simple-insert-map
-
- (let [session (-> my-session-map
- (insert {:type :temp :degrees 15})
- (insert {:type :wind :mph 45})
- (fire-rules))]
- (is (= #{{:?fact {:type :cold-and-windy :temp 15 :wind 45}}}
- (set
- (query session find-cold-and-windy-map))))))
-
-(deftest test-simple-insert-data
-
- (let [session (-> my-session-data
- (insert (->Temperature 15 "MCI"))
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
- (is (= #{{:?fact (->ColdAndWindy 15 45)}}
- (set
- (query session "clara.rules.test-rules-data/find-cold-and-windy-data"))))))
-
-(deftest test-no-temperature
-
- ;; Test that a temperature cancels the match.
- (let [session (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
-
- (is (= #{}
- (set
- (query session wind-without-temperature)))))
-
- ;; Now test the no temperature scenario.
- (let [session (-> my-session
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
-
- (is (= #{{:?w 45}}
- (set
- (query session wind-without-temperature))))))
-
-
-(deftest test-simple-join
-
- (let [session (-> my-session
- (insert (->Temperature 15 "MCI"))
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
-
- (is (= #{{:?w 45 :?t 15 :?loc "MCI"}}
- (set
- (query session wind-with-temperature))))))
-
-(deftest test-throw-rhs
-
- (try
- (-> my-session
- (insert (->Temperature 999999 "MCI"))
- (fire-rules))
- (catch :default e
-
- (is (= {:?t 999999}
- (:bindings (ex-data e))))
- (is (= "clara.test-rules/throw-on-bad-temp"
- (:name (ex-data e)))))))
-
-(deftest test-remove-pending-rule-activation
- (let [no-activations-session (-> my-session
- (insert (->Temperature -10 "ORD")
- (->WindSpeed 50 "ORD"))
- (retract (->WindSpeed 50 "ORD"))
- fire-rules)
-
- one-activation-session (-> my-session
- (insert (->Temperature -10 "ORD")
- (->WindSpeed 50 "ORD")
- (->WindSpeed 50 "ORD"))
- (retract (->WindSpeed 50 "ORD"))
- fire-rules)]
-
- (is (= (query no-activations-session find-cold-and-windy) []))
- (is (= (query one-activation-session find-cold-and-windy)
- [{:?fact (->ColdAndWindy -10 50)}]))))
-
-;;; Basic test of keyword names
-(defsession my-session-data-with-keyword-names (clara.rules.test-rules-data/weather-rules-with-keyword-names))
-(deftest test-simple-insert-data-with-keyword-names
-
- (let [session (-> my-session-data-with-keyword-names
- (insert (->Temperature 15 "MCI"))
- (insert (->WindSpeed 45 "MCI"))
- (fire-rules))]
- (is (= [{:?fact (->ColdAndWindy 15 45)}]
- (query session :clara.rules.test-rules-data/find-cold-and-windy-data)))))
\ No newline at end of file
diff --git a/src/test/clojurescript/clara/test_salience.cljs b/src/test/clojurescript/clara/test_salience.cljs
deleted file mode 100644
index 96632abe..00000000
--- a/src/test/clojurescript/clara/test_salience.cljs
+++ /dev/null
@@ -1,115 +0,0 @@
-(ns clara.test-salience
- (:require-macros [cljs.test :refer (is deftest run-tests testing)]
- [clara.rules.test-rules-data])
- (:require [cljs.test :as t]
- [clara.rules.engine :as eng]
- [clara.rules.accumulators :as acc]
- [clara.rules :refer [insert retract fire-rules query insert!]
- :refer-macros [defrule defsession defquery]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->WindSpeed WindSpeed
- ->ColdAndWindy ColdAndWindy]]))
-
-(def salience-rule-output (atom []))
-
-(defrule salience-rule1
- {:salience 100}
- [Temperature]
- =>
- (swap! salience-rule-output conj 100))
-
-(defrule salience-rule2
- {:salience 50}
- [Temperature]
- =>
- (swap! salience-rule-output conj 50))
-
-(defrule salience-rule3
- {:salience 0}
- [Temperature]
- =>
- (swap! salience-rule-output conj 0))
-
-(defrule salience-rule4
- {:salience -50}
- [Temperature]
- =>
- (swap! salience-rule-output conj -50))
-
-
-(deftest test-salience
- (doseq [[sort-fn
- group-fn
- expected-order]
-
- [[:default-sort :default-group :forward-order]
- [:default-sort :salience-group :forward-order]
- [:default-sort :neg-salience-group :backward-order]
-
- [:numeric-greatest-sort :default-group :forward-order]
- [:numeric-greatest-sort :salience-group :forward-order]
- [:numeric-greatest-sort :neg-salience-group :backward-order]
-
-
- [:boolean-greatest-sort :default-group :forward-order]
- [:boolean-greatest-sort :salience-group :forward-order]
- [:boolean-greatest-sort :neg-salience-group :backward-order]
-
-
- [:numeric-least-sort :default-group :backward-order]
- [:numeric-least-sort :salience-group :backward-order]
- [:numeric-least-sort :neg-salience-group :forward-order]
-
- [:boolean-least-sort :default-group :backward-order]
- [:boolean-least-sort :salience-group :backward-order]
- [:boolean-least-sort :neg-salience-group :forward-order]]]
-
- (let [numeric-greatest-sort (fn [x y]
- (cond
- (= x y) 0
- (> x y) -1
- :else 1))
-
- numeric-least-sort (fn [x y]
- (numeric-greatest-sort y x))
-
- salience-group-fn (fn [production]
- (or (some-> production :props :salience)
- 0))
-
- neg-salience-group-fn (fn [p]
- (- (salience-group-fn p)))]
-
- ;; A CLJS macro that behaves like mk-session (creates a session but does not intern a Var)
- ;; has been proposed in #292. Internally, this would facilitate session generation for CLJS
- ;; tests such as this one, and may be useful if exposed publicly.
-
- (defsession test-salience-session 'clara.test-salience
- :cache false
- :activation-group-sort-fn (condp = sort-fn
- :default-sort nil
- :numeric-greatest-sort numeric-greatest-sort
- :numeric-least-sort numeric-least-sort
- :boolean-greatest-sort >
- :boolean-least-sort <)
- :activation-group-fn (condp = group-fn
- :default-group nil
- :salience-group salience-group-fn
- :neg-salience-group neg-salience-group-fn))
-
- (reset! salience-rule-output [])
-
- (-> test-salience-session
- (insert (->Temperature 10 "MCI"))
- (fire-rules))
-
- (let [test-fail-str
- (str "Failure with sort-fn: " sort-fn ", group-fn: " group-fn ", and expected order: " expected-order)]
- (condp = expected-order
- :forward-order
- (is (= [100 50 0 -50] @salience-rule-output)
- test-fail-str)
-
- :backward-order
- (is (= [-50 0 50 100] @salience-rule-output)
- test-fail-str))))))
diff --git a/src/test/common/clara/rule_defs.cljc b/src/test/common/clara/rule_defs.cljc
deleted file mode 100644
index 44229983..00000000
--- a/src/test/common/clara/rule_defs.cljc
+++ /dev/null
@@ -1,39 +0,0 @@
-(ns clara.rule-defs
- (:require [clara.rules.accumulators :as acc]
- [clara.rules.testfacts :as tf]
- [clara.tools.testing-utils :as tu]
- #?(:clj [clara.rules :refer [defrule defquery insert!]])
- #?(:cljs [clara.rules :refer-macros [defrule defquery] :refer [insert!]]))
- #?(:clj
- (:import [clara.rules.testfacts Temperature WindSpeed ColdAndWindy])))
-
-;; Rule definitions used for tests in clara.test-rules-require.
-
-(defrule test-rule
- [?t <- #?(:clj Temperature :cljs tf/Temperature) (< temperature 20)]
- =>
- (reset! tu/side-effect-holder ?t))
-
-(defquery cold-query
- []
- [#?(:clj Temperature :cljs tf/Temperature) (< temperature 20) (== ?t temperature)])
-
-;; Accumulator for getting the lowest temperature.
-(def lowest-temp (acc/min :temperature))
-
-(defquery coldest-query
- []
- [?t <- lowest-temp :from [#?(:clj Temperature :cljs tf/Temperature)]])
-
-(defrule is-cold-and-windy
- "Rule to determine whether it is indeed cold and windy."
-
- (#?(:clj Temperature :cljs tf/Temperature) (< temperature 20) (== ?t temperature))
- (#?(:clj WindSpeed :cljs tf/WindSpeed) (> windspeed 30) (== ?w windspeed))
- =>
- (insert! (tf/->ColdAndWindy ?t ?w)))
-
-(defquery find-cold-and-windy
- []
- [?fact <- #?(:clj ColdAndWindy :cljs tf/ColdAndWindy)])
-
diff --git a/src/test/common/clara/test_clear_ns_productions.cljc b/src/test/common/clara/test_clear_ns_productions.cljc
deleted file mode 100644
index 2a0b52a5..00000000
--- a/src/test/common/clara/test_clear_ns_productions.cljc
+++ /dev/null
@@ -1,84 +0,0 @@
-;;; Tests that clear-ns-productions! correctly clears all vars marked as productions from the namespace.
-#?(:clj
- (ns clara.test-clear-ns-productions
- (:require [clara.tools.testing-utils :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert!
- query
- defrule
- defquery
- defsession
- clear-ns-productions!]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]))
- :cljs
- (ns clara.test-clear-ns-productions
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- query]]
- [cljs.test]
- [clara.tools.testing-utils :as tu])
- (:require-macros [clara.rules :refer [defrule defquery defsession clear-ns-productions!]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :each tu/side-effect-holder-fixture)
-
-(defrule rule-to-be-cleared
- [:a]
- =>
- (reset! tu/side-effect-holder :before-clearing)
- (insert! :before-clearing))
-
-(defquery query-to-be-cleared [] [?f <- :before-clearing])
-
-#?(:clj
- (def ^:production-seq ns-production-seq-to-be-cleared
- [{:doc "Before clearing"
- :name "clara.test-clear-ns-productions/production-seq-to-be-cleared"
- :lhs '[{:type :a
- :constraints []}]
- :rhs '(clara.rules/insert! :before-clearing-seq)}]))
-
-(defsession uncleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
-
-(clear-ns-productions!)
-
-(defrule rule-after-clearing
- [:a]
- =>
- (insert! :after-clearing))
-
-(defquery query-before-clearing [] [?f <- :before-clearing])
-(defquery query-after-clearing [] [?f <- :after-clearing])
-(defquery query-before-clearing-seq [] [?f <- :before-clearing-seq])
-(defquery query-after-clearing-seq [] [?f <- :after-clearing-seq])
-#?(:clj
- (def ^:production-seq production-seq-after-clearing
- [{:doc "After clearing"
- :name "clara.test-clear-ns-productions/production-seq-after-clearing"
- :lhs '[{:type :a
- :constraints []}]
- :rhs '(clara.rules/insert! :after-clearing-seq)}]))
-
-(defsession cleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
-
-;;; Then tests validating what productions the respective sessions have.
-(deftest cleared?
- (let [uncleared (-> uncleared-session (insert :a) (fire-rules))]
- (is (= :before-clearing @tu/side-effect-holder))
- (reset! tu/side-effect-holder nil))
- (let [cleared (-> cleared-session (insert :a) (fire-rules))]
- (testing "cleared-session should not contain any productions before (clear-ns-productions!)"
- (is (= nil @tu/side-effect-holder))
- (is (empty? (query cleared query-before-clearing)))
- #?(:clj (is (not-empty (query cleared query-after-clearing)))))
- (is (empty? (query cleared query-before-clearing-seq)))
- #?(:clj (is (not-empty (query cleared query-after-clearing-seq))))))
-
-(deftest query-cleared?
- (let [uncleared (-> uncleared-session (insert :a) (fire-rules))
- cleared (-> cleared-session (insert :a) (fire-rules))]
- (is (not-empty (query uncleared "clara.test-clear-ns-productions/query-to-be-cleared")))
- (is (thrown-with-msg? #?(:clj IllegalArgumentException :cljs js/Error) #"clara.test-clear-ns-productions/query-to-be-cleared"
- (query cleared "clara.test-clear-ns-productions/query-to-be-cleared")))))
\ No newline at end of file
diff --git a/src/test/common/clara/test_performance_optimizations.cljc b/src/test/common/clara/test_performance_optimizations.cljc
deleted file mode 100644
index 0fccfebc..00000000
--- a/src/test/common/clara/test_performance_optimizations.cljc
+++ /dev/null
@@ -1,62 +0,0 @@
-;; These tests validate that operations that the rules engine should optimize
-;; away are in fact optimized away. The target here is not the actual execution time,
-;; which will vary per system, but verification that the operations in question are not performed.
-#?(:clj
- (ns clara.test-performance-optimizations
- (:require [clara.tools.testing-utils :refer [def-rules-test
- side-effect-holder] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert!
- query]]
-
- [clara.rules.testfacts :refer [->Cold ->ColdAndWindy]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Cold
- ColdAndWindy]))
-
- :cljs
- (ns clara.test-performance-optimizations
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- query]]
- [clara.rules.testfacts :refer [->Cold Cold
- ->ColdAndWindy ColdAndWindy]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st]
- [clara.tools.testing-utils :refer [side-effect-holder] :as tu])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
-(use-fixtures :each tu/side-effect-holder-fixture)
-
-#?(:clj
- (defmacro true-if-binding-absent
- []
- (not (contains? &env '?unused-binding))))
-
-;; See issue https://github.com/cerner/clara-rules/issues/383
-;; This validates that we don't create let bindings for binding
-;; variables that aren't used. Doing so both imposes runtime costs
-;; and increases the size of the generated code that must be evaluated.
-(def-rules-test test-unused-rhs-binding-not-bound
-
- {:rules [cold-windy-rule [[[ColdAndWindy (= ?used-binding temperature) (= ?unused-binding windspeed)]]
- (when (true-if-binding-absent)
- (insert! (->Cold ?used-binding)))]]
-
- :queries [cold-query [[] [[Cold (= ?c temperature)]]]]
-
- :sessions [empty-session [cold-windy-rule cold-query] {}]}
-
- (is (= [{:?c 0}]
- (-> empty-session
- (insert (->ColdAndWindy 0 0))
- fire-rules
- (query cold-query)))))
diff --git a/src/test/common/clara/test_rhs_retract.cljc b/src/test/common/clara/test_rhs_retract.cljc
deleted file mode 100644
index 5a6e592e..00000000
--- a/src/test/common/clara/test_rhs_retract.cljc
+++ /dev/null
@@ -1,62 +0,0 @@
-#?(:clj
- (ns clara.test-rhs-retract
- (:require [clara.tools.testing-utils :refer [def-rules-test] :as tu]
- [clara.rules :refer [fire-rules
- insert
- insert-all
- insert!
- retract
- query
- retract!]]
-
- [clara.rules.testfacts :refer [->Temperature ->Cold]]
- [clojure.test :refer [is deftest run-tests testing use-fixtures]]
- [clara.rules.accumulators]
- [schema.test :as st])
- (:import [clara.rules.testfacts
- Temperature
- Cold]))
-
- :cljs
- (ns clara.test-rhs-retract
- (:require [clara.rules :refer [fire-rules
- insert
- insert!
- insert-all
- retract
- query
- retract!]]
- [clara.rules.testfacts :refer [->Temperature Temperature
- ->Cold Cold]]
- [clara.rules.accumulators]
- [cljs.test]
- [schema.test :as st])
- (:require-macros [clara.tools.testing-utils :refer [def-rules-test]]
- [cljs.test :refer [is deftest run-tests testing use-fixtures]])))
-
-(use-fixtures :once st/validate-schemas #?(:clj tu/opts-fixture))
-
-(def-rules-test test-retract!
-
- {:rules [not-cold-rule [[[Temperature (> temperature 50)]]
- (retract! (->Cold 20))]]
-
- :queries [cold-query [[]
- [[Cold (= ?t temperature)]]]]
-
- :sessions [empty-session [not-cold-rule cold-query] {}]}
-
- (let [session (-> empty-session
- (insert (->Cold 20))
- (fire-rules))]
-
- ;; The session should contain our initial cold reading.
- (is (= #{{:?t 20}}
- (set (query session cold-query))))
-
- ;; Insert a higher temperature and ensure the cold fact was retracted.
- (is (= #{}
- (set (query (-> session
- (insert (->Temperature 80 "MCI"))
- (fire-rules))
- cold-query))))))
diff --git a/src/test/html/advanced.html b/src/test/html/advanced.html
deleted file mode 100644
index 5ccb1ba6..00000000
--- a/src/test/html/advanced.html
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/src/test/html/simple.html b/src/test/html/simple.html
deleted file mode 100644
index fb3cbdcf..00000000
--- a/src/test/html/simple.html
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
diff --git a/src/test/js/runner.js b/src/test/js/runner.js
deleted file mode 100644
index f99628cc..00000000
--- a/src/test/js/runner.js
+++ /dev/null
@@ -1,37 +0,0 @@
-var page = require('webpage').create();
-var system = require('system');
-
-if (system.args.length !== 2) {
- console.log('Expected a target URL parameter.');
- phantom.exit(1);
-}
-
-page.onConsoleMessage = function (message) {
- console.log(message);
-};
-
-var url = system.args[1];
-
-page.open(url, function (status) {
- if (status !== "success") {
- console.log('Failed to open ' + url);
- setTimeout(function() {
- phantom.exit(1);
- }, 0);
- }
-
- // Note that we have to return a primitive value from this function
- // rather than setting a closure variable. The executed function is sandboxed
- // by PhantomJS and can't set variables outside its scope.
- var success = page.evaluate(function() {
- return clara.test.run();
- });
-
- setTimeout(function() {
- if (success){
- phantom.exit(0);
- } else {
- phantom.exit(1);
- }
- }, 0);
-});
diff --git a/tests.edn b/tests.edn
new file mode 100644
index 00000000..2fa1537c
--- /dev/null
+++ b/tests.edn
@@ -0,0 +1,19 @@
+#kaocha/v1 {:capture-output? true
+ :kaocha/fail-fast? false
+ :plugins [:kaocha.plugin/profiling
+ :kaocha.plugin/gc-profiling
+ :kaocha.plugin/print-invocations
+ :kaocha.plugin/hooks
+ :preloads]
+ :kaocha.hooks/pre-report [clara.test-setup/defuse-zero-assertions]
+ :kaocha.plugin.preloads/ns-names [clara.test-setup]
+ :tests [{:id :unit
+ :kaocha/source-paths ["src/main/clojure"]
+ :kaocha/test-paths ["src/test/clojure"]
+ :ns-patterns [".*"]
+ :skip-meta [:generative]}
+ {:id :generative
+ :kaocha/source-paths ["src/main/clojure"]
+ :kaocha/test-paths ["src/test/clojure"]
+ :ns-patterns [".*"]
+ :focus-meta [:generative]}]}
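
The kaocha configuration above defines two suites over the same source and test paths: a :unit suite that skips tests tagged :generative, and a :generative suite that focuses on them. As a hedged illustration (not part of this change), the same suites can be run from a REPL with kaocha's REPL helper, assuming the lambdaisland/kaocha dependency from the :test alias is on the classpath:

    (require '[kaocha.repl :as k])

    (k/run :unit)        ;; runs the :unit suite defined in tests.edn
    (k/run :generative)  ;; runs only the tests tagged ^:generative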
diff --git a/tool/build.clj b/tool/build.clj
new file mode 100644
index 00000000..a7f42adb
--- /dev/null
+++ b/tool/build.clj
@@ -0,0 +1,19 @@
+(ns build
+ (:require [clojure.tools.build.api :as b]))
+
+(def basis (b/create-basis {:project "deps.edn"}))
+
+(defn- compile-java
+ [sources classes]
+ (b/javac {:src-dirs sources
+ :class-dir classes
+ :basis basis
+ :javac-opts ["--release" "11"]}))
+
+(defn compile-main-java
+ [_]
+ (compile-java ["src/main/java"] "target/main/classes"))
+
+(defn compile-test-java
+ [_]
+ (compile-java ["src/test/java"] "target/test/classes"))
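
The two public functions above are tools.build entry points; each takes a single ignored argument so it can be invoked through the Clojure CLI's -T execution. A minimal sketch of calling them from a REPL, assuming a :build alias in deps.edn puts this build namespace on the classpath (the project Makefile invokes the same entry points as clojure -T:build compile-main-java):

    (comment
      ;; Equivalent to running clojure -T:build compile-main-java / compile-test-java.
      (require 'build)
      (build/compile-main-java nil)
      (build/compile-test-java nil))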
From 36d4283a7b2a3520cee106319f45b11ab1cbe0f1 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 19:50:59 -0600
Subject: [PATCH 02/87] chore: remove old project.clj file
---
project.clj | 101 ----------------------------------------------------
1 file changed, 101 deletions(-)
delete mode 100644 project.clj
diff --git a/project.clj b/project.clj
deleted file mode 100644
index 4309d8b8..00000000
--- a/project.clj
+++ /dev/null
@@ -1,101 +0,0 @@
-(defproject k13labs/clara-rules "0.24.0-SNAPSHOT"
- :description "Clara Rules Engine"
- :url "https://github.com/k13labs/clara-rules"
- :license {:name "Apache License Version 2.0"
- :url "https://www.apache.org/licenses/LICENSE-2.0"}
- :dependencies [[org.clojure/clojure "1.7.0"]
- [prismatic/schema "1.1.6"]]
- :profiles {:dev {:dependencies [[org.clojure/math.combinatorics "0.1.3"]
- [org.clojure/data.fressian "0.2.1"]
- [clj-kondo/clj-kondo "2023.04.14"]]
- :java-source-paths ["src/test/java"]
- :global-vars {*warn-on-reflection* true}}
- :provided {:dependencies [[org.clojure/clojurescript "1.7.170"]]}
- :recent-clj {:dependencies [^:replace [org.clojure/clojure "1.9.0"]
- ^:replace [org.clojure/clojurescript "1.9.946"]]}
- :java9 {:jvm-opts ["--add-modules=java.xml.bind"]}}
- :plugins [[lein-codox "0.10.3" :exclusions [org.clojure/clojure
- org.clojure/clojurescript]]
- [lein-javadoc "0.3.0" :exclusions [org.clojure/clojure
- org.clojure/clojurescript]]
- [lein-cljsbuild "1.1.7" :exclusions [org.clojure/clojure
- org.clojure/clojurescript]]
- [lein-figwheel "0.5.14" :exclusions [org.clojure/clojure
- org.clojure/clojurescript]]
- [com.github.clj-kondo/lein-clj-kondo "0.2.4" :exclusions [org.clojure/clojure
- org.clojure/clojurescript]]]
- :aliases {"clj-kondo-deps" ["clj-kondo" "--copy-configs" "--dependencies" "--parallel" "--lint" "$classpath"]
- "clj-kondo-lint" ["do" ["clj-kondo-deps"] ["clj-kondo" "--lint" "src/main:src/test" "--fail-level" "error"]]}
- :codox {:namespaces [clara.rules clara.rules.dsl clara.rules.accumulators
- clara.rules.listener clara.rules.durability
- clara.tools.inspect clara.tools.tracing
- clara.tools.fact-graph]
- :metadata {:doc/format :markdown}}
- :javadoc-opts {:package-names "clara.rules"}
- :source-paths ["src/main/clojure"]
- :resource-paths ["clj-kondo"]
- :test-paths ["src/test/clojure" "src/test/common"]
- :java-source-paths ["src/main/java"]
- :javac-options ["-target" "1.6" "-source" "1.6"]
- :clean-targets ^{:protect false} ["resources/public/js" "target"]
- :hooks [leiningen.cljsbuild]
- :cljsbuild {:builds [;; Simple mode compilation for tests.
- {:id "figwheel"
- :source-paths ["src/test/clojurescript" "src/test/common"]
- :figwheel true
- :compiler {:main "clara.test"
- :output-to "resources/public/js/simple.js"
- :output-dir "resources/public/js/out"
- :asset-path "js/out"
- :optimizations :none}}
-
- {:id "simple"
- :source-paths ["src/test/clojurescript" "src/test/common"]
- :compiler {:output-to "target/js/simple.js"
- :optimizations :whitespace}}
-
- ;; Advanced mode compilation for tests.
- {:id "advanced"
- :source-paths ["src/test/clojurescript" "src/test/common"]
- :compiler {:output-to "target/js/advanced.js"
- :anon-fn-naming-policy :mapped
- :optimizations :advanced}}]
-
- :test-commands {"phantom-simple" ["phantomjs"
- "src/test/js/runner.js"
- "src/test/html/simple.html"]
-
- "phantom-advanced" ["phantomjs"
- "src/test/js/runner.js"
- "src/test/html/advanced.html"]}}
-
- :repl-options {;; The large number of ClojureScript tests is causing long compilation times
- ;; to start the REPL.
- :timeout 180000}
-
- ;; Factoring out the duplication of this test selector function causes an error,
- ;; perhaps because Leiningen is using this as uneval'ed code.
- ;; For now just duplicate the line.
- :test-selectors {:default (complement (fn [x]
- (let [blacklisted-packages #{"generative" "performance"}
- patterns (into []
- (comp
- (map #(str "^clara\\." % ".*"))
- (interpose "|"))
- blacklisted-packages)]
- (some->> x :ns ns-name str (re-matches (re-pattern (apply str patterns)))))))
- :generative (fn [x] (some->> x :ns ns-name str (re-matches #"^clara\.generative.*")))
- :performance (fn [x] (some->> x :ns ns-name str (re-matches #"^clara\.performance.*")))}
-
- :scm {:name "git"
- :url "https://github.com/k13labs/clara-rules"}
- :pom-addition [:developers [:developer
- [:id "k13gomez"]
- [:name "Jose Gomez"]
- [:url "http://www.k13labs.com/clara-rules"]]]
- :deploy-repositories [["snapshots" {:url "https://oss.sonatype.org/content/repositories/snapshots/"
- :creds :gpg}]
- ["clojars" {:url "https://repo.clojars.org"
- :username :env/clojars_username
- :password :env/clojars_password
- :sign-releases false}]])
From 0c1ea125152709622cd1685049fff2b259e15f7c Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:05:42 -0600
Subject: [PATCH 03/87] fix: ensure build runs, add linter support
---
.github/workflows/clojure.yml | 21 +++++++--------------
Makefile | 4 ++++
deps.edn | 4 +++-
3 files changed, 14 insertions(+), 15 deletions(-)
diff --git a/.github/workflows/clojure.yml b/.github/workflows/clojure.yml
index a6ed758e..9f0fffc1 100644
--- a/.github/workflows/clojure.yml
+++ b/.github/workflows/clojure.yml
@@ -15,21 +15,14 @@ jobs:
uses: actions/setup-java@v1.4.4
with:
# The Java version to make available on the path. Takes a whole or semver Java version, or 1.x syntax (e.g. 1.8 => Java 8.x). Early access versions can be specified in the form of e.g. 14-ea, 14.0.0-ea, or 14.0.0-ea.28
- java-version: 1.8
- - name: Setup Node.js environment
- uses: actions/setup-node@v2.5.2
- with:
- # Version Spec of the version to use. Examples: 12.x, 10.15.1, >=10.15.0
- node-version: 18.16.0
- - name: Install PhantomJS
- run: npm install -g phantomjs-prebuilt
+ java-version: 11
- name: Install dependencies
- run: lein deps
+ run: clj -X:deps prep
- name: Run tests
- run: lein test
+ run: make test
- name: Run generative tests
- run: lein test :generative
- - name: Run recent-clj tests
- run: lein with-profile dev,recent-clj test
+ run: lein test-generative
- name: Run clj-kondo linter
- run: lein with-profile dev,recent-clj clj-kondo-lint
+ run: lein lint
+ - name: Run build jar
+ run: make clean build
diff --git a/Makefile b/Makefile
index d92ea926..64c1b834 100644
--- a/Makefile
+++ b/Makefile
@@ -24,6 +24,10 @@ test-config:
clean:
rm -rf pom.xml target build
+lint: compile-test-java
+ clj -M:dev:test:clj-kondo --copy-configs --dependencies --parallel --lint "$(shell clj -A:dev:test -Spath)"
+ clj -M:dev:test:clj-kondo --lint "src/main:src/test" --fail-level "error"
+
build: compile-main-java
clj -Spom
clj -X:jar \
diff --git a/deps.edn b/deps.edn
index cdab20a1..e748f41e 100644
--- a/deps.edn
+++ b/deps.edn
@@ -15,9 +15,11 @@
:dev {:extra-paths ["dev"]
:extra-deps {reloaded.repl/reloaded.repl {:mvn/version "0.2.4"}
org.clojure/math.combinatorics {:mvn/version "0.1.3"}
- clj-kondo/clj-kondo {:mvn/version "2023.04.14"}
criterium/criterium {:mvn/version "0.4.6"}}}
+ :clj-kondo {:deps {clj-kondo/clj-kondo {:mvn/version "2023.04.14"}}
+ :main-opts ["-m" "clj-kondo.main"]}
+
:test {:extra-paths ["src/test/clojure" "target/test/classes"]
:extra-deps {lambdaisland/kaocha {:mvn/version "1.80.1274"}
org.clojure/test.check {:mvn/version "1.1.1"}
From fcff5c7951b3f72f56d9b42398e61d1ac29db55d Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:10:52 -0600
Subject: [PATCH 04/87] feat: ensure clean build deploy
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 64c1b834..7fd6b053 100644
--- a/Makefile
+++ b/Makefile
@@ -36,7 +36,7 @@ build: compile-main-java
:artifact-id "clara-rules" \
:version '"$(VERSION)"'
-deploy:
+deploy: clean build
clj -X:deploy-maven
install:
From 2478d16111ad17fcccfd76fa482e1f69e4e578cc Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:17:20 -0600
Subject: [PATCH 05/87] fix: build add clojure and corretto jvm
---
.github/workflows/clojure.yml | 23 ++++++++++++++---------
1 file changed, 14 insertions(+), 9 deletions(-)
diff --git a/.github/workflows/clojure.yml b/.github/workflows/clojure.yml
index 9f0fffc1..76a26b86 100644
--- a/.github/workflows/clojure.yml
+++ b/.github/workflows/clojure.yml
@@ -10,19 +10,24 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
- - name: Setup Java JDK
- uses: actions/setup-java@v1.4.4
+ - name: Checkout code
+ uses: actions/checkout@v3
+ - name: Setup Java
+ uses: actions/setup-java@v3
with:
- # The Java version to make available on the path. Takes a whole or semver Java version, or 1.x syntax (e.g. 1.8 => Java 8.x). Early access versions can be specified in the form of e.g. 14-ea, 14.0.0-ea, or 14.0.0-ea.28
- java-version: 11
- - name: Install dependencies
- run: clj -X:deps prep
+ # see: https://github.com/marketplace/actions/setup-java
+ distribution: 'corretto'
+ java-version: '11'
+ - name: Install clojure tools
+ uses: DeLaGuardo/setup-clojure@12.1
+ with:
+ # see: https://github.com/marketplace/actions/setup-clojure
+ cli: 1.11.1.1429
- name: Run tests
run: make test
- name: Run generative tests
- run: lein test-generative
+ run: make test-generative
- name: Run clj-kondo linter
- run: lein lint
+ run: make lint
- name: Run build jar
run: make clean build
From ec822d35187d63672f771b9bd0cafe1afaefeca5 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:21:40 -0600
Subject: [PATCH 06/87] feat: use clojure
---
Makefile | 28 ++++++++++++++--------------
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/Makefile b/Makefile
index 7fd6b053..6043a797 100644
--- a/Makefile
+++ b/Makefile
@@ -4,46 +4,46 @@ SHELL := /bin/bash
VERSION := 0.9.0-SNAPSHOT
compile-main-java:
- clj -T:build compile-main-java
+ clojure -T:build compile-main-java
compile-test-java: compile-main-java
- clj -T:build compile-test-java
+ clojure -T:build compile-test-java
repl: compile-test-java
- clj -M:dev:test:repl
+ clojure -M:dev:test:repl
test: compile-test-java
- clj -M:dev:test:runner --focus :unit --reporter kaocha.report/tap
+ clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/tap
test-generative: compile-test-java
- clj -M:dev:test:runner --focus :generative --reporter kaocha.report/tap
+ clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/tap
test-config:
- clj -M:dev:test:runner --print-config
+ clojure -M:dev:test:runner --print-config
clean:
rm -rf pom.xml target build
lint: compile-test-java
- clj -M:dev:test:clj-kondo --copy-configs --dependencies --parallel --lint "$(shell clj -A:dev:test -Spath)"
- clj -M:dev:test:clj-kondo --lint "src/main:src/test" --fail-level "error"
+ clojure -M:dev:test:clj-kondo --copy-configs --dependencies --parallel --lint "$(shell clojure -A:dev:test -Spath)"
+ clojure -M:dev:test:clj-kondo --lint "src/main:src/test" --fail-level "error"
build: compile-main-java
- clj -Spom
- clj -X:jar \
+ clojure -Spom
+ clojure -X:jar \
:sync-pom true \
:group-id "k13labs" \
:artifact-id "clara-rules" \
:version '"$(VERSION)"'
deploy: clean build
- clj -X:deploy-maven
+ clojure -X:deploy-maven
install:
- clj -X:install-maven
+ clojure -X:install-maven
format-check:
- clj -M:format-check
+ clojure -M:format-check
format-fix:
- clj -M:format-fix
+ clojure -M:format-fix
From 5f8d726da2ca92d298622dfff0ec11c93844ec9b Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:30:38 -0600
Subject: [PATCH 07/87] feat: cleanup
---
.gitignore | 1 +
Makefile | 2 +-
build/.gitkeep | 0
build/META-INF/MANIFEST.MF | 4 -
.../maven/k13labs/clara-rules/pom.properties | 6 -
build/clara/rules.clj | 257 --
build/clara/rules/accumulators.clj | 212 --
build/clara/rules/compiler.clj | 2039 ----------------
build/clara/rules/dsl.clj | 315 ---
build/clara/rules/durability.clj | 699 ------
build/clara/rules/durability/fressian.clj | 642 -----
build/clara/rules/engine.clj | 2119 -----------------
build/clara/rules/java.clj | 50 -
build/clara/rules/listener.clj | 176 --
build/clara/rules/memory.clj | 909 -------
build/clara/rules/platform.clj | 93 -
build/clara/rules/schema.clj | 200 --
build/clara/rules/test_rules_data.clj | 37 -
build/clara/rules/testfacts.clj | 24 -
build/clara/rules/update_cache/cancelling.clj | 146 --
build/clara/rules/update_cache/core.clj | 32 -
build/clara/tools/fact_graph.clj | 96 -
build/clara/tools/inspect.clj | 354 ---
build/clara/tools/internal/inspect.clj | 80 -
build/clara/tools/loop_detector.clj | 102 -
build/clara/tools/testing_utils.clj | 204 --
build/clara/tools/tracing.clj | 175 --
.../clj-kondo.exports/clara/rules/config.edn | 7 -
.../clara/rules/hooks/clara_rules.clj_kondo | 444 ----
29 files changed, 2 insertions(+), 9423 deletions(-)
delete mode 100644 build/.gitkeep
delete mode 100644 build/META-INF/MANIFEST.MF
delete mode 100644 build/META-INF/maven/k13labs/clara-rules/pom.properties
delete mode 100644 build/clara/rules.clj
delete mode 100644 build/clara/rules/accumulators.clj
delete mode 100644 build/clara/rules/compiler.clj
delete mode 100644 build/clara/rules/dsl.clj
delete mode 100644 build/clara/rules/durability.clj
delete mode 100644 build/clara/rules/durability/fressian.clj
delete mode 100644 build/clara/rules/engine.clj
delete mode 100644 build/clara/rules/java.clj
delete mode 100644 build/clara/rules/listener.clj
delete mode 100644 build/clara/rules/memory.clj
delete mode 100644 build/clara/rules/platform.clj
delete mode 100644 build/clara/rules/schema.clj
delete mode 100644 build/clara/rules/test_rules_data.clj
delete mode 100644 build/clara/rules/testfacts.clj
delete mode 100644 build/clara/rules/update_cache/cancelling.clj
delete mode 100644 build/clara/rules/update_cache/core.clj
delete mode 100644 build/clara/tools/fact_graph.clj
delete mode 100644 build/clara/tools/inspect.clj
delete mode 100644 build/clara/tools/internal/inspect.clj
delete mode 100644 build/clara/tools/loop_detector.clj
delete mode 100644 build/clara/tools/testing_utils.clj
delete mode 100644 build/clara/tools/tracing.clj
delete mode 100644 build/clj-kondo.exports/clara/rules/config.edn
delete mode 100644 build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
diff --git a/.gitignore b/.gitignore
index 35299ec9..58e9522a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+/build
/target
/lib
/classes
diff --git a/Makefile b/Makefile
index 6043a797..aa534744 100644
--- a/Makefile
+++ b/Makefile
@@ -32,7 +32,7 @@ build: compile-main-java
clojure -Spom
clojure -X:jar \
:sync-pom true \
- :group-id "k13labs" \
+ :group-id "com.github.k13labs" \
:artifact-id "clara-rules" \
:version '"$(VERSION)"'
diff --git a/build/.gitkeep b/build/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/build/META-INF/MANIFEST.MF b/build/META-INF/MANIFEST.MF
deleted file mode 100644
index d49f8288..00000000
--- a/build/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,4 +0,0 @@
-Manifest-Version: 1.0
-Created-By: depstar
-Built-By: jose.gomez
-Build-Jdk: 11.0.20
diff --git a/build/META-INF/maven/k13labs/clara-rules/pom.properties b/build/META-INF/maven/k13labs/clara-rules/pom.properties
deleted file mode 100644
index 7d25afdb..00000000
--- a/build/META-INF/maven/k13labs/clara-rules/pom.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-#Generated by depstar
-#Tue Dec 26 19:45:19 CST 2023
-revision=3d3c776dd19f1fc80d50ae13c9d1fc51acc40da0
-version=0.9.0-SNAPSHOT
-groupId=k13labs
-artifactId=clara-rules
diff --git a/build/clara/rules.clj b/build/clara/rules.clj
deleted file mode 100644
index c26815b1..00000000
--- a/build/clara/rules.clj
+++ /dev/null
@@ -1,257 +0,0 @@
-(ns clara.rules
- "Forward-chaining rules for Clojure. The primary API is in this namespace."
- (:require [clara.rules.engine :as eng]
- [clara.rules.platform :as platform]
- [clara.rules.compiler :as com]
- [clara.rules.dsl :as dsl]))
-
-(defn insert
- "Inserts one or more facts into a working session. It does not modify the given
- session, but returns a new session with the facts added."
- [session & facts]
- (eng/insert session facts))
-
-(defn insert-all
- "Inserts a sequence of facts into a working session. It does not modify the given
- session, but returns a new session with the facts added."
- [session fact-seq]
- (eng/insert session fact-seq))
-
-(defn retract
- "Retracts a fact from a working session. It does not modify the given session,
- but returns a new session with the facts retracted."
- [session & facts]
- (eng/retract session facts))
-
-(defn fire-rules
-  "Fires all rules in the given session. Once a rule is fired, it is labeled in a fired
- state and will not be re-fired unless facts affecting the rule are added or retracted.
-
- This function does not modify the given session to mark rules as fired. Instead, it returns
- a new session in which the rules are marked as fired.
-
-  This takes an additional map of options as a second argument. Current options:
-
-  :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
-  Simultaneously propagates insertions and retractions through the rules network, at every step using the insertions and retractions of equal facts to cancel each
-  other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
-  is undefined when this option is enabled, so it should not be used when a fire-rules call can result in those operations.
-  Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
-  When this option is used, executed RHS code shouldn't do anything that impacts state other than performing logical insertions."
- ([session] (eng/fire-rules session {}))
- ([session opts] (eng/fire-rules session opts)))
-
-(defn query
- "Runs the given query with the optional given parameters against the session.
- The optional parameters should be in map form. For example, a query call might be:
-
- (query session get-by-last-name :?last-name \"Jones\")
-
- The query itself may be either the var created by a defquery statement,
- or the actual name of the query.
- "
- [session query & params]
- (let [params-map (->> (for [[param value] (apply hash-map params)]
- [(platform/query-param param) value])
- (into {}))]
- (eng/query session query params-map)))
-
-(defn insert!
- "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
-
- Inserted facts are logical, in that if the support for the insertion is removed, the fact
- will automatically be retracted. For instance, if there is a rule that inserts a \"Cold\" fact
- if a \"Temperature\" fact is below a threshold, and the \"Temperature\" fact that triggered
- the rule is retracted, the \"Cold\" fact the rule inserted is also retracted. This is the underlying
-   truth maintenance facility.
-
- This truth maintenance is also transitive: if a rule depends on some criteria to fire, and a
- criterion becomes invalid, it may retract facts that invalidate other rules, which in turn
- retract their conclusions. This way we can ensure that information inferred by rules is always
- in a consistent state."
- [& facts]
- (eng/insert-facts! facts false))
-
-(defn insert-all!
- "Behaves the same as insert!, but accepts a sequence of facts to be inserted. This can be simpler and more efficient for
- rules needing to insert multiple facts.
-
-   See the doc in insert! for details on insert behavior.
- [facts]
- (eng/insert-facts! facts false))
-
-(defn insert-unconditional!
- "To be executed within a rule's right-hand side, this inserts a new fact or facts into working memory.
-
- This differs from insert! in that it is unconditional. The facts inserted will not be retracted
- even if the rule activation doing the insert becomes false. Most users should prefer the simple insert!
- function as described above, but this function is available for use cases that don't wish to use
- Clara's truth maintenance."
- [& facts]
- (eng/insert-facts! facts true))
-
-(defn insert-all-unconditional!
- "Behaves the same as insert-unconditional!, but accepts a sequence of facts to be inserted rather than individual facts.
-
-   See the doc in insert-unconditional! for details on unconditional insert behavior.
- [facts]
- (eng/insert-facts! facts true))
-
-(defn retract!
- "To be executed within a rule's right-hand side, this retracts a fact or facts from the working memory.
-
- Retracting facts from the right-hand side has slightly different semantics than insertion. As described
- in the insert! documentation, inserts are logical and will automatically be retracted if the rule
- that inserted them becomes false. This retract! function does not follow the inverse; retracted items
- are simply removed, and not re-added if the rule that retracted them becomes false.
-
- The reason for this is that retractions remove information from the knowledge base, and doing truth
- maintenance over retractions would require holding onto all retracted items, which would be an issue
- in some use cases. This retract! method is included to help with certain use cases, but unless you
- have a specific need, it is better to simply do inserts on the rule's right-hand side, and let
- Clara's underlying truth maintenance retract inserted items if their support becomes false."
- [& facts]
- (eng/rhs-retract-facts! facts))
-
-(extend-type clojure.lang.Symbol
- com/IRuleSource
- (load-rules [sym]
- ;; Find the rules and queries in the namespace, shred them,
- ;; and compile them into a rule base.
- (if (namespace sym)
- ;; The symbol is qualified, so load rules in the qualified symbol.
- (let [resolved (resolve sym)]
- (when (nil? resolved)
- (throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
-
- (cond
- ;; The symbol references a rule or query, so just return it
- (or (:query (meta resolved))
- (:rule (meta resolved))) [@resolved]
-
-          ;; The symbol references a sequence, so return it.
- (sequential? @resolved) @resolved
-
- :else
- (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym}))))
-
- ;; The symbol is not qualified, so treat it as a namespace.
- (->> (ns-interns sym)
- (vals) ; Get the references in the namespace.
- (filter var?)
- (filter (comp (some-fn :rule :query :production-seq) meta)) ; Filter down to rules, queries, and seqs of both.
- ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
- ;; it is possible that they won't have :line metadata, so we have a default of 0.
- (sort (fn [v1 v2]
- (compare (or (:line (meta v1)) 0)
- (or (:line (meta v2)) 0))))
- (mapcat #(if (:production-seq (meta %))
- (deref %)
- [(deref %)]))))))
-
-(defmacro mk-session
- "Creates a new session using the given rule sources. The resulting session
- is immutable, and can be used with insert, retract, fire-rules, and query functions.
-
- If no sources are provided, it will attempt to load rules from the caller's namespace,
- which is determined by reading Clojure's *ns* var.
-
- This will use rules defined with defrule, queries defined with defquery, and sequences
- of rule and/or query structures in vars that are annotated with the metadata ^:production-seq.
-
-   The caller may also specify keyword-style options at the end of the parameters. Several
-   options are supported, although most users will need only the first two, if any:
-
- * :fact-type-fn, which must have a value of a function used to determine the logical type of a given
- fact. Defaults to Clojure's type function.
- * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
- Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
- * :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
- fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
- this ordering will be maintained by Clara; ordering the ancestors consistently will increase the consistency of overall performance.
- * :activation-group-fn, a function applied to production structures and returns the group they should be activated with.
- It defaults to checking the :salience property, or 0 if none exists.
- * :activation-group-sort-fn, a comparator function used to sort the values returned by the above :activation-group-fn.
- Defaults to >, so rules with a higher salience are executed first.
- * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
- Larger batch sizes should see better performance compared to smaller batch sizes. (Only applicable to Clojure)
- Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
-   * :omit-compile-ctx - When false, Clara (in Clojure) retains additional information to improve error messages during
-   session deserialization, at the cost of additional memory use.
-   By default this information is retained only until the session is initially compiled and is then discarded. This
-   information might prove useful for debugging compilation errors within the rulebase, e.g. rulebase serialization
-   (i.e. via Clara's durability support).
-   Defaults to true, see clara.rules.compiler/omit-compile-ctx-default for more information.
-
- This is not supported in ClojureScript, since it requires eval to dynamically build a session. ClojureScript
- users must use pre-defined rule sessions using defsession."
- [& args]
- (if (and (seq args) (not (keyword? (first args))))
- `(com/mk-session ~(vec args)) ; At least one namespace given, so use it.
- `(com/mk-session (concat [(ns-name *ns*)] ~(vec args))))) ; No namespace given, so use the current one.
-
-(defmacro defsession
-  "Creates a session given a list of sources, which are typically Clojure namespaces, and keyword-style options.
-
- Typical usage would be like this, with a session defined as a var:
-
- (defsession my-session 'example.namespace)
-
- That var contains an immutable session that then can be used as a starting point to create sessions with
- caller-provided data. Since the session itself is immutable, it can be safely used from multiple threads
- and will not be modified by callers. So a user might grab it, insert facts, and otherwise
- use it as follows:
-
- (-> my-session
- (insert (->Temperature 23))
- (fire-rules))"
- [name & sources-and-options]
- `(def ~name (com/mk-session ~(vec sources-and-options))))
-
-(defmacro defrule
- "Defines a rule and stores it in the given var. For instance, a simple rule would look like this:
-
- (defrule hvac-approval
- \"HVAC repairs need the appropriate paperwork, so insert
- a validation error if approval is not present.\"
- [WorkOrder (= type :hvac)]
- [:not [ApprovalForm (= formname \"27B-6\")]]
- =>
- (insert! (->ValidationError
- :approval
- \"HVAC repairs must include a 27B-6 form.\")))
-
- See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
- [name & body]
- (let [doc (if (string? (first body)) (first body) nil)]
- `(def ~(vary-meta name assoc :rule true :doc doc)
- ~(dsl/build-rule name body (meta &form)))))
-
-(defmacro defquery
-  "Defines a query and stores it in the given var. For instance, a simple query that accepts no
- parameters would look like this:
-
- (defquery check-job
- \"Checks the job for validation errors.\"
- []
- [?issue <- ValidationError])
-
- See the [query authoring documentation](http://www.clara-rules.org/docs/queries/) for details."
- [name & body]
- (let [doc (if (string? (first body)) (first body) nil)]
- `(def ~(vary-meta name assoc :query true :doc doc)
- ~(dsl/build-query name body (meta &form)))))
-
-(defmacro clear-ns-productions!
- "Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
- Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
- name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
- reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
- defining rules/queries to ensure the cache is cleared properly."
- []
- (let [production-syms (->> (ns-interns *ns*)
- (filter (comp var? second))
- (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
- (map first))] ; Take the symbols for the rule/query vars
- (doseq [psym production-syms]
- (ns-unmap *ns* psym))))
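
The docstrings above cover the public session API: defsession/mk-session options, logical insertion with insert!, and the experimental :cancelling option to fire-rules. A minimal, hedged sketch of how these pieces fit together, using the Temperature and Cold test facts that appear elsewhere in this patch (the example.weather namespace and rule names are hypothetical):

    (ns example.weather
      (:require [clara.rules :refer [defrule defquery defsession
                                     insert insert! fire-rules query]]
                [clara.rules.testfacts :refer [->Temperature ->Cold]])
      (:import [clara.rules.testfacts Temperature Cold]))

    (defrule chilly
      [Temperature (< temperature 20) (= ?t temperature)]
      =>
      (insert! (->Cold ?t)))

    (defquery cold-readings
      []
      [Cold (= ?c temperature)])

    ;; :cache false forces recompilation when the namespace is reloaded.
    (defsession weather-session 'example.weather
      :cache false)

    (-> weather-session
        (insert (->Temperature 10 "MCI"))
        (fire-rules)                  ;; or (fire-rules {:cancelling true}), per the docstring above
        (query cold-readings))
    ;; => [{:?c 10}]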
diff --git a/build/clara/rules/accumulators.clj b/build/clara/rules/accumulators.clj
deleted file mode 100644
index 37da6748..00000000
--- a/build/clara/rules/accumulators.clj
+++ /dev/null
@@ -1,212 +0,0 @@
-(ns clara.rules.accumulators
- "A set of common accumulators usable in Clara rules."
- (:require [clara.rules.engine :as eng]
- [schema.core :as s])
- (:refer-clojure :exclude [min max distinct count]))
-
-(defn accum
- "Creates a new accumulator. Users are encouraged to use a pre-defined
- accumulator in this namespace if one fits their needs. (See min, max, all,
- distinct, and others in this namespace.) This function
- exists for cases where a custom accumulator is necessary.
-
- The following properties are accepted.
-
- * An initial-value to be used with the reduced operations.
- * A reduce-fn that can be used with the Clojure Reducers library to reduce items.
- * An optional combine-fn that can be used with the Clojure Reducers library to combine reduced items.
- * An optional retract-fn that can remove a retracted fact from a previously reduced computation.
- * An optional convert-return-fn that converts the reduced data into something useful to the caller.
- Simply uses identity by default.
- "
- [{:keys [initial-value reduce-fn combine-fn retract-fn convert-return-fn] :as accum-map}]
-
- ;; Validate expected arguments are present.
- (s/validate {(s/optional-key :initial-value) s/Any
- (s/optional-key :combine-fn) s/Any
- (s/optional-key :convert-return-fn) s/Any
- :reduce-fn s/Any
- (s/optional-key :retract-fn) s/Any}
- accum-map)
-
- (eng/map->Accumulator
- (merge {;; Default conversion does nothing, so use identity.
- :convert-return-fn identity}
- accum-map)))
-
-(defn- drop-one-of
- "Removes one instance of the given value from the sequence."
- [items value]
- (let [pred #(not= value %)]
- (into (empty items)
- cat
- [(take-while pred items)
- (rest (drop-while pred items))])))
-
-(defn reduce-to-accum
- "Creates an accumulator using a given reduce function with optional initial value and
- conversion to the final result.
-
-   For example, a simple function that returns the Temperature fact with the highest value:
-
- (acc/reduce-to-accum
- (fn [previous value]
- (if previous
- (if (> (:temperature value) (:temperature previous))
- value
- previous)
- value)))
-
- Note that the above example produces the same result as
- (clara.rules.accumulators/max :temperature :returns-fact true),
-   and users should prefer to use built-in accumulators when possible. This function exists to easily
- convert arbitrary reduce functions to an accumulator.
-
- Callers may optionally pass in an initial value (which defaults to nil),
- a function to transform the value returned by the reduce (which defaults to identity),
- and a function to combine two reduced results (which uses the reduce-fn to add new
- items to the same reduced value by default)."
-
- ([reduce-fn]
- (reduce-to-accum reduce-fn nil))
- ([reduce-fn initial-value]
- (reduce-to-accum reduce-fn initial-value identity))
- ([reduce-fn initial-value convert-return-fn]
- (reduce-to-accum reduce-fn initial-value convert-return-fn nil))
- ([reduce-fn initial-value convert-return-fn combine-fn]
- (accum (cond-> {:initial-value initial-value
- :reduce-fn reduce-fn
- :convert-return-fn convert-return-fn}
- combine-fn (assoc :combine-fn combine-fn)))))
-
-(let [grouping-fn (fnil conj [])]
- (defn grouping-by
- "Return a generic grouping accumulator. Behaves like clojure.core/group-by.
-
- * `field` - required - The field of a fact to group by.
- * `convert-return-fn` - optional - Converts the resulting grouped
- data. Defaults to clojure.core/identity."
- ([field]
- (grouping-by field identity))
- ([field convert-return-fn]
- {:pre [(ifn? convert-return-fn)]}
- (reduce-to-accum
- (fn [m x]
- (let [v (field x)]
- (update m v grouping-fn x)))
- {}
- convert-return-fn))))
-
-(defn- comparison-based
- "Creates a comparison-based result such as min or max"
- [field comparator returns-fact]
- (let [reduce-fn (fn [previous value]
- (if previous
- (if (comparator (field previous) (field value))
- previous
- value)
- value))
-
- convert-return-fn (if returns-fact
- identity
- field)]
- (accum
- {:reduce-fn reduce-fn
- :convert-return-fn convert-return-fn})))
-
-(defn min
- "Returns an accumulator that returns the minimum value of a given field.
-
- The caller may provide the following options:
-
- * :returns-fact Returns the fact rather than the field value if set to true. Defaults to false."
- [field & {:keys [returns-fact]}]
- (comparison-based field < returns-fact))
-
-(defn max
- "Returns an accumulator that returns the maximum value of a given field.
-
- The caller may provide the following options:
-
- * :returns-fact Returns the fact rather than the field value if set to true. Defaults to false."
- [field & {:keys [returns-fact]}]
- (comparison-based field > returns-fact))
-
-(defn average
- "Returns an accumulator that returns the average value of a given field."
- [field]
- (accum
- {:initial-value [0 0]
- :reduce-fn (fn [[value count] item]
- [(+ value (field item)) (inc count)])
- :retract-fn (fn [[value count] retracted]
- [(- value (field retracted)) (dec count)])
- :combine-fn (fn [[value1 count1] [value2 count2]]
- [(+ value1 value2) (+ count1 count2)])
- :convert-return-fn (fn [[value count]]
- (if (= 0 count)
- nil
- (/ value count)))}))
-
-(defn sum
- "Returns an accumulator that returns the sum of values of a given field"
- [field]
- (accum
- {:initial-value 0
- :reduce-fn (fn [total item]
- (+ total (field item)))
- :retract-fn (fn [total item]
- (- total (field item)))
- :combine-fn +}))
-
-(defn count
- "Returns an accumulator that simply counts the number of matching facts"
- []
- (accum
- {:initial-value 0
- :reduce-fn (fn [count value] (inc count))
- :retract-fn (fn [count retracted] (dec count))
- :combine-fn +}))
-
-(defn exists
- "Returns an accumulator that accumulates to true if at least one fact
- exists and nil otherwise, the latter causing the accumulator condition to not match."
- []
- (assoc (count) :convert-return-fn (fn [v]
- ;; This specifically needs to return nil rather than false if the pos? predicate is false so that
- ;; the accumulator condition will fail to match; the accumulator will consider
- ;; boolean false a valid match. See https://github.com/cerner/clara-rules/issues/182#issuecomment-217142418
- ;; and the following comments for the original discussion around suppressing nil accumulator
- ;; return values but propagating boolean false.
- (when (pos? v)
- true))))
-
-(defn distinct
- "Returns an accumulator producing a distinct set of facts.
- If given a field, returns a distinct set of values for that field."
- ([] (distinct identity))
- ([field]
- (accum
- {:initial-value {}
- :reduce-fn (fn [freq-map value] (update freq-map (field value) (fnil inc 0)))
- :retract-fn (fn [freq-map retracted-item]
- (let [item-field (field retracted-item)
- current (get freq-map item-field)]
- (if (= 1 current)
- (dissoc freq-map item-field)
- (update freq-map item-field dec))))
- :convert-return-fn (comp set keys)})))
-
-(defn all
- "Returns an accumulator that preserves all accumulated items.
- If given a field, returns all values in that field."
- ([]
- (accum
- {:initial-value []
- :reduce-fn (fn [items value] (conj items value))
- :retract-fn (fn [items retracted] (drop-one-of items retracted))}))
- ([field]
- (accum
- {:initial-value []
- :reduce-fn (fn [items value] (conj items (field value)))
- :retract-fn (fn [items retracted] (drop-one-of items (field retracted)))})))
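-
-;; A minimal usage sketch for the accumulators above. Temperature, its fields, and
-;; the session setup are hypothetical; the [?binding <- accumulator :from [Type]]
-;; form is the standard clara-rules condition syntax.
-(comment
-  (require '[clara.rules :refer [defquery mk-session insert fire-rules query]]
-           '[clara.rules.accumulators :as acc])
-
-  (defrecord Temperature [value location])
-
-  (defquery hottest
-    []
-    [?hottest <- (acc/max :value) :from [Temperature]])
-
-  (-> (mk-session)
-      (insert (->Temperature 80 "MCI")
-              (->Temperature 90 "MCI"))
-      (fire-rules)
-      (query hottest))
-  ;; => ({:?hottest 90})
-  )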
diff --git a/build/clara/rules/compiler.clj b/build/clara/rules/compiler.clj
deleted file mode 100644
index 9de679db..00000000
--- a/build/clara/rules/compiler.clj
+++ /dev/null
@@ -1,2039 +0,0 @@
-(ns clara.rules.compiler
- "This namespace is for internal use and may move in the future.
- This is the Clara rules compiler, translating raw data structures into compiled versions and functions.
- Most users should use only the clara.rules namespace."
- (:require [clara.rules.engine :as eng]
- [clara.rules.schema :as schema]
- [clojure.set :as set]
- [clojure.string :as string]
- [clojure.walk :as walk]
- [schema.core :as sc])
- (:import [clara.rules.engine
- ProductionNode
- QueryNode
- AlphaNode
- RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode
- TestNode
- AccumulateNode
- AccumulateWithJoinFilterNode
- LocalTransport
- Accumulator
- NegationResult
- ISystemFact]
- [java.beans
- PropertyDescriptor]
- [clojure.lang
- IFn]))
-
-;; Protocol for loading rules from some arbitrary source.
-(defprotocol IRuleSource
- (load-rules [source]))
-
-(sc/defschema BetaNode
- "These nodes exist in the beta network."
- (sc/pred (comp #{ProductionNode
- QueryNode
- RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode
- TestNode
- AccumulateNode
- AccumulateWithJoinFilterNode}
- class)
- "Some beta node type"))
-
-;; A rulebase -- essentially an immutable Rete network with a collection of
-;; alpha and beta nodes and supporting structure.
-(sc/defrecord Rulebase [;; Map of matched type to the alpha nodes that handle them.
- alpha-roots :- {sc/Any [AlphaNode]}
- ;; Root beta nodes (join, accumulate, etc.).
- beta-roots :- [BetaNode]
- ;; Productions in the rulebase.
- productions :- #{schema/Production}
- ;; Production nodes.
- production-nodes :- [ProductionNode]
- ;; Map of queries to the nodes hosting them.
- query-nodes :- {sc/Any QueryNode}
- ;; Map of id to one of the alpha or beta nodes (join, accumulate, etc).
- id-to-node :- {sc/Num (sc/conditional
- :activation AlphaNode
- :else BetaNode)}
- ;; Function for sorting activation groups of rules for firing.
- activation-group-sort-fn
- ;; Function that takes a rule and returns its activation group.
- activation-group-fn
- ;; Function that takes facts and determines what alpha nodes they match.
- get-alphas-fn
- ;; A map of [node-id field-name] to function.
- node-expr-fn-lookup :- schema/NodeFnLookup])
-
-(defn- is-variable?
- "Returns true if the given expression is a variable (a symbol prefixed by ?)"
- [expr]
- (and (symbol? expr)
- (.startsWith (name expr) "?")))
-
-(def ^:private reflector
- "For some reason (bug?) the default reflector doesn't use the
- Clojure dynamic class loader, which prevents reflecting on
- `defrecords`. Work around by supplying our own which does."
- (clojure.reflect.JavaReflector. (clojure.lang.RT/makeClassLoader)))
-
-(defn- get-field-accessors
- "Given a clojure.lang.IRecord subclass, returns a map of field name to a
- symbol representing the function used to access it."
- [cls]
- (into {}
- (for [field-name (clojure.lang.Reflector/invokeStaticMethod ^Class cls
- "getBasis"
- ^"[Ljava.lang.Object;" (make-array Object 0))]
- ;; Do not preserve the metadata on the field names returned from
- ;; IRecord.getBasis() since it may not be safe to eval this metadata
- ;; in other contexts. This mostly applies to :tag metadata that may
-          ;; be unqualified class name symbols at this point.
- [(with-meta field-name {}) (symbol (str ".-" field-name))])))
-
-(defn- get-bean-accessors
- "Returns a map of bean property name to a symbol representing the function used to access it."
- [cls]
- (into {}
- ;; Iterate through the bean properties, returning tuples and the corresponding methods.
- (for [^PropertyDescriptor property (seq (.. java.beans.Introspector
- (getBeanInfo cls)
- (getPropertyDescriptors)))
- :let [read-method (.getReadMethod property)]
-          ;; In the event that the class has an indexed property without a basic accessor we will simply skip
-          ;; the accessor, as we will not know how to retrieve the value. See https://github.com/cerner/clara-rules/issues/446
- :when read-method]
- [(symbol (string/replace (.getName property) #"_" "-")) ; Replace underscore with idiomatic dash.
- (symbol (str "." (.getName read-method)))])))
-
-(defn effective-type [type]
- (if (symbol? type)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name type))
- type))
-
-(defn get-fields
- "Returns a map of field name to a symbol representing the function used to access it."
- [type]
- (let [type (effective-type type)]
- (cond
- (isa? type clojure.lang.IRecord) (get-field-accessors type)
- (class? type) (get-bean-accessors type) ; Treat unrecognized classes as beans.
- :else [])))
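-
-;; A small sketch of the accessor map produced above; Temperature is a hypothetical record.
-(comment
-  (defrecord Temperature [value location])
-
-  (get-fields Temperature)
-  ;; => {value .-value, location .-location} ; record field name -> accessor symbol
-  )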
-
-(defn- equality-expression? [expression]
- (let [qualify-when-sym #(when-let [resolved (and (symbol? %)
- (resolve %))]
- (and (var? resolved)
- (symbol (-> resolved meta :ns ns-name name)
- (-> resolved meta :name name))))
- op (first expression)]
- ;; Check for unqualified = or == to support original Clara unification
- ;; syntax where clojure.core/== was supposed to be excluded explicitly.
- (boolean (or (#{'= '== 'clojure.core/= 'clojure.core/==} op)
- (#{'clojure.core/= 'clojure.core/==} (qualify-when-sym op))))))
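-
-;; A few examples of the check above:
-(comment
-  (equality-expression? '(= ?a (:x this)))        ;; => true
-  (equality-expression? '(clojure.core/== ?a 1))  ;; => true
-  (equality-expression? '(< (:x this) ?a))        ;; => false
-  )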
-
-(def ^:dynamic *compile-ctx* nil)
-
-(defn try-eval
- "Evals the given `expr`. If an exception is thrown, it is caught and an
- ex-info exception is thrown with more details added. Uses *compile-ctx*
- for additional contextual info to add to the exception details."
- [expr]
- (try
- (eval expr)
- (catch Exception e
- (let [edata (merge {:expr expr}
- (dissoc *compile-ctx* :msg))
- msg (:msg *compile-ctx*)]
- (throw (ex-info (str (if msg (str "Failed " msg) "Failed compiling.") \newline
- ;; Put ex-data specifically in the string since
- ;; often only ExceptionInfo.toString() will be
- ;; called, which doesn't show this data.
- edata \newline)
- edata
- e))))))
-
-(defn- compile-constraints
- "Compiles a sequence of constraints into a structure that can be evaluated.
-
- Callers may also pass a collection of equality-only-variables, which instructs
- this function to only do an equality check on them rather than create a unification binding."
- ([exp-seq]
- (compile-constraints exp-seq #{}))
- ([exp-seq equality-only-variables]
-
- (if (empty? exp-seq)
- `(deref ~'?__bindings__)
- (let [[exp & rest-exp] exp-seq
- variables (into #{}
- (filter (fn [item]
- (and (symbol? item)
- (= \? (first (name item)))
- (not (equality-only-variables item))))
- exp))
- expression-values (remove variables (rest exp))
- binds-variables? (and (equality-expression? exp)
- (seq variables))
-
- ;; if we intend on binding any variables at this level of the
- ;; expression then future layers should not be able to rebind them.
- ;; see https://github.com/cerner/clara-rules/issues/417 for more info
- equality-only-variables (if binds-variables?
- (into equality-only-variables
- variables)
- equality-only-variables)
-
- compiled-rest (compile-constraints rest-exp equality-only-variables)]
-
- (when (and binds-variables?
- (empty? expression-values))
- (throw (ex-info (str "Malformed variable binding for " variables ". No associated value.")
- {:variables (map keyword variables)})))
-
- (cond
- binds-variables?
- ;; Bind each variable with the first value we encounter.
- ;; The additional equality checks are handled below so which value
- ;; we bind to is not important. So an expression like (= ?x value-1 value-2) will
- ;; bind ?x to value-1, and then ensure value-1 and value-2 are equal below.
-
- ;; First assign each value in a let, so it is visible to subsequent expressions.
- `(let [~@(for [variable variables
- let-expression [variable (first expression-values)]]
- let-expression)]
-
- ;; Update the bindings produced by this expression.
- ~@(for [variable variables]
- `(swap! ~'?__bindings__ assoc ~(keyword variable) ~variable))
-
- ;; If there is more than one expression value, we need to ensure they are
- ;; equal as well as doing the bind. This ensures that value-1 and value-2 are
- ;; equal.
- ~(if (> (count expression-values) 1)
-
- `(if ~(cons '= expression-values) ~compiled-rest nil)
- ;; No additional values to check, so move on to the rest of
- ;; the expression
- compiled-rest))
-
-         ;; A constraint that is empty doesn't need to be added as a check,
-         ;; simply move on to the rest.
- (empty? exp)
- compiled-rest
-
- ;; No variables to unify, so simply check the expression and
- ;; move on to the rest.
- :else
- `(if ~exp ~compiled-rest nil))))))
-
-(defn flatten-expression
- "Flattens expression as clojure.core/flatten does, except will flatten
- anything that is a collection rather than specifically sequential."
- [expression]
- (filter (complement coll?)
- (tree-seq coll? seq expression)))
-
-(defn variables-as-keywords
- "Returns a set of the symbols in the given s-expression that start with '?' as keywords"
- [expression]
- (into #{} (for [item (flatten-expression expression)
- :when (is-variable? item)]
- (keyword item))))
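-
-;; A quick sketch of what the two helpers above produce:
-(comment
-  (flatten-expression '(= ?name (first (:names this))))
-  ;; => (= ?name first :names this)
-
-  (variables-as-keywords '[(= ?a (:x this)) (> (:y this) ?b)])
-  ;; => #{:?a :?b}
-  )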
-
-(defn field-name->accessors-used
- "Returns a map of field name to accessors for any field names of type used
- in the constraints."
- [type constraints]
- (let [field-name->accessor (get-fields type)
- all-fields (set (keys field-name->accessor))
- fields-used (into #{}
- (filter all-fields)
- (flatten-expression constraints))]
- (into {}
- (filter (comp fields-used key))
- field-name->accessor)))
-
-(defn- add-meta
- "Helper function to add metadata."
- [fact-symbol fact-type]
- (let [fact-type (if (symbol? fact-type)
- (try
- (resolve fact-type)
- (catch Exception e
- ;; We shouldn't have to worry about exceptions being thrown here according
- ;; to `resolve`s docs.
- ;; However, due to http://dev.clojure.org/jira/browse/CLJ-1403 being open
- ;; still, it is safer to catch any exceptions thrown.
- fact-type))
- fact-type)]
- (if (class? fact-type)
- (vary-meta fact-symbol assoc :tag (symbol (.getName ^Class fact-type)))
- fact-symbol)))
-
-(defn- mk-node-fn-name
- "A simple helper function to maintain a consistent pattern for naming anonymous functions in the rulebase.
-
- node-type - expected to align with one of the types of nodes defined in clara.rules.engine, and node-type->abbreviated-type.
- node-id - expected to be an integer
- fn-type - an identifier for what the function means to the node
-
-  fn-type is required as some nodes might have multiple functions associated with them, e.g. Accumulator nodes containing
- filter functions."
- [node-type node-id fn-type]
- (if-let [abbreviated-node-type (get eng/node-type->abbreviated-type node-type)]
- (symbol (str abbreviated-node-type "-" node-id "-" fn-type))
- (throw (ex-info "Unrecognized node type"
- {:node-type node-type
- :node-id node-id
- :fn-type fn-type}))))
-
-(defn compile-condition
- "Returns a function definition that can be used in alpha nodes to test the condition."
- [type node-id destructured-fact constraints result-binding env]
- (let [;; Get a map of fieldnames to access function symbols.
- accessors (field-name->accessors-used type constraints)
- ;; The assignments should use the argument destructuring if provided, or default to accessors otherwise.
- assignments (if destructured-fact
- ;; Simply destructure the fact if arguments are provided.
- [destructured-fact '?__fact__]
- ;; No argument provided, so use our default destructuring logic.
- (concat '(this ?__fact__)
- (mapcat (fn [[name accessor]]
- [name (list accessor '?__fact__)])
- accessors)))
-
- ;; The destructured environment, if any
- destructured-env (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
-        ;; Initial bindings used in the return of the compiled condition expression.
- initial-bindings (if result-binding {result-binding '?__fact__} {})
-
- ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-condition' to be used for this scenario
- fn-name (mk-node-fn-name "AlphaNode" node-id "AE")]
-
- `(fn ~fn-name [~(add-meta '?__fact__ type)
- ~destructured-env]
- (let [~@assignments
- ~'?__bindings__ (atom ~initial-bindings)]
- ~(compile-constraints constraints)))))
-
-(defn build-token-assignment
- "A helper function to build variable assignment forms for tokens."
- [binding-key]
- (list (symbol (name binding-key))
- (list `-> '?__token__ :bindings binding-key)))
-
-(defn compile-test-handler [node-id constraints env]
- (let [binding-keys (variables-as-keywords constraints)
- assignments (mapcat build-token-assignment binding-keys)
-
- ;; The destructured environment, if any
- destructured-env (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
- ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-test' to be used for this scenario
- fn-name (mk-node-fn-name "TestNode" node-id "TE")]
- `(fn ~fn-name [~'?__token__ ~destructured-env]
- (let [~@assignments]
- (and ~@constraints)))))
-
-(defn compile-test [node-id constraints env]
- (let [test-handler (compile-test-handler node-id constraints env)]
- `(array-map :handler ~test-handler
- :constraints '~constraints)))
-
-(defn compile-action
- "Compile the right-hand-side action of a rule, returning a function to execute it."
- [node-id binding-keys rhs env]
-  (let [;; Avoid creating let bindings in the compiled code that aren't actually used in the body.
- ;; The bindings only exist in the scope of the RHS body, not in any code called by it,
- ;; so this scanning strategy will detect all possible uses of binding variables in the RHS.
- ;; Note that some strategies with macros could introduce bindings, but these aren't something
- ;; we're trying to support. If necessary a user could macroexpand their RHS code manually before
- ;; providing it to Clara.
- rhs-bindings-used (variables-as-keywords rhs)
-
- assignments (sequence
- (comp
- (filter rhs-bindings-used)
- (mapcat build-token-assignment))
- binding-keys)
-
- ;; The destructured environment, if any.
- destructured-env (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
- ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-action' to be used for this scenario
- fn-name (mk-node-fn-name "ProductionNode" node-id "AE")]
- `(fn ~fn-name [~'?__token__ ~destructured-env]
- (let [~@assignments]
- ~rhs))))
-
-(defn compile-accum
- "Used to create accumulators that take the environment into account."
- [node-id node-type accum env]
- (let [destructured-env
- (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
- ;; AccE will stand for AccumExpr
- fn-name (mk-node-fn-name node-type node-id "AccE")]
- `(fn ~fn-name [~destructured-env]
- ~accum)))
-
-(defn compile-join-filter
- "Compiles to a predicate function that ensures the given items can be unified. Returns a ready-to-eval
- function that accepts the following:
-
- * a token from the parent node
- * the fact
- * a map of bindings from the fact, which was typically computed on the alpha side
- * an environment
-
- The function created here returns truthy if the given fact satisfies the criteria."
- [node-id node-type {:keys [type constraints args] :as unification-condition} ancestor-bindings element-bindings env]
- (let [accessors (field-name->accessors-used type constraints)
-
- destructured-env (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
- destructured-fact (first args)
-
- fact-assignments (if destructured-fact
- ;; Simply destructure the fact if arguments are provided.
- [destructured-fact '?__fact__]
- ;; No argument provided, so use our default destructuring logic.
- (concat '(this ?__fact__)
- (mapcat (fn [[name accessor]]
- [name (list accessor '?__fact__)])
- accessors)))
-
- ;; Get the bindings used in the join filter expression that are pulled from
- ;; the token. This is simply the bindings in the constraints with the newly
- ;; created element bindings for this condition removed.
- token-binding-keys (remove element-bindings (variables-as-keywords constraints))
-
- token-assignments (mapcat build-token-assignment token-binding-keys)
-
- new-binding-assignments (mapcat #(list (symbol (name %))
- (list 'get '?__element-bindings__ %))
- element-bindings)
-
- assignments (concat
- fact-assignments
- token-assignments
- new-binding-assignments)
-
- equality-only-variables (into #{} (for [binding ancestor-bindings]
- (symbol (name (keyword binding)))))
-
- ;; JFE will stand for JoinFilterExpr
- fn-name (mk-node-fn-name node-type node-id "JFE")]
-
- `(fn ~fn-name
- [~'?__token__
- ~(add-meta '?__fact__ type)
- ~'?__element-bindings__
- ~destructured-env]
- (let [~@assignments
- ~'?__bindings__ (atom {})]
- ~(compile-constraints constraints equality-only-variables)))))
-
-(defn- expr-type [expression]
- (if (map? expression)
- :condition
- (first expression)))
-
-(defn- cartesian-join
-  "Performs a cartesian join to distribute disjunctions for disjunctive normal form,
-   distributing each disjunction across every other disjunction and also across each
-   given conjunction. Returns a sequence where each element contains a sequence
-   of conjunctions that can be used in rules."
- [disjunctions-to-distribute conjunctions]
-
-  ;; For every disjunction, do a cartesian join to distribute it
-  ;; across every other disjunction. We also must distribute it across
-  ;; each conjunction.
- (reduce
- (fn [distributed-disjunctions disjunction-to-distribute]
-
- (for [expression disjunction-to-distribute
- distributed-disjunction distributed-disjunctions]
- (conj distributed-disjunction expression)))
-
- ;; Start with our conjunctions to join to, since we must distribute
- ;; all disjunctions across these as well.
- [conjunctions]
- disjunctions-to-distribute))
-
-(defn to-dnf
- "Convert a lhs expression to disjunctive normal form."
- [expression]
-
- ;; Always validate the expression schema, as this is only done at compile time.
- (sc/validate schema/Condition expression)
- (condp = (expr-type expression)
- ;; Individual conditions can return unchanged.
- :condition
- expression
-
- :test
- expression
-
- :exists
- expression
-
- ;; Apply de Morgan's law to push negation nodes to the leaves.
- :not
- (let [children (rest expression)
- child (first children)]
-
- (when (not= 1 (count children))
- (throw (ex-info "Negation must have only one child." {:illegal-negation expression})))
-
- (condp = (expr-type child)
-
- ;; If the child is a single condition, simply return the ast.
- :condition expression
-
- :test expression
-
- ;; Note that :exists does not support further nested boolean conditions.
- ;; It is just syntax sugar over an accumulator.
- :exists expression
-
- ;; Double negation, so just return the expression under the second negation.
- :not
- (to-dnf (second child))
-
-      ;; DeMorgan's law converting conjunction to negated disjunctions.
- :and (to-dnf (cons :or (for [grandchild (rest child)] [:not grandchild])))
-
-      ;; DeMorgan's law converting disjunction to negated conjunctions.
- :or (to-dnf (cons :and (for [grandchild (rest child)] [:not grandchild])))))
-
- ;; For all others, recursively process the children.
- (let [children (map to-dnf (rest expression))
-        ;; Get all conjunctions, which will not contain any disjunctions since they were processed above.
- conjunctions (filter #(#{:and :condition :not :exists} (expr-type %)) children)]
-
-    ;; If there is only one child, the :and or :or operator can simply be eliminated.
- (if (= 1 (count children))
- (first children)
-
- (condp = (expr-type expression)
-
- :and
- (let [disjunctions (map rest (filter #(= :or (expr-type %)) children))
- ;; Merge all child conjunctions into a single conjunction.
- combine-conjunctions (fn [children]
- (cons :and
- (for [child children
- nested-child (if (= :and (expr-type child))
- (rest child)
- [child])]
- nested-child)))]
- (if (empty? disjunctions)
- (combine-conjunctions children)
- (cons :or
- (for [c (cartesian-join disjunctions conjunctions)]
- (combine-conjunctions c)))))
- :or
- ;; Merge all child disjunctions into a single disjunction.
- (let [disjunctions (mapcat rest (filter #(#{:or} (expr-type %)) children))]
- (cons :or (concat disjunctions conjunctions))))))))
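-
-;; An illustrative sketch of the conversion, using minimal leaf conditions; the
-;; ordering of conjuncts within each disjunct is an implementation detail.
-(comment
-  (let [a {:type String :constraints []}
-        b {:type Long   :constraints []}
-        c {:type Double :constraints []}]
-    (to-dnf [:and a [:or b c]]))
-  ;; => roughly (:or (:and b a) (:and c a))
-  )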
-
-(defn- non-equality-unification?
-  "Returns true if the given expression does a non-equality unification against a variable that
-   is not in the previously-bound set, indicating it can't be solved by simple unification."
-  [expression previously-bound]
- (let [found-complex (atom false)
- process-form (fn [form]
- (when (and (seq? form)
- (not (equality-expression? form))
- (some (fn [sym] (and (symbol? sym)
- (.startsWith (name sym) "?")
- (not (previously-bound sym))))
- (flatten-expression form)))
-
- (reset! found-complex true))
-
- form)]
-
-    ;; Walk the expression to find use of a symbol that can't be solved by equality-based unification.
- (doall (walk/postwalk process-form expression))
-
- @found-complex))
-
-(defn condition-type
- "Returns the type of a single condition that has been transformed
- to disjunctive normal form. The types are: :negation, :accumulator, :test, :exists, and :join"
- [condition]
- (let [is-negation (= :not (first condition))
- is-exists (= :exists (first condition))
- accumulator (:accumulator condition)
- result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
- condition (cond
- is-negation (second condition)
- accumulator (:from condition)
- :else condition)
- node-type (cond
- is-negation :negation
- is-exists :exists
- accumulator :accumulator
- (:type condition) :join
- :else :test)]
-
- node-type))
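-
-;; Examples of the classification above, over already-parsed condition data.
-;; Temperature and Cold are hypothetical fact types.
-(comment
-  (condition-type '{:type Temperature :constraints [(< value 20)]})  ;; => :join
-  (condition-type '[:not {:type Cold :constraints []}])              ;; => :negation
-  (condition-type '{:accumulator (acc/all)
-                    :from {:type Temperature :constraints []}})      ;; => :accumulator
-  (condition-type '{:constraints [(> ?t1 ?t2)]})                     ;; => :test
-  )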
-
-(defn- extract-exists
- "Converts :exists operations into an accumulator to detect
- the presence of a fact and a test to check that count is
- greater than zero.
-
- It may be possible to replace this conversion with a specialized
- ExtractNode in the future, but this transformation is simple
- and meets the functional needs."
- [conditions]
- (for [condition conditions
- expanded (if (= :exists (condition-type condition))
- ;; This is an :exists condition, so expand it
- ;; into an accumulator and a test.
- (let [exists-count (gensym "?__gen__")]
- [{:accumulator '(clara.rules.accumulators/exists)
- :from (second condition)
- :result-binding (keyword exists-count)}])
-
- ;; This is not an :exists condition, so do not change it.
- [condition])]
-
- expanded))
-
-(defn- classify-variables
-  "Classifies the variables found in the given constraints into 'bound' vs 'free'
- variables. Bound variables are those that are found in a valid
- equality-based, top-level binding form. All other variables encountered are
- considered free. Returns a tuple of the form
- [bound-variables free-variables]
- where bound-variables and free-variables are the sets of bound and free
- variables found in the constraints respectively."
- [constraints]
- (reduce (fn [[bound-variables free-variables] constraint]
- ;; Only top-level constraint forms can introduce new variable bindings.
- ;; If the top-level constraint is an equality expression, add the
- ;; bound variables to the set of bound variables.
- (if (and (seq? constraint) (equality-expression? constraint))
- [(->> (rest constraint)
- (filterv is-variable?)
- ;; A variable that was marked unbound in a previous expression should
- ;; not be considered bound.
- (remove free-variables)
- (into bound-variables))
- ;; Any other variables in a nested form are now considered "free".
- (->> (rest constraint)
- ;; We already have checked this level symbols for bound variables.
- (remove symbol?)
- flatten-expression
- (filter is-variable?)
- ;; Variables previously bound in an expression are not free.
- (remove bound-variables)
- (into free-variables))]
-
- ;; Binding forms are not supported nested within other forms, so
- ;; any variables that occur now are considered "free" variables.
- [bound-variables
- (->> (flatten-expression constraint)
- (filterv is-variable?)
- ;; Variables previously bound in an expression are not free.
- (remove bound-variables)
- (into free-variables))]))
- [#{} #{}]
- constraints))
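-
-;; A small sketch of the bound/free split performed above:
-(comment
-  (classify-variables '[(= ?a (:x this))
-                        (< (:y this) ?b)])
-  ;; => [#{?a} #{?b}] ; ?a is bound by a top-level equality, ?b is only used in a non-equality form
-  )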
-
-(sc/defn analyze-condition :- {;; Variables used in the condition that are bound
- :bound #{sc/Symbol}
-
- ;; Variables used in the condition that are unbound.
- :unbound #{sc/Symbol}
-
- ;; The condition that was analyzed
- :condition schema/Condition
-
- ;; Flag indicating it is an accumulator.
- :is-accumulator sc/Bool}
- [condition :- schema/Condition]
-
- (let [dnf-condition (to-dnf condition)
-
-        ;; Break the condition up into leaves
- leaf-conditions (case (first dnf-condition)
-
- ;; A top level disjunction, so get all child conjunctions and
- ;; flatten them.
- :or
- (for [nested-condition (rest dnf-condition)
- leaf-condition (if (= :and (first nested-condition))
- (rest nested-condition)
- [nested-condition])]
- leaf-condition)
-
-                         ;; A top level :and of nested conditions, so just use them
- :and
- (rest dnf-condition)
-
- ;; The condition itself is a leaf, so keep it.
- [dnf-condition])]
-
- (reduce
- (fn [{:keys [bound unbound condition is-accumulator]} leaf-condition]
-
-        ;; The effective leaf for variable purposes may be contained in a negation or
- ;; an accumulator, so extract it.
- (let [effective-leaf (condp = (condition-type leaf-condition)
- :accumulator (:from leaf-condition)
- :negation (second leaf-condition)
- leaf-condition)
-
- constraints (:constraints effective-leaf)
-
- [bound-variables unbound-variables] (if (#{:negation :test} (condition-type leaf-condition))
- ;; Variables used in a negation should be considered
- ;; unbound since they aren't usable in another condition,
- ;; so label all variables as unbound. Similarly, :test
- ;; conditions can't bind new variables since they don't
- ;; have any new facts as input. See:
- ;; https://github.com/cerner/clara-rules/issues/357
- [#{}
- (apply set/union (classify-variables constraints))]
-
- ;; It is not a negation, so simply classify variables.
- (classify-variables constraints))
-
- bound-with-result-bindings (cond-> bound-variables
- (:fact-binding effective-leaf) (conj (symbol (name (:fact-binding effective-leaf))))
- (:result-binding leaf-condition) (conj (symbol (name (:result-binding leaf-condition)))))
-
- ;; All variables bound in this condition.
- all-bound (set/union bound bound-with-result-bindings)
-
- ;; Unbound variables, minus those that have been bound elsewhere in this condition.
- all-unbound (set/difference (set/union unbound-variables unbound) all-bound)]
-
- {:bound all-bound
- :unbound all-unbound
- :condition condition
- :is-accumulator (or is-accumulator
- (= :accumulator
- (condition-type leaf-condition)))}))
-
- {:bound #{}
- :unbound #{}
- :condition condition
- :is-accumulator false}
- leaf-conditions)))
-
-(sc/defn sort-conditions :- [schema/Condition]
-  "Performs a topological sort of conditions to ensure variables needed by
- child conditions are bound."
- [conditions :- [schema/Condition]]
-
- ;; Get the bound and unbound variables for all conditions and sort them.
- (let [classified-conditions (map analyze-condition conditions)]
-
- (loop [sorted-conditions []
- bound-variables #{}
- remaining-conditions classified-conditions]
-
- (if (empty? remaining-conditions)
- ;; No more conditions to sort, so return the raw conditions
- ;; in sorted order.
- (map :condition sorted-conditions)
-
- ;; Unsatisfied conditions remain, so find ones we can satisfy.
- (let [satisfied? (fn [classified-condition]
- (set/subset? (:unbound classified-condition)
- bound-variables))
-
- ;; Find non-accumulator conditions that are satisfied. We defer
- ;; accumulators until later in the rete network because they
- ;; may fire a default value if all needed bindings earlier
- ;; in the network are satisfied.
- satisfied-non-accum? (fn [classified-condition]
- (and (not (:is-accumulator classified-condition))
- (set/subset? (:unbound classified-condition)
- bound-variables)))
-
- has-satisfied-non-accum (some satisfied-non-accum? remaining-conditions)
-
- newly-satisfied (if has-satisfied-non-accum
- (filter satisfied-non-accum? remaining-conditions)
- (filter satisfied? remaining-conditions))
-
- still-unsatisfied (if has-satisfied-non-accum
- (remove satisfied-non-accum? remaining-conditions)
- (remove satisfied? remaining-conditions))
-
- updated-bindings (apply set/union bound-variables
- (map :bound newly-satisfied))]
-
-          ;; If no remaining conditions can be satisfied then the production is invalid.
- (when (empty? newly-satisfied)
-
- ;; Get the subset of variables that cannot be satisfied.
- (let [unsatisfiable (set/difference
- (apply set/union (map :unbound still-unsatisfied))
- bound-variables)]
- (throw (ex-info (str "Using variable that is not previously bound. This can happen "
- "when an expression uses a previously unbound variable, "
- "or if a variable is referenced in a nested part of a parent "
- "expression, such as (or (= ?my-expression my-field) ...). " \newline
-                                 "Note that variables used in negations are not bound for subsequent rules since the negation can never match." \newline
- "Production: " \newline
- (:production *compile-ctx*) \newline
- "Unbound variables: "
- unsatisfiable)
- {:production (:production *compile-ctx*)
- :variables unsatisfiable}))))
-
- (recur (into sorted-conditions newly-satisfied)
- updated-bindings
- still-unsatisfied))))))
-
-(defn- non-equality-unifications
- "Returns a set of unifications that do not use equality-based checks."
- [constraints]
- (let [[bound-variables unbound-variables] (classify-variables constraints)]
- (into #{}
- (for [constraint constraints
- :when (non-equality-unification? constraint bound-variables)]
- constraint))))
-
-(sc/defn condition-to-node :- schema/ConditionNode
- "Converts a condition to a node structure."
- [condition :- schema/Condition
- env :- (sc/maybe {sc/Keyword sc/Any})
- parent-bindings :- #{sc/Keyword}]
- (let [node-type (condition-type condition)
- accumulator (:accumulator condition)
- result-binding (:result-binding condition) ; Get the optional result binding used by accumulators.
- condition (cond
- (= :negation node-type) (second condition)
- accumulator (:from condition)
- :else condition)
-
- ;; Convert a test within a negation to a negation of the test. This is necessary
- ;; because negation nodes expect an additional condition to match against.
- [node-type condition] (if (and (= node-type :negation)
- (= :test (condition-type condition)))
-
- ;; Create a negated version of our test condition.
- [:test {:constraints [(list 'not (cons 'and (:constraints condition)))]}]
-
- ;; This was not a test within a negation, so keep the previous values.
- [node-type condition])
-
- ;; Get the set of non-equality unifications that cannot be resolved locally to the rule.
- non-equality-unifications (if (or (= :accumulator node-type)
- (= :negation node-type)
- (= :join node-type))
- (non-equality-unifications (:constraints condition))
- #{})
-
-        ;; If there are any non-equality unifications, create a join with filter expression to handle them.
- join-filter-expressions (if (seq non-equality-unifications)
-
- (assoc condition :constraints (filterv non-equality-unifications (:constraints condition)))
-
- nil)
-
- ;; Remove instances of non-equality constraints from accumulator
- ;; and negation nodes, since those are handled with specialized node implementations.
- condition (if (seq non-equality-unifications)
- (assoc condition
- :constraints (into [] (remove non-equality-unifications (:constraints condition)))
- :original-constraints (:constraints condition))
-
- condition)
-
- ;; Variables used in the constraints
- constraint-bindings (variables-as-keywords (:constraints condition))
-
- ;; Variables used in the condition.
- cond-bindings (if (:fact-binding condition)
- (conj constraint-bindings (:fact-binding condition))
- constraint-bindings)
-
- new-bindings (set/difference (variables-as-keywords (:constraints condition))
- parent-bindings)
-
- join-filter-bindings (if join-filter-expressions
- (variables-as-keywords join-filter-expressions)
- nil)]
-
- (cond->
- {:node-type node-type
- :condition condition
- :new-bindings new-bindings
- :used-bindings (set/union cond-bindings join-filter-bindings)}
-
- (seq env) (assoc :env env)
-
- ;; Add the join bindings to join, accumulator or negation nodes.
- (#{:join :negation :accumulator} node-type) (assoc :join-bindings (set/intersection cond-bindings parent-bindings))
-
- accumulator (assoc :accumulator accumulator)
-
- result-binding (assoc :result-binding result-binding)
-
- join-filter-expressions (assoc :join-filter-expressions join-filter-expressions)
-
- join-filter-bindings (assoc :join-filter-join-bindings (set/intersection join-filter-bindings parent-bindings)))))
-
-(sc/defn ^:private add-node :- schema/BetaGraph
- "Adds a node to the beta graph."
- [beta-graph :- schema/BetaGraph
- source-ids :- [sc/Int]
- target-id :- sc/Int
- target-node :- (sc/either schema/ConditionNode schema/ProductionNode)]
-
- ;; Add the production or condition to the network.
- (let [beta-graph (assoc-in beta-graph [:id-to-new-bindings target-id]
- ;; A ProductionNode will not have the set of new bindings previously created,
- ;; so we assign them an empty set here. A ConditionNode will always have a set of new bindings,
- ;; even if it is an empty one; this is enforced by the schema. Having an empty set of new bindings
- ;; is a valid and meaningful state that just means that all join bindings in that node come from right-activations
- ;; earlier in the network.
- (or (:new-bindings target-node)
- #{}))
-
- beta-graph (if (#{:production :query} (:node-type target-node))
- (assoc-in beta-graph [:id-to-production-node target-id] target-node)
- (assoc-in beta-graph [:id-to-condition-node target-id] target-node))]
-
-  ;; Associate the forward and backward edges.
- (reduce (fn [beta-graph source-id]
- (let [forward-path [:forward-edges source-id]
- forward-previous (get-in beta-graph forward-path)
- backward-path [:backward-edges target-id]
- backward-previous (get-in beta-graph backward-path)]
- (-> beta-graph
- (assoc-in
- forward-path
- (if forward-previous
- (conj forward-previous target-id)
- (sorted-set target-id)))
- (assoc-in
- backward-path
- (if backward-previous
- (conj backward-previous source-id)
- (sorted-set source-id))))))
-
- beta-graph
- source-ids)))
-
-(declare add-production)
-
-(sc/defn ^:private extract-negation :- {:new-expression schema/Condition
- :beta-with-negations schema/BetaGraph}
-
- "Extracts complex, nested negations and adds a rule to the beta graph to trigger the returned
- negation expression."
- [previous-expressions :- [schema/Condition]
- expression :- schema/Condition
- parent-ids :- [sc/Int]
- ancestor-bindings :- #{sc/Keyword}
- beta-graph :- schema/BetaGraph
- production :- schema/Production
- create-id-fn]
-
- (if (and (= :not (first expression))
- (sequential? (second expression))
- (#{:and :or :not} (first (second expression))))
-
- ;; Dealing with a compound negation, so extract it out.
- (let [negation-expr (second expression)
- gen-rule-name (str (or (:name production)
- (gensym "gen-rule"))
- "__"
- (gensym))
-
- ;; Insert the bindings from ancestors that are used in the negation
- ;; in the NegationResult fact so that the [:not [NegationResult...]]
- ;; condition can assert that the facts matching the negation
- ;; have the necessary bindings.
- ;; See https://github.com/cerner/clara-rules/issues/304 for more details
- ;; and a case that behaves incorrectly without this check.
- ancestor-bindings-in-negation-expr (set/intersection
- (variables-as-keywords negation-expr)
- ancestor-bindings)
-
- ancestor-bindings-insertion-form (into {}
- (map (fn [binding]
- [binding (-> binding
- name
- symbol)]))
- ancestor-bindings-in-negation-expr)
-
- ancestor-binding->restriction-form (fn [b]
- (list '= (-> b name symbol)
- (list b 'ancestor-bindings)))
-
- modified-expression `[:not {:type clara.rules.engine.NegationResult
- :constraints [(~'= ~gen-rule-name ~'gen-rule-name)
- ~@(map ancestor-binding->restriction-form
- ancestor-bindings-in-negation-expr)]}]
-
- generated-rule (cond-> {:name gen-rule-name
- :lhs (concat previous-expressions [negation-expr])
- :rhs `(clara.rules/insert! (eng/->NegationResult ~gen-rule-name
- ~ancestor-bindings-insertion-form))}
-
- ;; Propagate properties like salience to the generated production.
- (:props production) (assoc :props (:props production))
-
- true (assoc-in [:props :clara-rules/internal-salience] :extracted-negation)
-
-                              ;; Propagate the environment (such as local bindings) if applicable.
- (:env production) (assoc :env (:env production)))
-
- ;; Add the generated rule to the beta network.
-
- beta-with-negations (add-production generated-rule beta-graph create-id-fn)]
-
- {:new-expression modified-expression
- :beta-with-negations beta-with-negations})
-
- ;; The expression wasn't a negation, so return the previous content.
- {:new-expression expression
- :beta-with-negations beta-graph}))
-
-;; A beta graph with no nodes.
-(def ^:private empty-beta-graph {:forward-edges (sorted-map)
- :backward-edges (sorted-map)
- :id-to-condition-node (sorted-map 0 ::root-condition)
- :id-to-production-node (sorted-map)
- :id-to-new-bindings (sorted-map)})
-
-(sc/defn ^:private add-conjunctions :- {:beta-graph schema/BetaGraph
- :new-ids [sc/Int]
- :bindings #{sc/Keyword}}
-
- "Adds a sequence of conjunctions to the graph in a parent-child relationship."
-
- [conjunctions :- [schema/Condition]
- parent-ids :- [sc/Int]
- env :- (sc/maybe {sc/Keyword sc/Any})
- ancestor-bindings :- #{sc/Keyword}
- beta-graph :- schema/BetaGraph
- create-id-fn]
-
- (loop [beta-graph beta-graph
- parent-ids parent-ids
- bindings ancestor-bindings
- [expression & remaining-expressions] conjunctions]
-
- (if expression
-
- (let [node (condition-to-node expression env bindings)
-
- {:keys [result-binding fact-binding]} expression
-
- all-bindings (cond-> (set/union bindings (:used-bindings node))
- result-binding (conj result-binding)
- fact-binding (conj fact-binding))
-
- ;; Find children that all parent nodes have.
- forward-edges (if (= 1 (count parent-ids))
- ;; If there is only one parent then there is no need to reconstruct
- ;; the set of forward edges. This is an intentional performance optimization for large
- ;; beta graphs that have many nodes branching from a common node, typically the root node.
- (-> (:forward-edges beta-graph)
- (get (first parent-ids)))
-                                         (->> (select-keys (:forward-edges beta-graph) parent-ids)
- vals
- ;; Order doesn't matter here as we will effectively sort it using update-node->id later,
- ;; thus adding determinism.
- (into #{} cat)))
-
- id-to-condition-nodes (:id-to-condition-node beta-graph)
-
-                 ;; We require that id-to-condition-nodes have an equal value to "node" under the ID
- ;; for this to be used. In any possible edge cases where there are equal nodes under different IDs,
- ;; maintaining the lowest node id will add determinism.
- ;; Having different nodes under the same ID would be a bug,
- ;; but having an equivalent node under multiple IDs wouldn't necessarily be one.
- ;;
-            ;; Using maps (nodes) as keys acts as a performance optimization here; we are relying on the fact that maps cache
- ;; their hash codes. This saves us time in the worst case scenarios when there are large amounts of forward edges
- ;; that will be compared repeatedly. For example, when adding new children to the root node we must first compare
- ;; all of the old children for a prior match, for each additional child added we incur the assoc but the hash code
- ;; has already been cached by prior iterations of add-conjunctions. In these cases we have seen that the hashing
-            ;; will outperform equivalence checks.
- update-node->ids (fn [m id]
- (let [node (get id-to-condition-nodes id)
- prev (get m node)]
- (if prev
-                               ;; There might be nodes with multiple representations under the same parent; this would
-                               ;; likely be due to nodes that have the same condition but do not share all the same
-                               ;; parents. See Issue 433 for more information.
- (assoc! m node (conj prev id))
- (assoc! m node [id]))))
-
- node->ids (-> (reduce update-node->ids
- (transient {})
- forward-edges)
- persistent!)
-
- backward-edges (:backward-edges beta-graph)
-
- parent-ids-set (set parent-ids)
-
- ;; Use the existing id or create a new one.
- node-id (or (when-let [common-nodes (get node->ids node)]
- ;; We need to validate that the node we intend on sharing shares the same parents as the
- ;; current node we are creating. See Issue 433 for more information
- (some #(when (= (get backward-edges %)
- parent-ids-set)
- %)
- common-nodes))
- (create-id-fn))
-
- graph-with-node (add-node beta-graph parent-ids node-id node)]
-
- (recur graph-with-node
- [node-id]
- all-bindings
- remaining-expressions))
-
- ;; No expressions remaining, so return the structure.
- {:beta-graph beta-graph
- :new-ids parent-ids
- :bindings bindings})))
-
-(sc/defn ^:private add-production :- schema/BetaGraph
- "Adds a production to the graph of beta nodes."
- [production :- schema/Production
- beta-graph :- schema/BetaGraph
- create-id-fn]
-
-  ;; Flatten conditions, removing any extraneous ands so we can process them independently.
- (let [flattened-conditions (for [condition (:lhs production)
- child-condition (if (#{'and :and} (first condition))
- (rest condition)
- [condition])]
-
- child-condition)
-
- sorted-conditions (sort-conditions flattened-conditions)]
-
- (loop [previous-conditions []
- [current-condition & remaining-conditions] sorted-conditions
- parent-ids [0]
- ancestor-bindings #{}
- beta-graph beta-graph]
-
- (if current-condition
-
- (let [{:keys [new-expression beta-with-negations]}
- (extract-negation previous-conditions
- current-condition
- parent-ids
- ancestor-bindings
- beta-graph
- production
- create-id-fn)
-
- condition (or new-expression
- current-condition)
-
- ;; Extract disjunctions from the condition.
- dnf-expression (to-dnf condition)
-
- ;; Get a sequence of disjunctions.
- disjunctions (for [expression
- (if (= :or (first dnf-expression)) ; Ignore top-level or in DNF.
- (rest dnf-expression)
- [dnf-expression])]
-
- (if (= :and (first expression)) ; Ignore nested ands in DNF.
- (rest expression)
- [expression]))
-
- {beta-with-nodes :beta-graph new-ids :new-ids all-bindings :bindings}
- (reduce (fn [previous-result conjunctions]
-
- ;; Get the beta graph, new identifiers, and complete bindings
- (let [;; Convert exists operations to accumulator and test nodes.
- exists-extracted (extract-exists conjunctions)
-
- ;; Compute the new beta graph, ids, and bindings with the expressions.
- new-result (add-conjunctions exists-extracted
- parent-ids
- (:env production)
- ancestor-bindings
- (:beta-graph previous-result)
- create-id-fn)]
-
- ;; Combine the newly created beta graph, node ids, and bindings
-                                    ;; for use in descendant nodes.
- {:beta-graph (:beta-graph new-result)
- :new-ids (into (:new-ids previous-result) (:new-ids new-result))
- :bindings (set/union (:bindings previous-result)
- (:bindings new-result))}))
-
- ;; Initial reduce value, combining previous graph, parent ids, and ancestor variable bindings.
- {:beta-graph beta-with-negations
- :new-ids []
- :bindings ancestor-bindings}
-
- ;; Each disjunction contains a sequence of conjunctions.
- disjunctions)]
-
- (recur (conj previous-conditions current-condition)
- remaining-conditions
- new-ids
- all-bindings
- beta-with-nodes))
-
- ;; No more conditions to add, so connect the production.
- (if (:rhs production)
-      ;; if it's a production node, simply add it
- (add-node beta-graph
- parent-ids
- (create-id-fn)
- {:node-type :production
- :production production
- :bindings ancestor-bindings})
-      ;; else it's a query node and we need to validate that the query has at least the bindings
- ;; specified in the parameters
- (if (every? ancestor-bindings (:params production))
- (add-node beta-graph
- parent-ids
- (create-id-fn)
- {:node-type :query
- :query production})
- (throw (ex-info "Query does not contain bindings specified in parameters."
- {:expected-bindings (:params production)
- :available-bindings ancestor-bindings
- :query (:name production)}))))))))
-
-(sc/defn to-beta-graph :- schema/BetaGraph
- "Produces a description of the beta network."
- [productions :- #{schema/Production}
- create-id-fn :- IFn]
- (reduce (fn [beta-graph production]
- (binding [*compile-ctx* {:production production}]
- (add-production production beta-graph create-id-fn)))
-
- empty-beta-graph
- productions))
-
-(sc/defn ^:private root-node? :- sc/Bool
-  "A helper function to determine if the node-id provided is a root node. A given node is considered a root node
-   if its only backward edge is to the artificial root node, node 0."
- [backward-edges :- {sc/Int #{sc/Int}}
- node-id :- sc/Int]
- (= #{0} (get backward-edges node-id)))
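-
-;; For example, with backward edges {3 #{0}, 4 #{3}}:
-(comment
-  (root-node? {3 #{0}, 4 #{3}} 3)  ;; => true, its only parent is the artificial root node 0
-  (root-node? {3 #{0}, 4 #{3}} 4)  ;; => false
-  )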
-
-(sc/defn extract-exprs :- schema/NodeExprLookup
- "Walks the Alpha and Beta graphs and extracts the expressions that will be used in the construction of the final network.
-   The extracted expressions are stored by their key, [node-id expr-key]; this allows the function to be retrieved
-   after it has been compiled.
-
- Note: The keys of the map returned carry the metadata that can be used during evaluation. This metadata will contain,
- if available, the compile context, file and ns. This metadata is not stored on the expression itself because it will
- contain forms that will fail to eval."
- [beta-graph :- schema/BetaGraph
- alpha-graph :- [schema/AlphaNode]]
- (let [backward-edges (:backward-edges beta-graph)
- handle-expr (fn [id->expr s-expr id expr-key compilation-ctx]
- (assoc id->expr
- [id expr-key]
- [s-expr (assoc compilation-ctx expr-key s-expr)]))
-
- ;; If extract-exprs ever became a hot spot, this could be changed out to use more java interop.
- id->expr (reduce (fn [prev alpha-node]
- (let [{:keys [id condition env]} alpha-node
- {:keys [type constraints fact-binding args]} condition
- cmeta (meta condition)]
- (handle-expr prev
- (with-meta (compile-condition
- type id (first args) constraints
- fact-binding env)
- ;; Remove all metadata but file and line number
- ;; to protect from evaluating unsafe metadata
- ;; See PR 243 for more detailed discussion
- (select-keys cmeta [:line :file]))
- id
- :alpha-expr
- {:file (or (:file cmeta) *file*)
- :compile-ctx {:condition condition
- :env env
- :msg "compiling alpha node"}})))
- {}
- alpha-graph)
- id->expr (reduce-kv (fn [prev id production-node]
- (let [production (-> production-node :production)]
- (handle-expr prev
- (with-meta (compile-action id
- (:bindings production-node)
- (:rhs production)
- (:env production))
- (meta (:rhs production)))
- id
- :action-expr
-                                              ;; ProductionNode expressions can be arbitrary code; therefore we need the
-                                              ;; ns where the production was defined so that we can compile the expression
- ;; later.
- {:ns (:ns-name production)
- :file (-> production :rhs meta :file)
- :compile-ctx {:production production
- :msg "compiling production node"}})))
- id->expr
- (:id-to-production-node beta-graph))]
- (reduce-kv
- (fn [prev id beta-node]
- (let [condition (:condition beta-node)
- condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
- condition)]
- (case (or (:node-type beta-node)
- ;; If there is no :node-type then the node is the ::root-condition
- ;; however, creating a case for nil could potentially cause weird effects if something about the
-                               ;; compilation of the beta graph changes. Therefore we make an explicit case for ::root-condition,
-                               ;; and if there is anything that isn't ::root-condition this case statement will fail rather than
-                               ;; failing somewhere else.
- beta-node)
- ::root-condition prev
-
- :join (if (or (root-node? backward-edges id)
- (not (:join-filter-expressions beta-node)))
- ;; This is either a RootJoin or HashJoin node, in either case they do not have an expression
- ;; to capture.
- prev
- (handle-expr prev
- (compile-join-filter id
- "ExpressionJoinNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling expression join node"}}))
- :negation (if (:join-filter-expressions beta-node)
- (handle-expr prev
- (compile-join-filter id
- "NegationWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling negation with join filter node"}})
- prev)
- :test (handle-expr prev
- (compile-test id (:constraints condition) (:env beta-node))
- id
- :test-expr
- {:compile-ctx {:condition condition
- :env (:env beta-node)
- :msg "compiling test node"}})
- :accumulator (cond-> (handle-expr prev
- (compile-accum id
- (if (:join-filter-expressions beta-node)
- "AccumulateWithJoinFilterNode"
- "AccumulateNode")
- (:accumulator beta-node)
- (:env beta-node))
- id
- :accum-expr
- {:compile-ctx {:condition condition
- :accumulator (:accumulator beta-node)
- :env (:env beta-node)
- :msg "compiling accumulator"}})
-
- (:join-filter-expressions beta-node)
- (handle-expr (compile-join-filter id
- "AccumulateWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling accumulate with join filter node"}}))
- :query prev
-
- ;; This error should only be thrown if there are changes to the compilation of the beta-graph
- ;; such as an addition of a node type.
- (throw (ex-info "Invalid node type encountered while compiling rulebase."
- {:node beta-node})))))
- id->expr
- (:id-to-condition-node beta-graph))))
-
-(sc/defn compile-exprs :- schema/NodeFnLookup
- "Takes a map in the form produced by extract-exprs and evaluates the values(expressions) of the map in a batched manner.
-   This allows the eval calls to be more efficient, rather than evaluating each expression on its own.
- See #381 for more details."
- [key->expr :- schema/NodeExprLookup
- partition-size :- sc/Int]
- (let [batching-try-eval (fn [compilation-ctxs exprs]
- ;; Try to evaluate all of the expressions as a batch. If they fail the batch eval then we
- ;; try them one by one with their compilation context, this is slow but we were going to fail
-                             ;; try them one by one with their compilation context; this is slow, but we were going to fail
- (try
- (mapv vector (eval exprs) compilation-ctxs)
- (catch Exception e
-                               ;; Using mapv here rather than map to avoid laziness; otherwise compilation failure might
- ;; fall into the throw below for the wrong reason.
- (mapv (fn [expr compilation-ctx]
- (with-bindings
- {#'*compile-ctx* (:compile-ctx compilation-ctx)
- #'*file* (:file compilation-ctx *file*)}
- (try-eval expr)))
- exprs
- compilation-ctxs)
- ;; If none of the rules are the issue, it is likely that the
- ;; size of the code trying to be evaluated has exceeded the limit
- ;; set by java.
-                     (throw (ex-info (str "There was a failure while batch evaling the node expressions " \newline
-                                          "that wasn't present when evaling them individually. This likely indicates " \newline
-                                          "that the method size exceeded the maximum set by the jvm; see the cause for the actual error.")
- {:compilation-ctxs compilation-ctxs}
- e)))))]
- (into {}
- cat
- ;; Grouping by ns, most expressions will not have a defined ns, only expressions from production nodes.
- ;; These expressions must be evaluated in the ns where they were defined, as they will likely contain code
- ;; that is namespace dependent.
- (for [[nspace group] (sort-by key (group-by (comp :ns second val) key->expr))
-          ;; Partitioning the number of forms to be evaluated; Java has a limit on the size of methods, and if we were
-          ;; to evaluate all expressions at once it would likely exceed this limit and throw an exception.
- expr-batch (partition-all partition-size group)
- :let [node-expr-keys (mapv first expr-batch)
- compilation-ctxs (mapv (comp second val) expr-batch)
- exprs (mapv (comp first val) expr-batch)]]
- (mapv vector
- node-expr-keys
- (with-bindings (if nspace
- {#'*ns* (the-ns nspace)}
- {})
- (batching-try-eval compilation-ctxs exprs)))))))
-
-(defn safe-get
- "A helper function for retrieving a given key from the provided map. If the key doesn't exist within the map this
- function will throw an exception."
- [m k]
- (let [not-found ::not-found
- v (get m k not-found)]
- (if (identical? v not-found)
- (throw (ex-info "Key not found with safe-get" {:map m :key k}))
- v)))
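-
-;; Behavior sketch for safe-get:
-(comment
-  (safe-get {:a 1} :a)    ;; => 1
-  (safe-get {:a nil} :a)  ;; => nil, present keys are returned even when the value is nil
-  (safe-get {:a 1} :b)    ;; throws ex-info "Key not found with safe-get"
-  )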
-
-(sc/defn ^:private compile-node
- "Compiles a given node description into a node usable in the network with the
- given children."
- [beta-node :- (sc/either schema/ConditionNode schema/ProductionNode)
- id :- sc/Int
- is-root :- sc/Bool
- children :- [sc/Any]
- expr-fn-lookup :- schema/NodeFnLookup
- new-bindings :- #{sc/Keyword}]
-
- (let [{:keys [condition production query join-bindings env]} beta-node
-
- condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
- condition)
-
- compiled-expr-fn (fn [id field] (first (safe-get expr-fn-lookup [id field])))]
-
- (case (:node-type beta-node)
-
- :join
-      ;; Use a specialized root node for efficiency in this case.
- (if is-root
- (eng/->RootJoinNode
- id
- condition
- children
- join-bindings)
-
- ;; If the join operation includes arbitrary expressions
-      ;; that can't be expressed as a hash join, we must use the expressions
- (if (:join-filter-expressions beta-node)
- (eng/->ExpressionJoinNode
- id
- condition
- (compiled-expr-fn id :join-filter-expr)
- children
- join-bindings)
- (eng/->HashJoinNode
- id
- condition
- children
- join-bindings)))
-
- :negation
- ;; Check to see if the negation includes an
- ;; expression that must be joined to the incoming token
- ;; and use the appropriate node type.
- (if (:join-filter-expressions beta-node)
- (eng/->NegationWithJoinFilterNode
- id
- condition
- (compiled-expr-fn id :join-filter-expr)
- children
- join-bindings)
- (eng/->NegationNode
- id
- condition
- children
- join-bindings))
-
- :test
- (eng/->TestNode
- id
- env
- (compiled-expr-fn id :test-expr)
- children)
-
- :accumulator
- ;; We create an accumulator that accepts the environment for the beta node
- ;; into its context, hence the function with the given environment.
- (let [compiled-node (compiled-expr-fn id :accum-expr)
- compiled-accum (compiled-node (:env beta-node))]
-
- ;; Ensure the compiled accumulator has the expected structure
- (when (not (instance? Accumulator compiled-accum))
- (throw (IllegalArgumentException. (str (:accumulator beta-node) " is not a valid accumulator."))))
-
- ;; If a non-equality unification is in place, compile the predicate and use
- ;; the specialized accumulate node.
-
- (if (:join-filter-expressions beta-node)
- (eng/->AccumulateWithJoinFilterNode
- id
- ;; Create an accumulator structure for use when examining the node or the tokens
- ;; it produces.
- {:accumulator (:accumulator beta-node)
- ;; Include the original filter expressions in the constraints for inspection tooling.
- :from (update-in condition [:constraints]
- into (-> beta-node :join-filter-expressions :constraints))}
- compiled-accum
- (compiled-expr-fn id :join-filter-expr)
- (:result-binding beta-node)
- children
- join-bindings
- (:new-bindings beta-node))
-
- ;; All unification is based on equality, so just use the simple accumulate node.
- (eng/->AccumulateNode
- id
- ;; Create an accumulator structure for use when examining the node or the tokens
- ;; it produces.
- {:accumulator (:accumulator beta-node)
- :from condition}
- compiled-accum
- (:result-binding beta-node)
- children
- join-bindings
- (:new-bindings beta-node))))
-
- :production
- (eng/->ProductionNode
- id
- production
- (compiled-expr-fn id :action-expr))
-
- :query
- (eng/->QueryNode
- id
- query
- (:params query)))))
-
-(sc/defn ^:private compile-beta-graph :- {sc/Int sc/Any}
- "Compile the beta description to the nodes used at runtime."
- [{:keys [id-to-production-node id-to-condition-node id-to-new-bindings forward-edges backward-edges]} :- schema/BetaGraph
- expr-fn-lookup :- schema/NodeFnLookup]
- (let [;; Sort the ids to compile based on dependencies.
- ids-to-compile (loop [pending-ids (into #{} (concat (keys id-to-production-node) (keys id-to-condition-node)))
- node-deps forward-edges
- sorted-nodes []]
-
- (if (empty? pending-ids)
- sorted-nodes
-
- (let [newly-satisfied-ids (into #{}
- (for [pending-id pending-ids
- :when (empty? (get node-deps pending-id))]
- pending-id))
-
- updated-edges (into {} (for [[dependent-id dependencies] node-deps]
- [dependent-id (set/difference dependencies newly-satisfied-ids)]))]
-
- (recur (set/difference pending-ids newly-satisfied-ids)
- updated-edges
- (concat sorted-nodes newly-satisfied-ids)))))]
-
- (reduce (fn [id-to-compiled-nodes id-to-compile]
-
- ;; Get the condition or production node to compile
- (let [node-to-compile (get id-to-condition-node
- id-to-compile
- (get id-to-production-node id-to-compile))
-
- ;; Get the children. The children should be sorted because the
- ;; id-to-compiled-nodes map is sorted.
- children (->> (get forward-edges id-to-compile)
- (select-keys id-to-compiled-nodes)
- (vals))]
-
- ;; Sanity check for our logic...
- (assert (= (count children)
- (count (get forward-edges id-to-compile)))
- "Each child should be compiled.")
-
- (if (= ::root-condition node-to-compile)
- id-to-compiled-nodes
- (assoc id-to-compiled-nodes
- id-to-compile
- (compile-node node-to-compile
- id-to-compile
- (root-node? backward-edges id-to-compile)
- children
- expr-fn-lookup
- (get id-to-new-bindings id-to-compile))))))
- ;; The node IDs have been determined before now, so we just need to sort the map returned.
- ;; This matters because the engine will left-activate the beta roots with the empty token
- ;; in the order that this map is seq'ed over.
- (sorted-map)
- ids-to-compile)))
-
-(sc/defn to-alpha-graph :- [schema/AlphaNode]
- "Returns a sequence of [condition-fn, [node-ids]] tuples to represent the alpha side of the network."
- [beta-graph :- schema/BetaGraph
- create-id-fn :- IFn]
-
- ;; Create a sequence of tuples of conditions + env to beta node ids.
- (let [condition-to-node-ids (for [[id node] (:id-to-condition-node beta-graph)
- :when (:condition node)]
- [[(:condition node) (:env node)] id])
-
- ;; Merge common conditions together.
- condition-to-node-map (reduce
- (fn [node-map [[condition env] node-id]]
-
- ;; Can't use simple update-in because we need to ensure
- ;; the value is a vector, not a list.
- (if (get node-map [condition env])
- (update-in node-map [[condition env]] conj node-id)
- (assoc node-map [condition env] [node-id])))
- {}
- condition-to-node-ids)
-
- ;; We sort the alpha nodes by the ordered sequence of the node ids they correspond to
- ;; in order to make the order of alpha nodes for any given type consistent. Note that we
- ;; coerce to a vector because we need a type that is comparable.
- condition-to-node-entries (sort-by (fn [[k v]] (-> v sort vec))
- condition-to-node-map)]
-
- ;; Compile conditions into functions.
- (vec
- (for [[[condition env] node-ids] condition-to-node-entries
- :when (:type condition)] ; Exclude test conditions.
-
-
- (cond-> {:id (create-id-fn)
- :condition condition
- :beta-children (distinct node-ids)}
- (seq env) (assoc :env env))))))
-
-(sc/defn compile-alpha-nodes :- [{:id sc/Int
- :type sc/Any
- :alpha-fn sc/Any ;; TODO: is a function...
- (sc/optional-key :env) {sc/Keyword sc/Any}
- :children [sc/Num]}]
- [alpha-nodes :- [schema/AlphaNode]
- expr-fn-lookup :- schema/NodeFnLookup]
- (for [{:keys [id condition beta-children env] :as node} alpha-nodes]
- (cond-> {:id id
- :type (effective-type (:type condition))
- :alpha-fn (first (safe-get expr-fn-lookup [id :alpha-expr]))
- :children beta-children}
- env (assoc :env env))))
-
-;; Wrap the fact-type so that Clojure equality and hashcode semantics are used
-;; even though this is placed in a Java map.
-(deftype AlphaRootsWrapper [fact-type ^int fact-type-hash roots]
- Object
- (equals [this other]
- (let [other ^AlphaRootsWrapper other]
- (cond
-
- (identical? fact-type (.fact-type other))
- true
-
- (not (== fact-type-hash (.fact-type-hash other)))
- false
-
- :else
- (= fact-type (.fact-type other)))))
-
- ;; Since we know we will need the hashcode of this object in all cases, just eagerly calculate it upfront
- ;; and avoid extra calls to hash later.
- (hashCode [this] fact-type-hash))
-
-(defn- create-get-alphas-fn
- "Returns a function that given a sequence of facts,
- returns a map associating alpha nodes with the facts they accept."
- [fact-type-fn ancestors-fn alpha-roots]
-
- (let [;; If a customized fact-type-fn is provided,
- ;; we must use a specialized grouping function
- ;; that handles internal control types that may not
- ;; follow the provided type function.
- wrapped-fact-type-fn (if (= fact-type-fn type)
- type
- (fn [fact]
- (if (instance? ISystemFact fact)
- ;; Internal system types always use Clojure's type mechanism.
- (type fact)
- ;; All other types defer to the provided function.
- (fact-type-fn fact))))
-
- ;; Wrap the ancestors-fn so that we don't send internal facts such as NegationResult
- ;; to user-provided productions. Note that this work is memoized inside fact-type->roots.
- wrapped-ancestors-fn (fn [fact-type]
- (if (isa? fact-type ISystemFact)
- ;; Exclude system types from having ancestors for now
- ;; since none of our use-cases require them. If this changes
- ;; we may need to define a custom hierarchy for them.
- #{}
- (ancestors-fn fact-type)))
-
- fact-type->roots (memoize
- (fn [fact-type]
- ;; There is no inherent ordering here but we put the AlphaRootsWrapper instances
- ;; in a vector rather than a set to avoid nondeterministic ordering (and thus nondeterministic
- ;; performance).
- (into []
- ;; If a given type in the ancestors has no matching alpha roots,
- ;; don't return it as an ancestor. Fact-type->roots is memoized on the fact type,
- ;; but work is performed per group returned on each call to the get-alphas-fn. Therefore
- ;; removing groups with no alpha nodes here will improve performance on subsequent calls
- ;; to the get-alphas-fn with the same fact type.
- (keep #(when-let [roots (not-empty (get alpha-roots %))]
- (AlphaRootsWrapper. % (hash %) roots)))
- ;; If a user-provided ancestors-fn returns a sorted collection, for example for
- ;; ensuring determinism, we respect that ordering here by conj'ing on to the existing
- ;; collection.
- (conj (wrapped-ancestors-fn fact-type) fact-type))))
-
- update-roots->facts! (fn [^java.util.Map roots->facts roots-group fact]
- (if-let [v (.get roots->facts roots-group)]
- (.add ^java.util.List v fact)
- (.put roots->facts roots-group (doto (java.util.LinkedList.)
- (.add fact)))))]
-
- (fn [facts]
- (let [roots->facts (java.util.LinkedHashMap.)]
-
- (doseq [fact facts
- roots-group (fact-type->roots (wrapped-fact-type-fn fact))]
- (update-roots->facts! roots->facts roots-group fact))
-
- (let [return-list (java.util.LinkedList.)
- entries (.entrySet roots->facts)
- entries-it (.iterator entries)]
- ;; We iterate over the LinkedHashMap manually to avoid potential issues described at http://dev.clojure.org/jira/browse/CLJ-1738
- ;; where a Java iterator can return the same entry object repeatedly and mutate it after each next() call. We use mutable lists
- ;; for performance but wrap them in unmodifiableList to make it clear that the caller is not expected to mutate these lists.
- ;; Since after this function returns the only reference to the fact lists will be through the unmodifiableList we can depend elsewhere
- ;; on these lists not changing. Since the only expected workflow with these lists is to loop through them, not add or remove elements,
- ;; we don't gain much from using a transient (which can be efficiently converted to a persistent data structure) rather than a mutable type.
- (loop []
- (when (.hasNext entries-it)
- (let [^java.util.Map$Entry e (.next entries-it)]
- (.add return-list [(-> e ^AlphaRootsWrapper (.getKey) .roots)
- (java.util.Collections/unmodifiableList (.getValue e))])
- (recur))))
-
- (java.util.Collections/unmodifiableList return-list))))))
-
-(sc/defn build-network
- "Constructs the network from compiled beta tree and condition functions."
- [id-to-node :- {sc/Int sc/Any}
- beta-roots
- alpha-fns
- productions
- fact-type-fn
- ancestors-fn
- activation-group-sort-fn
- activation-group-fn
- expr-fn-lookup]
-
- (let [beta-nodes (vals id-to-node)
-
- production-nodes (for [node beta-nodes
- :when (= ProductionNode (type node))]
- node)
-
- query-nodes (for [node beta-nodes
- :when (= QueryNode (type node))]
- node)
-
- query-map (into {} (for [query-node query-nodes
-
- ;; Queries can be looked up by reference or by name;
- entry [[(:query query-node) query-node]
- [(:name (:query query-node)) query-node]]]
- entry))
-
- ;; type, alpha node tuples.
- alpha-nodes (for [{:keys [id type alpha-fn children env] :as alpha-map} alpha-fns
- :let [beta-children (map id-to-node children)]]
- [type (eng/->AlphaNode id env beta-children alpha-fn type)])
-
- ;; Merge the alpha nodes into a multi-map
- alpha-map (reduce
- (fn [alpha-map [type alpha-node]]
- (update-in alpha-map [type] conj alpha-node))
- {}
- alpha-nodes)
-
- ;; Merge the alpha nodes into the id-to-node map
- id-to-node (into id-to-node
- (map (juxt :id identity))
- (mapv second alpha-nodes))
-
- get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn alpha-map)]
-
- #_{:clj-kondo/ignore [:unresolved-symbol]}
- (strict-map->Rulebase
- {:alpha-roots alpha-map
- :beta-roots beta-roots
- :productions productions
- :production-nodes production-nodes
- :query-nodes query-map
- :id-to-node id-to-node
- :activation-group-sort-fn activation-group-sort-fn
- :activation-group-fn activation-group-fn
- :get-alphas-fn get-alphas-fn
- :node-expr-fn-lookup expr-fn-lookup})))
-
-;; Cache of sessions for fast reloading.
-(def ^:private session-cache (atom {}))
-
-(defn clear-session-cache!
- "Clears the cache of reusable Clara sessions, so any subsequent sessions
- will be re-compiled from the rule definitions. This is intended for use
- by tooling or specialized needs; most users can simply specify the :cache false
- option when creating sessions."
- []
- (reset! session-cache {}))
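-
-;; A minimal usage sketch (not from the original source): my.app.rules is a
-;; hypothetical rule namespace; mk-session and the :cache option are defined
-;; later in this namespace.
-(comment
-  ;; Skip the session cache for a one-off session:
-  (mk-session ['my.app.rules :cache false])
-  ;; Or drop every cached session so the next mk-session call recompiles:
-  (clear-session-cache!))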
-
-(defn production-load-order-comp [a b]
- (< (-> a meta ::rule-load-order)
- (-> b meta ::rule-load-order)))
-
-(defn validate-names-unique
- "Checks that all productions included in the session have unique names,
- throwing an exception if duplicates are found."
- [productions]
- (let [non-unique (->> productions
- (group-by :name)
- (filter (fn [[k v]] (and (some? k) (not= 1 (count v)))))
- (map key)
- set)]
- (if (empty? non-unique)
- productions
- (throw (ex-info (str "Non-unique production names: " non-unique) {:names non-unique})))))
-
-(def forms-per-eval-default
- "The default max number of forms that will be evaluated together as a single batch.
- 5000 is chosen here due to the way that clojure will evaluate the vector of forms extracted from the nodes.
- The limiting factor here is the max java method size (64KiB); clojure will compile each form in the vector down into
- its own class file and generate another class file that will reference each of the other functions and wrap them in
- a vector inside a static method. For example,
-
- (eval [(fn one [_] ...) (fn two [_] ...)])
- would generate 3 classes.
-
- some_namespace$eval1234
- some_namespace$eval1234$one_1234
- some_namespace$eval1234$two_1235
-
- some_namespace$eval1234$one_1234 and some_namespace$eval1234$two_1235 contain the implementations of the functions,
- while some_namespace$eval1234 will contain two methods, invoke and invokeStatic.
- The invokeStatic method breaks down into something similar to a single create-array call followed by 2 array-set calls
- with new invocations on the 2 classes; the method then returns a new vector created from the array.
-
- 5000 is lower than the absolute max to allow for modifications to how clojure compiles without needing to modify this.
- The current limit should be 5471, this is derived from the following opcode investigation:
-
- Array creation: 5B
- Creating and populating the first 6 elements of the array: 60B
- Creating and populating the next 122 elements of the array: 1,342B
- Creating and populating the next 5343 elements of the array: 64,116B
- Creating the vector and the return statement: 4B
-
- This sums to 65,527B just shy of the 65,536B method size limit."
- 5000)
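-
-;; Sketch of overriding the batch size described above; 1000 is an arbitrary
-;; illustrative value and my.app.rules a hypothetical rule namespace.
-(comment
-  (mk-session ['my.app.rules :forms-per-eval 1000]))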
-
-(def omit-compile-ctx-default
- "During construction of the Session there is data maintained such that if the underlying expressions fail to compile
- then this data can be used to explain the failure and the constraints of the rule who's expression is being evaluated.
- The default behavior will be to discard this data, as there will be no use unless the session will be serialized and
- deserialized into a dissimilar environment, ie function or symbols might be unresolvable. In those sorts of scenarios
- it would be possible to construct the original Session with the `omit-compile-ctx` flag set to false, then the compile
- context should aid in debugging the compilation failure on deserialization."
- true)
-
-(sc/defn mk-session*
- "Compile the rules into a rete network and return the given session."
- [productions :- #{schema/Production}
- options :- {sc/Keyword sc/Any}]
- (let [;; We need to put the productions in a sorted set here, instead of in mk-session, since we don't want
- ;; a sorted set in the session-cache. If we did, only rule load order would be used to compare set equality
- ;; when looking for a cache hit, whereas we want to compare the entire productions, which a non-sorted set does.
- ;; When using a sorted set, for constant options,
- ;; any session creation with n productions would be a cache hit if any previous session had n productions.
- ;; Furthermore, we can avoid the work of sorting the set until we know that there was a cache miss.
- ;;
- ;; Note that this ordering is not for correctness; we are just trying to increase consistency of rulebase compilation,
- ;; and hopefully thereby execution times, from run to run.
- _ (validate-names-unique productions)
- productions (with-meta (into (sorted-set-by production-load-order-comp)
- productions)
- ;; Store the name of the custom comparator for durability.
- {:clara.rules.durability/comparator-name `production-load-order-comp})
-
- ;; A stateful counter used for unique ids of the nodes of the graph.
- id-counter (atom 0)
- create-id-fn (fn [] (swap! id-counter inc))
-
- forms-per-eval (:forms-per-eval options forms-per-eval-default)
-
- beta-graph (to-beta-graph productions create-id-fn)
- alpha-graph (to-alpha-graph beta-graph create-id-fn)
-
- ;; Extract the expressions from the graphs and evaluate them in a batch manner.
- ;; This is a performance optimization, see Issue 381 for more information.
- exprs (compile-exprs (extract-exprs beta-graph alpha-graph) forms-per-eval)
-
- ;; If we have made it to this point, it means that we have succeeded in compiling all expressions
- ;; thus we can free the :compile-ctx used for troubleshooting compilation failures.
- ;; The reason that this flag exists is in the event that this session will be serialized with an
- ;; uncertain deserialization environment and this sort of troubleshooting information would be useful
- ;; in diagnosing compilation errors in specific rules.
- omit-compile-ctx (:omit-compile-ctx options omit-compile-ctx-default)
- exprs (if omit-compile-ctx
- (into {}
- (map
- (fn [[k [expr ctx]]]
- [k [expr (dissoc ctx :compile-ctx)]]))
- exprs)
- exprs)
-
- beta-tree (compile-beta-graph beta-graph exprs)
- beta-root-ids (-> beta-graph :forward-edges (get 0)) ; 0 is the id of the virtual root node.
- beta-roots (vals (select-keys beta-tree beta-root-ids))
- alpha-nodes (compile-alpha-nodes alpha-graph exprs)
-
- ;; The fact-type uses Clojure's type function unless overridden.
- fact-type-fn (or (get options :fact-type-fn)
- type)
-
- ;; The ancestors for a logical type uses Clojure's ancestors function unless overridden.
- ancestors-fn (or (get options :ancestors-fn)
- ancestors)
-
- ;; The default is to sort activations in descending order by their salience.
- activation-group-sort-fn (eng/options->activation-group-sort-fn options)
-
- ;; The returned salience will be a tuple of the form [rule-salience internal-salience],
- ;; where internal-salience is considered after the rule-salience and is assigned automatically by the compiler.
- activation-group-fn (eng/options->activation-group-fn options)
-
- rulebase (build-network beta-tree beta-roots alpha-nodes productions
- fact-type-fn ancestors-fn activation-group-sort-fn activation-group-fn
- exprs)
-
- get-alphas-fn (:get-alphas-fn rulebase)
-
- transport (LocalTransport.)]
-
- (eng/assemble {:rulebase rulebase
- :memory (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
- :transport transport
- :listeners (get options :listeners [])
- :get-alphas-fn get-alphas-fn})))
-
-(defn add-production-load-order
- "Adds ::rule-load-order to metadata of productions. Custom DSL's may need to use this if
- creating a session in Clojure without calling mk-session below."
- [productions]
- (map (fn [n production]
- (vary-meta production assoc ::rule-load-order (or n 0)))
- (range) productions))
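-
-;; Illustrative sketch: the load-order metadata attached to two hypothetical productions.
-(comment
-  (map meta (add-production-load-order [{:name "a"} {:name "b"}]))
-  ;; => ({::rule-load-order 0} {::rule-load-order 1})
-  )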
-
-(defn mk-session
- "Creates a new session using the given rule source. The resulting session
- is immutable, and can be used with insert, retract, fire-rules, and query functions."
- ([sources-and-options]
- (let [sources (take-while (complement keyword?) sources-and-options)
- options (apply hash-map (drop-while (complement keyword?) sources-and-options))
- productions (->> sources
- ;; Load rules from the source, or just use the input as a seq.
- (mapcat #(if (satisfies? IRuleSource %)
- (load-rules %)
- %))
- add-production-load-order
- ;; Ensure that we choose the earliest occurrence of a rule for the purpose of rule order.
- ;; There are Clojure core functions for distinctness, of course, but none of them seem to guarantee
- ;; which instance will be chosen in case of duplicates.
- (reduce (fn [previous new-production]
- ;; Contains? is broken for transient sets; see http://dev.clojure.org/jira/browse/CLJ-700
- ;; Since all values in this set should be truthy, we can just use the set as a function instead.
- (if (previous new-production)
- previous
- (conj! previous new-production)))
- (transient #{}))
- persistent!)]
-
- (if-let [session (get @session-cache [productions options])]
- session
- (let [session (mk-session* productions options)]
-
- ;; Cache the session unless instructed not to.
- (when (get options :cache true)
- (swap! session-cache assoc [productions options] session))
-
- ;; Return the session.
- session)))))
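-
-;; End-to-end sketch of the mk-session entry point above. The namespace symbols and
-;; option values are hypothetical; sources may be anything satisfying IRuleSource or
-;; a seq of production maps, followed by keyword options in the same collection.
-(comment
-  (mk-session ['my.app.rules
-               'my.app.more-rules
-               :fact-type-fn type
-               :cache false]))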
diff --git a/build/clara/rules/dsl.clj b/build/clara/rules/dsl.clj
deleted file mode 100644
index aa98fc40..00000000
--- a/build/clara/rules/dsl.clj
+++ /dev/null
@@ -1,315 +0,0 @@
-(ns clara.rules.dsl
- "Implementation of the defrule-style DSL for Clara. Most users should simply use the clara.rules namespace."
- (:require [clojure.walk :as walk]
- [clara.rules.compiler :as com]
- [clara.rules.platform :as platform])
- (:refer-clojure :exclude [qualified-keyword?]))
-
-;; Let operators be symbols or keywords.
-(def ops #{'and 'or 'not 'exists :and :or :not :exists})
-
-(defn- separator?
- "True iff `x` is a rule body separator symbol."
- [x] (and (symbol? x) (= "=>" (name x))))
-
-(defn split-lhs-rhs
- "Given a rule with the =>, splits the left- and right-hand sides."
- [rule-body]
- (let [[lhs [sep & rhs]] (split-with #(not (separator? %)) rule-body)]
-
- {:lhs lhs
- :rhs (when-not (empty? rhs)
- (conj rhs 'do))}))
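-
-;; Sketch of the split performed above on a hypothetical rule body.
-(comment
-  (split-lhs-rhs '[[Temperature (< temperature 20)]
-                   =>
-                   (println "cold!")])
-  ;; => {:lhs ([Temperature (< temperature 20)])
-  ;;     :rhs (do (println "cold!"))}
-  )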
-
-(defn- throw-dsl-ex
- "Throws an exception indicating a failure parsing a form."
- [message info expr-meta]
- (if expr-meta
- (let [{:keys [line column file]} expr-meta]
- (throw (ex-info
- (str message
- (when line
- (str " line: " line))
- (when column
- (str " column: " column))
- (when file
- (str " file: " file)))
-
- (into info expr-meta))))
- (throw (ex-info message info))))
-
-(defn- construct-condition
- "Creates a condition with the given optional result binding when parsing a rule."
- [condition result-binding expr-meta]
- (let [type (if (symbol? (first condition))
- (if-let [resolved (resolve (first condition))]
-
- ;; If the type resolves to a var, grab its contents for the match.
- (if (var? resolved)
- (deref resolved)
- resolved)
-
- (first condition)) ; For ClojureScript compatibility, we keep the symbol if we can't resolve it.
- (first condition))
- ;; Args is an optional vector of arguments following the type.
- args (if (vector? (second condition)) (second condition) nil)
- constraints (vec (if args (drop 2 condition) (rest condition)))]
-
- (when (and (vector? type)
- (some seq? type))
-
- (throw-dsl-ex (str "Type " type " is a vector and appears to contain expressions. "
- "Is there an extraneous set of brackets in the condition?")
- {}
- expr-meta))
-
- (when (> (count args) 1)
- (throw-dsl-ex "Only one argument can be passed to a condition."
- {}
- expr-meta))
-
- ;; Check if a malformed rule has a nested operator where we expect a type.
- (when (and (sequential? type)
- (seq type)
- (ops (first type)))
- (throw-dsl-ex (str "Attempting to bind into " result-binding
- " nested expression: " (pr-str condition)
- " Nested expressions cannot be bound into higher-level results")
- {}
- expr-meta))
-
- ;; Include the original metadata in the returned condition so line numbers
- ;; can be preserved when we compile it.
- (with-meta
- (cond-> {:type type
- :constraints constraints}
- args (assoc :args args)
- result-binding (assoc :fact-binding result-binding))
-
- (if (seq constraints)
- (assoc (meta (first constraints))
- :file *file*)))))
-
-(defn- parse-condition-or-accum
- "Parse an expression that could be a condition or an accumulator."
- [condition expr-meta]
- ;; Grab the binding of the operation result, if present.
- (let [result-binding (if (= '<- (second condition)) (keyword (first condition)) nil)
- condition (if result-binding (drop 2 condition) condition)]
-
- (when (and (not= nil result-binding)
- (not= \? (first (name result-binding))))
- (throw-dsl-ex (str "Invalid binding for condition: " result-binding)
- {}
- expr-meta))
-
- ;; If it's an s-expression, simply let it expand itself, and assoc the binding with the result.
- (if (#{'from :from} (second condition)) ; If this is an accumulator....
- (let [parsed-accum {:accumulator (first condition)
- :from (construct-condition (nth condition 2) nil expr-meta)}]
- ;; A result binding is optional for an accumulator.
- (if result-binding
- (assoc parsed-accum :result-binding result-binding)
- parsed-accum))
- ;; Not an accumulator, so simply create the condition.
- (construct-condition condition result-binding expr-meta))))
-
-(defn- parse-expression
- "Convert each expression into a condition structure."
- [expression expr-meta]
- (cond
-
- (contains? ops (first expression))
- (into [(keyword (name (first expression)))] ; Ensure expression operator is a keyword.
- (for [nested-expr (rest expression)]
- (parse-expression nested-expr expr-meta)))
-
- (contains? #{'test :test} (first expression))
- (if (= 1 (count expression))
- (throw-dsl-ex (str "Empty :test conditions are not allowed.")
- {}
- expr-meta)
- {:constraints (vec (rest expression))})
-
- :else
- (parse-condition-or-accum expression expr-meta)))
-
-(defn- maybe-qualify
- "Attempt to qualify the given symbol, returning the symbol itself
- if we can't qualify it for any reason."
- [env sym]
- (let [env (set env)]
- (if (and (symbol? sym)
- (not= '.. sym) ; Skip the special .. host interop macro.
- (.endsWith (name sym) "."))
-
- ;; The . suffix indicates a type constructor, so we qualify the type instead, then
- ;; re-add the . to the resolved type.
- (-> (subs (name sym)
- 0
- (dec (count (name sym)))) ; Get the name without the trailing dot.
- (symbol) ; Convert it back to a symbol.
- ^Class (resolve) ; Resolve into the type class.
- (.getName) ; Get the qualified name.
- (str ".") ; Re-add the dot for the constructor, which is now qualified
- (symbol)) ; Convert back into a symbol used at compile time.
-
- ;; Qualify a normal clojure symbol.
- (if (and (symbol? sym) ; Only qualify symbols...
- (not (env sym))) ; not in env (env contains locals).
-
- (cond
- ;; If it's a Java class, simply qualify it.
- (instance? Class (resolve sym))
- (symbol (.getName ^Class (resolve sym)))
-
- ;; Don't qualify clojure.core for portability, since CLJS uses a different namespace.
- (and (resolve sym)
- (not (= "clojure.core"
- (str (ns-name (:ns (meta (resolve sym)))))))
- (name sym))
- (symbol (str (ns-name (:ns (meta (resolve sym))))) (name sym))
-
- ;; See if it's a static method call and qualify it.
- (and (namespace sym)
- (not (resolve sym))
- (instance? Class (resolve (symbol (namespace sym))))) ; The namespace portion is the class name we try to resolve.
- (symbol (.getName ^Class (resolve (symbol (namespace sym))))
- (name sym))
-
- ;; Finally, just return the symbol unchanged if it doesn't match anything above,
- ;; assuming it's a local parameter or variable.
- :else
- sym)
- sym))))
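-
-;; Illustrative sketch of the qualification rules above (private fn, same namespace);
-;; results assume the symbols resolve as they would in a typical Clojure namespace.
-(comment
-  (maybe-qualify [] 'String)  ; => java.lang.String
-  (maybe-qualify [] 'String.) ; => java.lang.String.  (constructor form re-qualified)
-  (maybe-qualify [] 'str)     ; => str                (clojure.core left unqualified)
-  (maybe-qualify '[x] 'x))    ; => x                  (local from env, left untouched)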
-
-(defn- qualify-meta
- "Qualify metadata associated with the symbol."
- [env sym]
- (if (:tag (meta sym))
- (vary-meta sym update-in [:tag] (fn [tag] (-> ^Class (resolve tag)
- (.getName)
- (symbol))))
- sym))
-
-(defn- resolve-vars
- "Resolve vars used in expression. TODO: this should be narrowed to resolve only
- those that aren't in the environment, condition, or right-hand side."
- [form env]
- (walk/postwalk
- (fn [sym]
- (->> sym
- (maybe-qualify env)
- (qualify-meta env)))
- form))
-
-(defmacro local-syms []
- (mapv #(list 'quote %) (keys &env)))
-
-(defn destructuring-sym? [sym]
- (or (re-matches #"vec__\d+" (name sym))
- (re-matches #"map__\d+" (name sym))))
-
-(defn- destructure-syms
- [{:keys [args] :as condition}]
- (if args
- (remove destructuring-sym? (eval `(let [~args nil] (local-syms))))))
-
-(defn parse-rule*
- "Creates a rule from the DSL syntax using the given environment map. *ns*
- should be bound to the namespace the rule is meant to be defined in."
- ([lhs rhs properties env]
- (parse-rule* lhs rhs properties env {}))
- ([lhs rhs properties env rule-meta]
- (let [conditions (into [] (for [expr lhs]
- (parse-expression expr rule-meta)))
-
- rule {:ns-name (list 'quote (ns-name *ns*))
- :lhs (list 'quote
- (mapv #(resolve-vars % (destructure-syms %))
- conditions))
- :rhs (list 'quote
- (vary-meta rhs
- assoc :file *file*))}
-
- symbols (set (filter symbol? (com/flatten-expression (concat lhs rhs))))
- matching-env (into {} (for [sym (keys env)
- :when (symbols sym)]
- [(keyword (name sym)) sym]))]
-
- (cond-> rule
-
- ;; Add properties, if given.
- (not (empty? properties)) (assoc :props properties)
-
- ;; Add the environment, if given.
- (not (empty? env)) (assoc :env matching-env)))))
-
-(defn parse-query*
- "Creates a query from the DSL syntax using the given environment map."
- ([params lhs env]
- (parse-query* params lhs env {}))
- ([params lhs env query-meta]
- (let [conditions (into [] (for [expr lhs]
- (parse-expression expr query-meta)))
-
- query {:lhs (list 'quote (mapv #(resolve-vars % (destructure-syms %))
- conditions))
- :params (set (map platform/query-param params))}
-
- symbols (set (filter symbol? (com/flatten-expression lhs)))
- matching-env (into {}
- (for [sym (keys env)
- :when (symbols sym)]
- [(keyword (name sym)) sym]))]
-
- (cond-> query
- (not (empty? env)) (assoc :env matching-env)))))
-
-(defmacro parse-rule
- "Macro used to dynamically create a new rule using the DSL syntax."
- ([lhs rhs]
- (parse-rule* lhs rhs nil &env))
- ([lhs rhs properties]
- (parse-rule* lhs rhs properties &env)))
-
-;;; added to clojure.core in 1.9
-(defn- qualified-keyword?
- "Return true if x is a keyword with a namespace"
- [x] (and (keyword? x) (namespace x) true))
-
-(defn- production-name
- [prod-name]
- (cond
- (qualified-keyword? prod-name) prod-name
- :else (str (name (ns-name *ns*)) "/" (name prod-name))))
-
-(defn build-rule
- "Function used to parse and build a rule using the DSL syntax."
- ([name body] (build-rule name body {}))
- ([name body form-meta]
- (let [doc (if (string? (first body)) (first body) nil)
- body (if doc (rest body) body)
- properties (if (map? (first body)) (first body) nil)
- definition (if properties (rest body) body)
- {:keys [lhs rhs]} (split-lhs-rhs definition)]
- (cond-> (parse-rule* lhs rhs properties {} form-meta)
-
- name (assoc :name (production-name name))
- doc (assoc :doc doc)))))
-
-(defmacro parse-query
- "Macro used to dynamically create a new rule using the DSL syntax."
- [params lhs]
- (parse-query* params lhs &env))
-
-(defn build-query
- "Function used to parse and build a query using the DSL syntax."
- ([name body] (build-query name body {}))
- ([name body form-meta]
- (let [doc (if (string? (first body)) (first body) nil)
- binding (if doc (second body) (first body))
- definition (if doc (drop 2 body) (rest body))]
- (cond-> (parse-query* binding definition {} form-meta)
- name (assoc :name (production-name name))
- doc (assoc :doc doc)))))
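-
-;; Hedged usage sketch of the builders above. Temperature is a hypothetical fact type
-;; and the bodies follow the defrule/defquery DSL shape.
-(comment
-  (build-rule 'cold-rule
-              '("Fires when a temperature reading is below 20."
-                [Temperature (< temperature 20) (= ?t temperature)]
-                =>
-                (println "cold:" ?t)))
-
-  (build-query 'cold-query
-               '([:?limit]
-                 [Temperature (< temperature ?limit) (= ?t temperature)])))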
diff --git a/build/clara/rules/durability.clj b/build/clara/rules/durability.clj
deleted file mode 100644
index 41f1149a..00000000
--- a/build/clara/rules/durability.clj
+++ /dev/null
@@ -1,699 +0,0 @@
-(ns clara.rules.durability
- "Support for persisting Clara sessions to an external store.
- Provides the ability to store and restore an entire session working memory state. The restored
- session is able to have additional insert, retract, query, and fire rule calls performed
- immediately after.
-
- See https://github.com/cerner/clara-rules/issues/198 for more discussion on this.
-
- Note! This is still an EXPERIMENTAL namespace. This may change non-passively without warning.
- Any session or rulebase serialized in one version of Clara is not guaranteed to deserialize
- successfully against another version of Clara."
- (:require [clara.rules.engine :as eng]
- [clara.rules.compiler :as com]
- [clara.rules.memory :as mem]
- [schema.core :as s])
- (:import [clara.rules.compiler Rulebase]
- [clara.rules.memory RuleOrderedActivation]
- [java.util List Map]))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Rulebase serialization helpers.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-(def ^:internal ^ThreadLocal node-id->node-cache
- "Useful for caching rulebase network nodes by id during serialization and deserialization to
- avoid creating multiple object instances for the same node."
- (ThreadLocal.))
-
-(def ^:internal ^ThreadLocal node-fn-cache
- "A cache for holding the fns used to reconstruct the nodes. Only applicable during read time, specifically
- this will be bound to a Map of [ ] to IFn before the rulebase is deserialized. While the
- rulebase is deserialized the nodes will reference this cache to repopulate their fns."
- (ThreadLocal.))
-
-(defn- add-node-fn [node fn-key expr-key]
- (assoc node
- fn-key
- (first (get (.get node-fn-cache) [(:id node) expr-key]))))
-
-(defn add-rhs-fn [node]
- (add-node-fn node :rhs :action-expr))
-
-(defn add-alpha-fn [node]
- (add-node-fn node :activation :alpha-expr))
-
-(defn add-join-filter-fn [node]
- (add-node-fn node :join-filter-fn :join-filter-expr))
-
-(defn add-test-fn [node]
- (add-node-fn node :test :test-expr))
-
-(defn add-accumulator [node]
- (assoc node
- :accumulator ((first (get (.get node-fn-cache) [(:id node) :accum-expr]))
- (:env node))))
-
-(defn node-id->node
- "Lookup the node for the given node-id in the node-id->node-cache cache."
- [node-id]
- (@(.get node-id->node-cache) node-id))
-
-(defn cache-node
- "Cache the node in the node-id->node-cache. Returns the node."
- [node]
- (when-let [node-id (:id node)]
- (vswap! (.get node-id->node-cache) assoc node-id node))
- node)
-
-(def ^:internal ^ThreadLocal clj-struct-holder
- "A cache for writing and reading Clojure records. At write time, an IdentityHashMap can be
- used to keep track of repeated references to the same object instance occurring in
- the serialization stream. At read time, a plain ArrayList (mutable and indexed for speed)
- can be used to add records to when they are first seen, then look up repeated occurrences
- of references to the same record instance later."
- (ThreadLocal.))
-
-(defn clj-struct->idx
- "Gets the numeric index for the given struct from the clj-struct-holder."
- [fact]
- (-> clj-struct-holder
- ^Map (.get)
- (.get fact)))
-
-(defn clj-struct-holder-add-fact-idx!
- "Adds the fact to the clj-struct-holder with a new index. This can later be retrieved
- with clj-struct->idx."
- [fact]
- ;; Note the values will be int type here. This shouldn't be a problem since they
- ;; will be read later as longs and both will be compatible with the index lookup
- ;; at read-time. This could have a cast to long here, but it would waste time
- ;; unnecessarily.
- (-> clj-struct-holder
- ^Map (.get)
- (.put fact (-> clj-struct-holder
- ^Map (.get)
- (.size)))))
-
-(defn clj-struct-idx->obj
- "The reverse of clj-struct->idx. Returns an object for the given index found
- in clj-struct-holder."
- [id]
- (-> clj-struct-holder
- ^List (.get)
- (.get id)))
-
-(defn clj-struct-holder-add-obj!
- "The reverse of clj-struct-holder-add-fact-idx!. Adds the object to the clj-struct-holder
- at the next available index."
- [fact]
- (-> clj-struct-holder
- ^List (.get)
- (.add fact))
- fact)
-
-(defn create-map-entry
- "Helper to create map entries. This can be useful for serialization implementations
- on clojure.lang.MapEntry types.
- Using the ctor instead of clojure.lang.MapEntry/create since this method
- doesn't exist prior to clj 1.8.0"
- [k v]
- (clojure.lang.MapEntry. k v))
-
-;;;; To deal with http://dev.clojure.org/jira/browse/CLJ-1733 we need to impl a way to serialize
-;;;; sorted sets and maps. However, this is not sufficient for arbitrary comparators. If
-;;;; arbitrary comparators are used for the sorted coll, the comparator has to be restored
-;;;; explicitly since arbitrary functions are not serializable in any stable way right now.
-
-(defn sorted-comparator-name
- "Sorted collections are not easily serializable since they have an opaque function object instance
- associated with them. To deal with that, the sorted collection can provide a ::comparator-name
- in the metadata that indicates a symbolic name for the function used as the comparator. With this
- name the function can be looked up and associated to the sorted collection again during
- deserialization time.
- * If the sorted collection has metadata ::comparator-name, then the value should be a name
- symbol and is returned.
- * If the sorted collection has the clojure.lang.RT/DEFAULT_COMPARATOR, returns nil.
- * If neither of the above are true, an exception is thrown indicating that there is no way to provide
- a useful name for this sorted collection, so it won't be able to be serialized."
- [^clojure.lang.Sorted s]
- (let [cname (-> s meta ::comparator-name)]
-
- ;; Fail if reliable serialization of this sorted coll isn't possible.
- (when (and (not cname)
- (not= (.comparator s) clojure.lang.RT/DEFAULT_COMPARATOR))
- (throw (ex-info (str "Cannot serialize sorted collection with non-default"
- " comparator because no :clara.rules.durability/comparator-name provided in metadata.")
- {:sorted-coll s
- :comparator (.comparator s)})))
-
- cname))
-
-(defn seq->sorted-set
- "Helper to create a sorted set from a seq given an optional comparator."
- [s ^java.util.Comparator c]
- (if c
- (clojure.lang.PersistentTreeSet/create c (seq s))
- (clojure.lang.PersistentTreeSet/create (seq s))))
-
-(defn seq->sorted-map
- "Helper to create a sorted map from a seq given an optional comparator."
- [s ^java.util.Comparator c]
- (if c
- (clojure.lang.PersistentTreeMap/create c ^clojure.lang.ISeq (sequence cat s))
- (clojure.lang.PersistentTreeMap/create ^clojure.lang.ISeq (sequence cat s))))
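-
-;; Sketch of the metadata contract described above: a sorted collection with a custom
-;; comparator must carry ::comparator-name naming a resolvable function. my.app/by-price
-;; is a hypothetical comparator var.
-(comment
-  (sorted-comparator-name
-   (with-meta (sorted-set-by my.app/by-price {:price 1})
-     {::comparator-name 'my.app/by-price}))
-  ;; => my.app/by-price
-
-  (sorted-comparator-name (sorted-set 1 2 3))
-  ;; => nil (default comparator, nothing needs to be recorded)
-  )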
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Memory serialization via "indexing" working memory facts.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-;;; A placeholder to put in working memory locations where consumer-defined, domain-specific facts
-;;; were stored at. This placeholder is used to track the position of a fact so that the fact can
-;;; be serialized externally by IWorkingMemorySerializer and later the deserialized fact can be put
-;;; back in place of this placeholder.
-;;; See the ISessionSerializer and IWorkingMemorySerializer protocols and
-;;; indexed-session-memory-state for more details.
-(defrecord MemIdx [idx])
-
-;;; Same as MemIdx but specific to internal objects, such as Token or Element.
-(defrecord InternalMemIdx [idx])
-
-(defn find-index
- "Finds the fact in the fact->idx-map. The fact is assumed to be a key. Returns the value for
- that key, which should just be a numeric index used to track where facts are stubbed out with
- MemIdx's in working memory so that they can be 'put back' later."
- [^Map fact->idx-map fact]
- (.get fact->idx-map fact))
-
-(defn- find-index-or-add!
- "The same as find-index, but if the fact is not found, it is added to the map (destructively)
- and the index it was mapped to is returned.
- This implies that the map must support the mutable map interface, namely java.util.Map.put()."
- [^Map fact->index-map fact]
- (or (.get fact->index-map fact)
- (let [n (.size fact->index-map)
- idx (->MemIdx n)]
- (.put fact->index-map fact idx)
- idx)))
-
-(defn- add-mem-internal-idx!
- "Adds an element to fact->idx-map. The fact is assumed to be a key. The value is a tuple containing
- both the InternalMemIdx and the 'indexed' form of the element. The indexed form is the element that
- has had all of its internal facts stubbed with MemIdxs. The actual element is used as the key because
- the act of stubbing the internal fields of the element changes the identity of the element thus making
- every indexed-element unique. The indexed-element is stored so that it can be serialized rather than
- the element itself. This function simply adds a new key, unlike find-index-or-add!, as such the caller
- should first check that the key is not already present before calling this method.
- Returns the stub used to represent an internal fact, so that it can be 'put back' later."
- [^Map fact->idx-map
- element
- indexed-element]
- (let [n (.size fact->idx-map)
- idx (->InternalMemIdx n)]
- (.put fact->idx-map element [idx indexed-element])
- idx))
-
-(defn- find-mem-internal-idx
- "Returns the InternalMemIdx for the given element."
- [^Map fact->idx-map
- element]
- (nth (.get fact->idx-map element) 0))
-
-;;; Similar what is in clara.rules.memory currently, but just copied for now to avoid dependency issues.
-(defn- update-vals [m update-fn]
- (->> m
- (reduce-kv (fn [m k v]
- (assoc! m k (update-fn v)))
- (transient {}))
- persistent!))
-
-(defn- index-bindings
- [seen bindings]
- (update-vals bindings
- #(find-index-or-add! seen %)))
-
-(defn- index-update-bindings-keys [index-update-bindings-fn
- bindings-map]
- (persistent!
- (reduce-kv (fn [m k v]
- (assoc! m
- (index-update-bindings-fn k)
- v))
- (transient {})
- bindings-map)))
-
-(defn- index-token [internal-seen seen token]
- (if-let [idx (find-mem-internal-idx internal-seen token)]
- idx
- (let [indexed (-> token
- (update :matches
- #(mapv (fn [[fact node-id]]
- [(find-index-or-add! seen fact)
- node-id])
- %))
- (update :bindings
- #(index-bindings seen %)))]
- (add-mem-internal-idx! internal-seen token indexed))))
-
-(defn index-alpha-memory [internal-seen seen amem]
- (let [index-update-bindings-fn #(index-bindings seen %)
- index-update-fact-fn #(find-index-or-add! seen %)
- index-update-elements (fn [elements]
- (mapv #(if-let [idx (find-mem-internal-idx internal-seen %)]
- idx
- (let [indexed (-> %
- (update :fact
- index-update-fact-fn)
- (update :bindings
- index-update-bindings-fn))]
- (add-mem-internal-idx! internal-seen % indexed)))
- elements))]
- (update-vals amem
- #(-> (index-update-bindings-keys index-update-bindings-fn %)
- (update-vals index-update-elements)))))
-
-(defn index-accum-memory [seen accum-mem]
- (let [index-update-bindings-fn #(index-bindings seen %)
- index-facts (fn [facts]
- (mapv #(find-index-or-add! seen %) facts))
- index-update-accum-reduced (fn [node-id accum-reduced]
- (let [m (meta accum-reduced)]
- (if (::eng/accum-node m)
- ;; AccumulateNode
- (let [[facts res] accum-reduced
- facts (index-facts facts)]
- (with-meta
- [facts
- (if (= ::eng/not-reduced res)
- res
- (find-index-or-add! seen res))]
- m))
-
- ;; AccumulateWithJoinFilterNode
- (with-meta (index-facts accum-reduced)
- m))))
- index-update-bindings-map (fn [node-id bindings-map]
- (-> (index-update-bindings-keys index-update-bindings-fn bindings-map)
- (update-vals #(index-update-accum-reduced node-id %))))]
-
- (->> accum-mem
- (reduce-kv (fn [m node-id bindings-map]
- (assoc! m node-id (-> (index-update-bindings-keys index-update-bindings-fn bindings-map)
- (update-vals #(index-update-bindings-map node-id %)))))
- (transient {}))
- persistent!)))
-
-(defn index-beta-memory [internal-seen seen bmem]
- (let [index-update-tokens (fn [tokens]
- (mapv #(index-token internal-seen seen %)
- tokens))]
- (update-vals bmem
- (fn [v]
- (-> (index-update-bindings-keys #(index-bindings seen %) v)
- (update-vals index-update-tokens))))))
-
-(defn index-production-memory [internal-seen seen pmem]
- (let [index-update-facts (fn [facts]
- (mapv #(or (find-index seen %)
- (find-index-or-add! seen %))
- facts))]
- (update-vals pmem
- (fn [token-map]
- (->> token-map
- (reduce-kv (fn [m k v]
- (assoc! m
- (index-token internal-seen seen k)
- (mapv index-update-facts v)))
- (transient {}))
- persistent!)))))
-
-(defn index-activation-map [internal-seen seen actmap]
- (update-vals actmap
- #(mapv (fn [^RuleOrderedActivation act]
- (mem/->RuleOrderedActivation (.-node-id act)
- (index-token internal-seen seen (.-token act))
- (.-activation act)
- (.-rule-load-order act)
- false))
- %)))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Commonly useful session serialization helpers.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-(def ^:internal ^ThreadLocal ^List mem-facts
- "Useful for ISessionSerializer implementors to have a reference to the facts deserialized via
- IWorkingMemorySerializer that are needed to restore working memory whose locations were stubbed
- with a MemIdx during serialization."
- (ThreadLocal.))
-
-(def ^:internal ^ThreadLocal ^List mem-internal
- "Useful for ISessionSerializer implementors to have a reference to the facts deserialized via
- IWorkingMemorySerializer that are needed to restore working memory whose locations were stubbed
- with an InternalMemIdx during serialization. These objects are specific to the Clara engine,
- and as such will be serialized and deserialized along with the memory."
- (ThreadLocal.))
-
-(defn find-mem-idx
- "Finds the fact from mem-facts at the given index. See docs on mem-facts for more."
- [idx]
- (-> mem-facts
- (.get)
- (get idx)))
-
-(defn find-internal-idx
- "Finds the fact from mem-internal at the given index. See docs on mem-internal for more."
- [idx]
- (-> mem-internal
- (.get)
- (get idx)))
-
-(defn indexed-session-memory-state
- "Takes the working memory from a session and strips it down to only the memory needed for
- serialization. Along with this, replaces all working memory facts with MemIdx place holders.
- The terminology being used here is to call this step 'indexing' the memory.
-
- A map is returned with two keys:
- * :memory - The working memory representation that is the same as the given memory's :memory,
- however, all facts in the memory are replaced with MemIdx placeholders.
- * :indexed-facts - the facts replaced with MemIdx placeholders. The facts are returned in a
- sequential collection. Each fact is the n'th item of the collection if the MemIdx for that
- fact has :idx = n. No facts returned should be identical? (i.e. multiple references to the
- same object instance). However, it is possible for some facts returned to be aggregations
- containing other facts that do appear elsewhere in the fact sequence. It is up to the
- implementation of the IWorkingMemorySerializer to deal with these possible, identical? object
- references correctly. This is generally true for most serialization mechanisms.
-
- Note! This function should not typically be used. It is left public to assist in ISessionSerializer
- durability implementations. Use clara.rules/mk-session typically to make rule sessions.
-
- Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
- of memory."
- [memory]
- (let [idx-fact-arr-pair-fn (fn [^java.util.Map$Entry e]
- [(:idx (.getValue e)) (.getKey e)])
-
- internal-fact-arr-pair-fn (fn [^java.util.Map$Entry e]
- ;; Internal facts are stored with a key
- ;; of the original object to a tuple of
- ;; the InternalMemIdx and the indexed object.
- ;; When serializing these facts, to avoid serializing
- ;; consumer facts contained within internal facts,
- ;; the indexed object is serialized instead of
- ;; the original object itself. See add-mem-internal-idx!
- ;; for more details
- (let [v (.getValue e)]
- [(:idx (nth v 0)) (nth v 1)]))
-
- vec-indexed-facts (fn [^Map fact->index-map
- map-entry->arr-idx-pair]
- ;; It is not generally safe to reduce or seq over a mutable Java Map.
- ;; One example is IdentityHashMap. The iterator of the IdentityHashMap
- ;; mutates the map entry values in place and it is never safe to call a
- ;; Iterator.hasNext() when not finished working with the previous value
- ;; returned from Iterator.next(). This is subtle and is actually only a
- ;; problem in JDK6 for IdentityHashMap. JDK7+ appear to have discontinued
- ;; this mutable map entry. However, this is not something to rely on and
- ;; JDK6 support is still expected to work for Clara. The only transducer
- ;; in Clojure that can currently safely consume the JDK6-style
- ;; IdentityHashMap via its entry set iterator is Eduction. This doesn't
- ;; appear to be due to explicit semantics though, but rather an
- ;; implementation detail.
- ;; For further context, this situation is related, but not exactly the
- ;; same as http://dev.clojure.org/jira/browse/CLJ-1738.
- (let [;; The use of a primitive array here isn't strictly necessary. However,
- ;; it doesn't add much in terms of complexity and is faster than the
- ;; alternatives.
- ^"[Ljava.lang.Object;" arr (make-array Object (.size fact->index-map))
- es (.entrySet fact->index-map)
- it (.iterator es)]
- (when (.hasNext it)
- (loop [^java.util.Map$Entry e (.next it)]
- (let [pair (map-entry->arr-idx-pair e)]
- (aset arr (nth pair 0) (nth pair 1)))
- (when (.hasNext it)
- (recur (.next it)))))
- (into [] arr)))
-
- index-memory (fn [memory]
- (let [internal-seen (java.util.IdentityHashMap.)
- seen (java.util.IdentityHashMap.)
-
- indexed (-> memory
- (update :accum-memory #(index-accum-memory seen %))
- (update :alpha-memory #(index-alpha-memory internal-seen seen %))
- (update :beta-memory #(index-beta-memory internal-seen seen %))
- (update :production-memory #(index-production-memory internal-seen seen %))
- (update :activation-map #(index-activation-map internal-seen seen %)))]
-
- {:memory indexed
- :indexed-facts (vec-indexed-facts seen idx-fact-arr-pair-fn)
- :internal-indexed-facts (vec-indexed-facts internal-seen internal-fact-arr-pair-fn)}))]
- (-> memory
- index-memory
- (update :memory
- ;; Assoc nil values rather than using dissoc in order to preserve the type of the memory.
- assoc
- ;; The rulebase does not need to be stored per memory. It will be restored during deserialization.
- :rulebase nil
- ;; Currently these do not support serialization and must be provided during deserialization via a
- ;; base-session or they default to the standard defaults.
- :activation-group-sort-fn nil
- :activation-group-fn nil
- :alphas-fn nil))))
-
-(def ^:private create-get-alphas-fn @#'com/create-get-alphas-fn)
-
-(defn opts->get-alphas-fn [rulebase opts]
- (let [fact-type-fn (:fact-type-fn opts type)
- ancestors-fn (:ancestors-fn opts ancestors)]
- (create-get-alphas-fn fact-type-fn
- ancestors-fn
- (:alpha-roots rulebase))))
-
-(defn assemble-restored-session
- "Builds a Clara session from the given rulebase and memory components. When no memory is given a new
- one is created with all of the defaults of eng/local-memory.
- Note! This function should not typically be used. It is left public to assist in ISessionSerializer
- durability implementations. Use clara.rules/mk-session typically to make rule sessions.
-
- If the options are not provided, they will default to the Clara session defaults. The available options
- on the session (as opposed to the rulebase) are the transport and listeners.
-
- Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
- of memory."
- ([rulebase opts]
- (let [{:keys [listeners transport]} opts]
-
- (eng/assemble {:rulebase rulebase
- :memory (eng/local-memory rulebase
- (clara.rules.engine.LocalTransport.)
- (:activation-group-sort-fn rulebase)
- (:activation-group-fn rulebase)
- ;; TODO: Memory doesn't seem to ever need this or use
- ;; it. Can we just remove it from memory?
- (:get-alphas-fn rulebase))
- :transport (or transport (clara.rules.engine.LocalTransport.))
- :listeners (or listeners [])
- :get-alphas-fn (:get-alphas-fn rulebase)})))
-
- ([rulebase memory opts]
- (let [{:keys [listeners transport]} opts]
-
- (eng/assemble {:rulebase rulebase
- :memory (assoc memory
- :rulebase rulebase
- :activation-group-sort-fn (:activation-group-sort-fn rulebase)
- :activation-group-fn (:activation-group-fn rulebase)
- :alphas-fn (:get-alphas-fn rulebase))
- :transport (or transport (clara.rules.engine.LocalTransport.))
- :listeners (or listeners [])
- :get-alphas-fn (:get-alphas-fn rulebase)}))))
-
-(defn rulebase->rulebase-with-opts
- "Intended for use in rulebase deserialization implementations where these functions were stripped
- off the rulebase implementation; this function takes these options and wraps them in the same manner
- as clara.rules/mk-session. This function should typically only be used when implementing ISessionSerializer."
- [without-opts-rulebase opts]
- (assoc without-opts-rulebase
- :activation-group-sort-fn (eng/options->activation-group-sort-fn opts)
- :activation-group-fn (eng/options->activation-group-fn opts)
- :get-alphas-fn (opts->get-alphas-fn without-opts-rulebase opts)))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Serialization protocols.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-(defprotocol ISessionSerializer
- "Provides the ability to serialize and deserialize a session. Options can be given and supported
- via the opts argument to both serialize and deserialize. Certain options are expected and
- required to be supported by any implementation of ISessionSerializer. These are referred to as
- the 'standard' options.
-
- These include:
-
- * :rulebase-only? - When true, indicates the rulebase is the only part of the session to serialize.
- The *default* is false for the serialize-session-state function. It defaults to true for the
- serialize-rulebase convenience function. This is useful when many sessions are to be
- serialized, all sharing a common rulebase. Storing only the rulebase will likely save both
- space and time in these scenarios.
-
- * :with-rulebase? - When true the rulebase is included in the serialized state of the session.
- The *default* behavior is false when serializing a session via the serialize-session-state function.
-
- * :base-rulebase - A rulebase to attach to the session being deserialized. The assumption here is that
- the session was serialized without the rulebase, i.e. :with-rulebase? = false, so it needs a rulebase
- to be 'attached' back onto it to be usable.
-
- * :forms-per-eval - The maximum number of expressions that will be evaluated per call to eval.
- Larger batch sizes should see better performance compared to smaller batch sizes.
- Defaults to 5000, see clara.rules.compiler/forms-per-eval-default for more information.
-
- Options for the rulebase semantics that are documented at clara.rules/mk-session include:
-
- * :fact-type-fn
- * :ancestors-fn
- * :activation-group-sort-fn
- * :activation-group-fn
-
-
- Other options can be supported by specific implementors of ISessionSerializer."
-
- (serialize [this session opts]
- "Serialize the given session with the given options. Where the session state is stored is dependent
- on the implementation of this instance e.g. it may store it in a known reference to an IO stream.")
-
- (deserialize [this mem-facts opts]
- "Deserialize the session state associated to this instance e.g. it may be coming from a known reference
- to an IO stream. mem-facts is a sequential collection of the working memory facts that were
- serialized and deserialized by an implementation of IWorkingMemorySerializer."))
-
-(defprotocol IWorkingMemorySerializer
- "Provides the ability to serialize and deserialize the facts stored in the working memory of a session.
- Facts can be serialized in whatever way makes sense for a given domain. The domain of facts can vary
- greatly from one use-case of the rules engine to the next. So the mechanism of serializing the facts
- in memory can vary greatly as a result of this. Clara does not yet provide any default implementations
- for this, but may in the future. However, many of the handlers defined in clara.rules.durability.fressian
- can be reused if the consumer wishes to serialize via Fressian. See more on this in
- the clara.rules.durability.fressian namespace docs.
-
- The important part of this serialization protocol is that the facts returned from deserialize-facts are in
- the *same order* as how they were given to serialize-facts."
-
- (serialize-facts [this fact-seq]
- "Serialize the given fact-seq, which is an order sequence of facts from working memory of a session.
- Note, as mentioned in the protocol docs, the *order* these are given is *important* and should be preserved
- when they are returned via deserialize-facts.")
-
- (deserialize-facts [this]
- "Returns the facts associated to this instance deserialized in the same order that they were given
- to serialize-facts."))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Durability API.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-(s/defn serialize-rulebase
- "Serialize *only* the rulebase portion of the given session. The serialization is done by the
- given session-serializer implementor of ISessionSerializer.
-
- Options can be given as an optional argument. These are passed through to the session-serializer
- implementation. See the description of standard options an ISessionSerializer should provide in
- the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
- any non-standard options supported/not supported.
- See ISessionSerializer docs for more on that.
-
- The rulebase is the stateless structure that controls the flow of productions, i.e. the 'rete'
- rule network. The ability to serialize only the rulebase is supported so that the rulebase can
- be stored and retrieved a single time for potentially many sessions containing different working
- memory data, for the same rules. This function is only a convenience for passing the
- :rulebase-only? true flag to the serialize-session-state function.
- See serialize-session-state for more."
- ([session :- (s/protocol eng/ISession)
- session-serializer :- (s/protocol ISessionSerializer)]
- (serialize-rulebase session
- session-serializer
- {}))
-
- ([session :- (s/protocol eng/ISession)
- session-serializer :- (s/protocol ISessionSerializer)
- opts :- {s/Any s/Any}]
- (serialize session-serializer
- session
- (assoc opts :rulebase-only? true))))
-
-(s/defn deserialize-rulebase :- Rulebase
- "Deserializes the rulebase stored via the serialize-rulebase function. This is done via the given
- session-serializer implementor of ISessionSerializer.
-
- Options can be given as an optional argument. These are passed through to the session-serializer
- implementation. See the description of standard options an ISessionSerializer should provide in
- the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
- any non-standard options supported/not supported.
- See ISessionSerializer docs for more on that."
- ([session-serializer :- (s/protocol ISessionSerializer)]
- (deserialize-rulebase session-serializer
- {}))
-
- ([session-serializer :- (s/protocol ISessionSerializer)
- opts :- {s/Any s/Any}]
- (deserialize session-serializer
- nil
- (assoc opts :rulebase-only? true))))
-
-(s/defn serialize-session-state
- "Serializes the state of the given session. By default, this *excludes* the rulebase from being
- serialized alongside the working memory state of the session. The rulebase, if specified, and
- the working memory of the session are serialized by the session-serializer implementor of
- ISessionSerializer. The memory-serializer implementor of IWorkingMemorySerializer is used to
- serialize the actual facts stored within working memory.
-
- Typically, the caller can use a pre-defined default session-serializer, such as
- clara.rules.durability.fressian/create-session-serializer.
- See clara.rules.durability.fressian for more specific details regarding this, including the extra
- required dependency on Fressian notes found there.
- The memory-facts-serializer is often a custom implementation provided by the caller, since the facts stored in
- working memory are domain-specific to the consumer's usage of the rules.
- See the IWorkingMemorySerializer docs for more.
-
- Options can be given as an optional argument. These are passed through to the session-serializer
- implementation. See the description of standard options an ISessionSerializer should provide in
- the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
- any non-standard options it does or does not support."
- ([session :- (s/protocol eng/ISession)
- session-serializer :- (s/protocol ISessionSerializer)
- memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)]
- (serialize-session-state session
- session-serializer
- memory-facts-serializer
- {:with-rulebase? false}))
-
- ([session :- (s/protocol eng/ISession)
- session-serializer :- (s/protocol ISessionSerializer)
- memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)
- opts :- {s/Any s/Any}]
- (serialize-facts memory-facts-serializer
- (serialize session-serializer session opts))))
-
-(s/defn deserialize-session-state :- (s/protocol eng/ISession)
- "Deserializes the session that was stored via the serialize-session-state function. Similar to
- what is described there, this uses the session-serializer implementor of ISessionSerializer to
- deserialize the session and working memory state. The memory-facts-serializer implementor of
- IWorkingMemorySerializer is used to deserialize the actual facts stored in working memory.
-
- Options can be given as an optional argument. These are passed through to the session-serializer
- implementation. See the description of standard options an ISessionSerializer should provide in
- the ISessionSerializer docs. Also, see the specific ISessionSerializer implementation docs for
- any non-standard options it does or does not support."
- ([session-serializer :- (s/protocol ISessionSerializer)
- memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)]
- (deserialize-session-state session-serializer
- memory-facts-serializer
- {}))
-
- ([session-serializer :- (s/protocol ISessionSerializer)
- memory-facts-serializer :- (s/protocol IWorkingMemorySerializer)
- opts :- {s/Any s/Any}]
- (deserialize session-serializer
- (deserialize-facts memory-facts-serializer)
- opts)))
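;; A hedged usage sketch of the API above: round-tripping a session through a
;; file. `my-session` is an assumption, `fact-serializer` is any
;; IWorkingMemorySerializer (such as the EDN sketch earlier), and the `fressian`
;; alias refers to clara.rules.durability.fressian.
(comment
  (require '[clara.rules.durability.fressian :as fressian])

  ;; Serialize the rulebase and working memory to a file; the in-memory facts
  ;; are handed to fact-serializer via serialize-facts.
  (with-open [out (java.io.FileOutputStream. "session.fressian")]
    (serialize-session-state my-session
                             (fressian/create-session-serializer out)
                             fact-serializer
                             {:with-rulebase? true}))

  ;; Rebuild the session from the file plus the facts returned by
  ;; fact-serializer's deserialize-facts.
  (with-open [in (java.io.FileInputStream. "session.fressian")]
    (deserialize-session-state (fressian/create-session-serializer in)
                               fact-serializer
                               {})))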
diff --git a/build/clara/rules/durability/fressian.clj b/build/clara/rules/durability/fressian.clj
deleted file mode 100644
index c60c9b00..00000000
--- a/build/clara/rules/durability/fressian.clj
+++ /dev/null
@@ -1,642 +0,0 @@
-(ns clara.rules.durability.fressian
- "A default Fressian-based implementation of d/ISessionSerializer.
-
- Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
- of memory."
- (:require [clara.rules.durability :as d]
- [clara.rules.memory :as mem]
- [clara.rules.engine :as eng]
- [clara.rules.compiler :as com]
- [clara.rules.platform :as pform]
- [schema.core :as s]
- [clojure.data.fressian :as fres]
- [clojure.java.io :as jio]
- [clojure.main :as cm])
- (:import [clara.rules.durability
- MemIdx
- InternalMemIdx]
- [clara.rules.memory
- RuleOrderedActivation]
- [clara.rules.engine
- Token
- Element
- ProductionNode
- QueryNode
- AlphaNode
- RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode
- TestNode
- AccumulateNode
- AccumulateWithJoinFilterNode]
- [org.fressian
- StreamingWriter
- Writer
- Reader
- FressianWriter
- FressianReader]
- [org.fressian.handlers
- WriteHandler
- ReadHandler]
- [java.util
- ArrayList
- IdentityHashMap
- Map
- WeakHashMap]
- [java.io
- InputStream
- OutputStream]))
-
-;; Use this map to cache the symbol for the map->RecordNameHere
-;; factory function created for every Clojure record to improve
-;; serialization performance.
-;; See https://github.com/cerner/clara-rules/issues/245 for more extensive discussion.
-(def ^:private ^Map class->factory-fn-sym (java.util.Collections/synchronizedMap
- (WeakHashMap.)))
-
-(defn record-map-constructor-name
- "Return the 'map->' prefix, factory constructor function for a Clojure record."
- [rec]
- (let [klass (class rec)]
- (if-let [cached-sym (.get class->factory-fn-sym klass)]
- cached-sym
- (let [class-name (.getName ^Class klass)
- idx (.lastIndexOf class-name (int \.))
- ns-nom (.substring class-name 0 idx)
- nom (.substring class-name (inc idx))
- factory-fn-sym (symbol (str (cm/demunge ns-nom)
- "/map->"
- (cm/demunge nom)))]
- (.put class->factory-fn-sym klass factory-fn-sym)
- factory-fn-sym))))
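;; For illustration: given any record instance, the function above resolves the
;; generated map-> factory symbol, e.g.
;;   (record-map-constructor-name (clara.rules.engine/->Token [] {}))
;;   ;; => clara.rules.engine/map->Token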
-
-(defn write-map
- "Writes a map as Fressian with the tag 'map' and all keys cached."
- [^Writer w m]
- (.writeTag w "map" 1)
- (.beginClosedList ^StreamingWriter w)
- (reduce-kv
- (fn [^Writer w k v]
- (.writeObject w k true)
- (.writeObject w v))
- w
- m)
- (.endList ^StreamingWriter w))
-
-(defn write-with-meta
- "Writes the object to the writer under the given tag. If the record has metadata, the metadata
- will also be written. read-with-meta will associate this metadata back with the object
- when reading."
- ([w tag o]
- (write-with-meta w tag o (fn [^Writer w o] (.writeList w o))))
- ([^Writer w tag o write-fn]
- (let [m (meta o)]
- (do
- (.writeTag w tag 2)
- (write-fn w o)
- (if m
- (.writeObject w m)
- (.writeNull w))))))
-
-(defn- read-meta [^Reader rdr]
- (some->> rdr
- .readObject
- (into {})))
-
-(defn read-with-meta
- "Reads an object from the reader that was written via write-with-meta. If the object was written
- with metadata the metadata will be associated on the object returned."
- [^Reader rdr build-fn]
- (let [o (build-fn (.readObject rdr))
- m (read-meta rdr)]
- (cond-> o
- m (with-meta m))))
-
-(defn write-record
- "Same as write-with-meta, but with Clojure record support. The type of the record will
- be preserved."
- [^Writer w tag rec]
- (let [m (meta rec)]
- (.writeTag w tag 3)
- (.writeObject w (record-map-constructor-name rec) true)
- (write-map w rec)
- (if m
- (.writeObject w m)
- (.writeNull w))))
-
-(defn read-record
- "Same as read-with-meta, but with Clojure record support. The type of the record will
- be preserved."
- ([^Reader rdr]
- (read-record rdr nil))
- ([^Reader rdr add-fn]
- (let [builder (-> (.readObject rdr) resolve deref)
- build-map (.readObject rdr)
- m (read-meta rdr)]
- (cond-> (builder build-map)
- m (with-meta m)
- add-fn add-fn))))
-
-(defn- create-cached-node-handler
- ([clazz
- tag
- tag-for-cached]
- {:class clazz
- :writer (reify WriteHandler
- (write [_ w o]
- (let [node-id (:id o)]
- (if (@(.get d/node-id->node-cache) node-id)
- (do
- (.writeTag w tag-for-cached 1)
- (.writeInt w node-id))
- (do
- (d/cache-node o)
- (write-record w tag o))))))
- :readers {tag-for-cached
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (d/node-id->node (.readObject rdr))))
- tag
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (-> rdr
- read-record
- d/cache-node)))}})
- ([clazz
- tag
- tag-for-cached
- remove-node-expr-fn
- add-node-expr-fn]
- {:class clazz
- :writer (reify WriteHandler
- (write [_ w o]
- (let [node-id (:id o)]
- (if (@(.get d/node-id->node-cache) node-id)
- (do
- (.writeTag w tag-for-cached 1)
- (.writeInt w node-id))
- (do
- (d/cache-node o)
- (write-record w tag (remove-node-expr-fn o)))))))
- :readers {tag-for-cached
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (d/node-id->node (.readObject rdr))))
- tag
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (-> rdr
- (read-record add-node-expr-fn)
- d/cache-node)))}}))
-
-(defn- create-identity-based-handler
- [clazz
- tag
- write-fn
- read-fn]
- (let [indexed-tag (str tag "-idx")]
- ;; Write an object in full only the first time a reference to it is encountered. The object is then
- ;; "cached" with the IdentityHashMap `d/clj-struct-holder`. If another reference to this object
- ;; instance is encountered later, only the "index" of the object in the map is written.
- {:class clazz
- :writer (reify WriteHandler
- (write [_ w o]
- (if-let [idx (d/clj-struct->idx o)]
- (do
- (.writeTag w indexed-tag 1)
- (.writeInt w idx))
- (do
- ;; We are writing all nested objects prior to adding the original object to the cache here as
- ;; this will be the order that will occur on read, i.e., the reader will have to traverse to the bottom
- ;; of the struct before rebuilding the object.
- (write-fn w tag o)
- (d/clj-struct-holder-add-fact-idx! o)))))
- ;; When reading the first time a reference to an object instance is found, the entire object will
- ;; need to be constructed. It is then put into indexed cache. If more references to this object
- ;; instance are encountered later, they will be in the form of a numeric index into this cache.
- ;; This is guaranteed by the semantics of the corresponding WriteHandler.
- :readers {indexed-tag
- (reify ReadHandler
- (read [_ rdr _ _]
- (d/clj-struct-idx->obj (.readInt rdr))))
- tag
- (reify ReadHandler
- (read [_ rdr _ _]
- (-> rdr
- read-fn
- d/clj-struct-holder-add-obj!)))}}))
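;; For intuition, with hypothetical data: because both values in the map below
;; are the *same* vector instance, the vector body is written once and the
;; second occurrence is written only as a "clj/vector-idx" reference; on read,
;; both keys point at the same deserialized object again.
(comment
  (let [shared [1 2 3]]
    {:a shared :b shared}))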
-
-(def handlers
- "A structure tying together the custom Fressian write and read handlers used
- by FressianSessionSerializer's."
- {"java/class"
- {:class Class
- :writer (reify WriteHandler
- (write [_ w c]
- (.writeTag w "java/class" 1)
- (.writeObject w (symbol (.getName ^Class c)) true)))
- :readers {"java/class"
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (resolve (.readObject rdr))))}}
-
- "clj/set"
- (create-identity-based-handler
- clojure.lang.APersistentSet
- "clj/set"
- write-with-meta
- (fn clj-set-reader [rdr] (read-with-meta rdr set)))
-
- "clj/vector"
- (create-identity-based-handler
- clojure.lang.APersistentVector
- "clj/vector"
- write-with-meta
- (fn clj-vec-reader [rdr] (read-with-meta rdr vec)))
-
- "clj/list"
- (create-identity-based-handler
- clojure.lang.PersistentList
- "clj/list"
- write-with-meta
- (fn clj-list-reader [rdr] (read-with-meta rdr #(apply list %))))
-
- "clj/emptylist"
- ;; Not using the identity-based handler since the empty list is always the same instance anyway;
- ;; any metadata is added back in the reader.
- {:class clojure.lang.PersistentList$EmptyList
- :writer (reify WriteHandler
- (write [_ w o]
- (let [m (meta o)]
- (do
- (.writeTag w "clj/emptylist" 1)
- (if m
- (.writeObject w m)
- (.writeNull w))))))
- :readers {"clj/emptylist"
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (let [m (read-meta rdr)]
- (cond-> '()
- m (with-meta m)))))}}
-
- "clj/aseq"
- (create-identity-based-handler
- clojure.lang.ASeq
- "clj/aseq"
- write-with-meta
- (fn clj-seq-reader [rdr] (read-with-meta rdr sequence)))
-
- "clj/lazyseq"
- (create-identity-based-handler
- clojure.lang.LazySeq
- "clj/lazyseq"
- write-with-meta
- (fn clj-lazy-seq-reader [rdr] (read-with-meta rdr sequence)))
-
- "clj/map"
- (create-identity-based-handler
- clojure.lang.APersistentMap
- "clj/map"
- (fn clj-map-writer [wtr tag m] (write-with-meta wtr tag m write-map))
- (fn clj-map-reader [rdr] (read-with-meta rdr #(into {} %))))
-
- "clj/treeset"
- (create-identity-based-handler
- clojure.lang.PersistentTreeSet
- "clj/treeset"
- (fn clj-treeset-writer [^Writer wtr tag s]
- (let [cname (d/sorted-comparator-name s)]
- (.writeTag wtr tag 3)
- (if cname
- (.writeObject wtr cname true)
- (.writeNull wtr))
- ;; Preserve metadata.
- (if-let [m (meta s)]
- (.writeObject wtr m)
- (.writeNull wtr))
- (.writeList wtr s)))
- (fn clj-treeset-reader [^Reader rdr]
- (let [c (some-> rdr .readObject resolve deref)
- m (.readObject rdr)
- s (-> (.readObject rdr)
- (d/seq->sorted-set c))]
- (if m
- (with-meta s m)
- s))))
-
- "clj/treemap"
- (create-identity-based-handler
- clojure.lang.PersistentTreeMap
- "clj/treemap"
- (fn clj-treemap-writer [^Writer wtr tag o]
- (let [cname (d/sorted-comparator-name o)]
- (.writeTag wtr tag 3)
- (if cname
- (.writeObject wtr cname true)
- (.writeNull wtr))
- ;; Preserve metadata.
- (if-let [m (meta o)]
- (.writeObject wtr m)
- (.writeNull wtr))
- (write-map wtr o)))
- (fn clj-treemap-reader [^Reader rdr]
- (let [c (some-> rdr .readObject resolve deref)
- m (.readObject rdr)
- s (d/seq->sorted-map (.readObject rdr) c)]
- (if m
- (with-meta s m)
- s))))
-
- "clj/mapentry"
- (create-identity-based-handler
- clojure.lang.MapEntry
- "clj/mapentry"
- (fn clj-mapentry-writer [^Writer wtr tag o]
- (.writeTag wtr tag 2)
- (.writeObject wtr (key o) true)
- (.writeObject wtr (val o)))
- (fn clj-mapentry-reader [^Reader rdr]
- (d/create-map-entry (.readObject rdr)
- (.readObject rdr))))
-
- ;; Have to redefine both Symbol and IRecord to support metadata as well
- ;; as identity-based caching for the IRecord case.
-
- "clj/sym"
- (create-identity-based-handler
- clojure.lang.Symbol
- "clj/sym"
- (fn clj-sym-writer [^Writer wtr tag o]
- ;; Mostly copied from private fres/write-named, except the metadata part.
- (.writeTag wtr tag 3)
- (.writeObject wtr (namespace o) true)
- (.writeObject wtr (name o) true)
- (if-let [m (meta o)]
- (.writeObject wtr m)
- (.writeNull wtr)))
- (fn clj-sym-reader [^Reader rdr]
- (let [s (symbol (.readObject rdr) (.readObject rdr))
- m (read-meta rdr)]
- (cond-> s
- m (with-meta m)))))
-
- "clj/record"
- (create-identity-based-handler
- clojure.lang.IRecord
- "clj/record"
- write-record
- read-record)
-
- "clara/productionnode"
- (create-cached-node-handler ProductionNode
- "clara/productionnode"
- "clara/productionnodeid"
- #(assoc % :rhs nil)
- d/add-rhs-fn)
-
- "clara/querynode"
- (create-cached-node-handler QueryNode
- "clara/querynode"
- "clara/querynodeid")
-
- "clara/alphanode"
- (create-cached-node-handler AlphaNode
- "clara/alphanodeid"
- "clara/alphanode"
- #(assoc % :activation nil)
- d/add-alpha-fn)
-
- "clara/rootjoinnode"
- (create-cached-node-handler RootJoinNode
- "clara/rootjoinnode"
- "clara/rootjoinnodeid")
-
- "clara/hashjoinnode"
- (create-cached-node-handler HashJoinNode
- "clara/hashjoinnode"
- "clara/hashjoinnodeid")
-
- "clara/exprjoinnode"
- (create-cached-node-handler ExpressionJoinNode
- "clara/exprjoinnode"
- "clara/exprjoinnodeid"
- #(assoc % :join-filter-fn nil)
- d/add-join-filter-fn)
-
- "clara/negationnode"
- (create-cached-node-handler NegationNode
- "clara/negationnode"
- "clara/negationnodeid")
-
- "clara/negationwjoinnode"
- (create-cached-node-handler NegationWithJoinFilterNode
- "clara/negationwjoinnode"
- "clara/negationwjoinnodeid"
- #(assoc % :join-filter-fn nil)
- d/add-join-filter-fn)
-
- "clara/testnode"
- (create-cached-node-handler TestNode
- "clara/testnode"
- "clara/testnodeid"
- #(assoc % :test nil)
- d/add-test-fn)
-
- "clara/accumnode"
- (create-cached-node-handler AccumulateNode
- "clara/accumnode"
- "clara/accumnodeid"
- #(assoc % :accumulator nil)
- d/add-accumulator)
-
- "clara/accumwjoinnode"
- (create-cached-node-handler AccumulateWithJoinFilterNode
- "clara/accumwjoinnode"
- "clara/accumwjoinnodeid"
- #(assoc % :accumulator nil :join-filter-fn nil)
- (comp d/add-accumulator d/add-join-filter-fn))
-
- "clara/ruleorderactivation"
- {:class RuleOrderedActivation
- :writer (reify WriteHandler
- (write [_ w c]
- (.writeTag w "clara/ruleorderactivation" 4)
- (.writeObject w (.-node-id ^RuleOrderedActivation c) true)
- (.writeObject w (.-token ^RuleOrderedActivation c))
- (.writeObject w (.-activation ^RuleOrderedActivation c))
- (.writeInt w (.-rule-load-order ^RuleOrderedActivation c))))
- :readers {"clara/ruleorderactivation"
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (mem/->RuleOrderedActivation (.readObject rdr)
- (.readObject rdr)
- (.readObject rdr)
- (.readObject rdr)
- false)))}}
-
- "clara/memidx"
- {:class MemIdx
- :writer (reify WriteHandler
- (write [_ w c]
- (.writeTag w "clara/memidx" 1)
- (.writeInt w (:idx c))))
- :readers {"clara/memidx"
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (d/find-mem-idx (.readObject rdr))))}}
-
- "clara/internalmemidx"
- {:class InternalMemIdx
- :writer (reify WriteHandler
- (write [_ w c]
- (.writeTag w "clara/internalmemidx" 1)
- (.writeInt w (:idx c))))
- :readers {"clara/internalmemidx"
- (reify ReadHandler
- (read [_ rdr tag component-count]
- (d/find-internal-idx (.readObject rdr))))}}})
-
-(def write-handlers
- "All Fressian write handlers used by FressianSessionSerializer's."
- (into fres/clojure-write-handlers
- (map (fn [[tag {clazz :class wtr :writer}]]
- [clazz {tag wtr}]))
- handlers))
-
-(def read-handlers
- "All Fressian read handlers used by FressianSessionSerializer's."
- (->> handlers
- vals
- (into fres/clojure-read-handlers
- (mapcat :readers))))
-
-(def write-handler-lookup
- (-> write-handlers
- fres/associative-lookup
- fres/inheritance-lookup))
-
-(def read-handler-lookup
- (fres/associative-lookup read-handlers))
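;; For orientation on the shapes built above: write-handlers maps a class to a
;; {tag WriteHandler} entry, while read-handlers maps a tag to a ReadHandler, e.g.
;;   (get write-handlers clojure.lang.Symbol) ;; => {"clj/sym" <WriteHandler>}
;;   (get read-handlers "clj/sym")            ;; => <ReadHandler>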
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;;;; Session serializer.
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-
-(defrecord FressianSessionSerializer [in-stream out-stream]
- d/ISessionSerializer
- (serialize [_ session opts]
- (let [{:keys [rulebase memory]} (eng/components session)
- node-expr-fn-lookup (:node-expr-fn-lookup rulebase)
- remove-node-fns (fn [expr-lookup]
- (zipmap (keys expr-lookup)
- (mapv second (vals expr-lookup))))
- rulebase (assoc rulebase
- :activation-group-sort-fn nil
- :activation-group-fn nil
- :get-alphas-fn nil
- :node-expr-fn-lookup nil)
- record-holder (IdentityHashMap.)
- do-serialize
- (fn [sources]
- (with-open [^FressianWriter wtr
- (fres/create-writer out-stream :handlers write-handler-lookup)]
- (pform/thread-local-binding [d/node-id->node-cache (volatile! {})
- d/clj-struct-holder record-holder]
- (doseq [s sources] (fres/write-object wtr s)))))]
-
- ;; In this case there is nothing to do with memory, so just serialize immediately.
- (if (:rulebase-only? opts)
- ;; node-expr-fn-lookup is a map with a structure of:
- ;; {[Int Keyword] [IFn {Keyword Any}]}
- ;; as fns are not serializable, we must remove them and alter the structure of the map to be
- ;; {[Int Keyword] {Keyword Any}}
- ;; during deserialization the compilation-context({Keyword Any}), which contains the unevaluated form,
- ;; can be used to reconstruct the original map.
- (do-serialize [(remove-node-fns node-expr-fn-lookup) rulebase])
-
- ;; Otherwise memory needs to have facts extracted to return.
- (let [{:keys [memory indexed-facts internal-indexed-facts]} (d/indexed-session-memory-state memory)
- sources (if (:with-rulebase? opts)
- [(remove-node-fns node-expr-fn-lookup) rulebase internal-indexed-facts memory]
- [internal-indexed-facts memory])]
-
- (do-serialize sources)
-
- ;; Return the facts needing to be serialized still.
- indexed-facts))))
-
- (deserialize [_ mem-facts opts]
-
- (with-open [^FressianReader rdr (fres/create-reader in-stream :handlers read-handler-lookup)]
- (let [{:keys [rulebase-only? base-rulebase forms-per-eval]} opts
-
- record-holder (ArrayList.)
- ;; The rulebase should either be given from the base-session or found in
- ;; the restored session-state.
- maybe-base-rulebase (when (and (not rulebase-only?) base-rulebase)
- base-rulebase)
-
- forms-per-eval (or forms-per-eval com/forms-per-eval-default)
-
- reconstruct-expressions (fn [expr-lookup]
- ;; Rebuilding the expr-lookup map from the serialized map:
- ;; {[Int Keyword] {Keyword Any}} -> {[Int Keyword] [SExpr {Keyword Any}]}
- (into {}
- (for [[node-key compilation-ctx] expr-lookup]
- [node-key [(-> compilation-ctx (get (nth node-key 1)))
- compilation-ctx]])))
-
- rulebase (if maybe-base-rulebase
- maybe-base-rulebase
- (let [without-opts-rulebase
- (pform/thread-local-binding [d/node-id->node-cache (volatile! {})
- d/clj-struct-holder record-holder]
- (pform/thread-local-binding [d/node-fn-cache (-> (fres/read-object rdr)
- reconstruct-expressions
- (com/compile-exprs forms-per-eval))]
- (assoc (fres/read-object rdr)
- :node-expr-fn-lookup
- (.get d/node-fn-cache))))]
- (d/rulebase->rulebase-with-opts without-opts-rulebase opts)))]
-
- (if rulebase-only?
- rulebase
- (d/assemble-restored-session rulebase
- (pform/thread-local-binding [d/clj-struct-holder record-holder
- d/mem-facts mem-facts]
- ;; internal memory contains facts provided by mem-facts
- ;; thus mem-facts must be bound before the call to read
- ;; the internal memory
- (pform/thread-local-binding [d/mem-internal (fres/read-object rdr)]
- (fres/read-object rdr)))
- opts))))))
-
-(s/defn create-session-serializer
- "Creates an instance of FressianSessionSerializer which implements d/ISessionSerializer by using
- Fressian serialization for the session structures.
-
- In the one arity case, takes either an input stream or an output stream. This arity is intended for
- creating a Fressian serializer instance that will only be used for serialization or deserialization,
- but not both, e.g. when serialization and deserialization are not done from
- the same process. If it is to be used for serialization, then the stream given should be an output
- stream. If it is to be used for deserialization, then the stream to be given should be an
- input stream.
-
- In the two arity case, takes an input stream and an output stream. These will be used for
- deserialization and serialization within the created Fressian serializer instance, respectively.
-
- Note! Currently this only supports the clara.rules.memory.PersistentLocalMemory implementation
- of memory."
- ([in-or-out-stream :- (s/pred (some-fn #(instance? InputStream %)
- #(instance? OutputStream %))
- "java.io.InputStream or java.io.OutputStream")]
- (if (instance? InputStream in-or-out-stream)
- (create-session-serializer in-or-out-stream nil)
- (create-session-serializer nil in-or-out-stream)))
-
- ([in-stream :- (s/maybe InputStream)
- out-stream :- (s/maybe OutputStream)]
- (->FressianSessionSerializer in-stream out-stream)))
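;; A hedged usage sketch: serialize only the rulebase to a byte array and read
;; it back. `base-session` is an assumption; `d` is the clara.rules.durability
;; alias required above.
(comment
  (let [baos (java.io.ByteArrayOutputStream.)]
    (d/serialize-rulebase base-session (create-session-serializer baos))
    (d/deserialize-rulebase
     (create-session-serializer (java.io.ByteArrayInputStream. (.toByteArray baos))))))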
diff --git a/build/clara/rules/engine.clj b/build/clara/rules/engine.clj
deleted file mode 100644
index 4bbfa1b1..00000000
--- a/build/clara/rules/engine.clj
+++ /dev/null
@@ -1,2119 +0,0 @@
-(ns clara.rules.engine
- "This namespace is for internal use and may move in the future. Most users should use only the clara.rules namespace."
- (:require [clojure.core.reducers :as r]
- [clojure.string :as string]
- [clara.rules.memory :as mem]
- [clara.rules.listener :as l]
- [clara.rules.platform :as platform]
- [clara.rules.update-cache.core :as uc]
- [clara.rules.update-cache.cancelling :as ca]))
-
-;; The accumulator is a Rete extension to run an accumulation (such as sum, average, or similar operation)
-;; over a collection of values passing through the Rete network. This object defines the behavior
-;; of an accumulator. See the AccumulateNode for the actual node implementation in the network.
-(defrecord Accumulator [initial-value retract-fn reduce-fn combine-fn convert-return-fn])
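;; Illustrative only: a sum accumulator built positionally from the fields
;; above. Consumers normally construct accumulators via clara.rules.accumulators;
;; the :value key on facts is hypothetical.
(comment
  (->Accumulator 0                                     ; initial-value
                 nil                                   ; retract-fn (omitted here)
                 (fn [acc fact] (+ acc (:value fact))) ; reduce-fn
                 +                                     ; combine-fn
                 identity))                            ; convert-return-fn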
-
-;; A Rete-style token, which contains two items:
-;; * matches, a vector of [fact, node-id] tuples for the facts and corresponding nodes they matched.
-;; NOTE: It is important that this remains an indexed vector for memory optimizations as well as
-;; for correct conj behavior for new elements i.e. added to the end.
-;; * bindings, a map of keyword-to-values for bound variables.
-(defrecord Token [matches bindings])
-
-;; A working memory element, containing a single fact and its corresponding bound variables.
-(defrecord Element [fact bindings])
-
-;; An activation for the given production and token.
-(defrecord Activation [node token])
-
-;; Token with no bindings, used as the root of beta nodes.
-(def empty-token (->Token [] {}))
-
-;; Record indicating the negation existing in the working memory.
-;;
-;; Determining if an object is an instance of a class is a primitive
-;; JVM operation and is much more efficient than determining
-;; if that object descends from a particular object through
-;; Clojure's hierarchy as determined by the isa? function.
-;; See Issue 239 for more details.
-;; A marker interface to identify internal facts.
-(definterface ISystemFact)
-
-(defrecord NegationResult
- [gen-rule-name ancestor-bindings]
- ISystemFact)
-
-;; Schema for the structure returned by the components
-;; function on the session protocol.
-;; This is simply a comment rather than first-class schema
-;; for now since it's unused for validation and created
-;; undesired warnings as described at https://groups.google.com/forum/#!topic/prismatic-plumbing/o65PfJ4CUkI
-(comment
- (def session-components-schema
- {:rulebase s/Any
- :memory s/Any
- :transport s/Any
- :listeners [s/Any]
- :get-alphas-fn s/Any}))
-
-;; Returns a new session with the additional facts inserted.
-(defprotocol ISession
-
- ;; Inserts facts.
- (insert [session facts])
-
- ;; Retracts facts.
- (retract [session facts])
-
- ;; Fires pending rules and returns a new session where they are in a fired state.
- ;;
- ;; Note that clara.rules/fire-rules, the public API for these methods, will handle
- ;; calling the two-arg fire-rules with an empty map itself, but we also handle it in the fire-rules implementation
- ;; as well in case anyone is directly calling the fire-rules protocol function or interface method on the LocalSession.
- ;; The two-argument version of fire-rules was added for issue 249.
- (fire-rules [session] [session opts])
-
- ;; Runs a query against the session.
- (query [session query params])
-
- ;; Returns the components of a session as defined in the session-components-schema
- (components [session]))
-
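;; For orientation, with hypothetical names (`my-session`, `->Temperature`,
;; `temp-query`): consumers normally reach these protocol functions through the
;; clara.rules wrappers, but they can also be called directly:
(comment
  (-> my-session
      (insert [(->Temperature 20 "MCI")])
      (fire-rules)
      (query temp-query {:?location "MCI"})))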
-;; Left activation protocol for various types of beta nodes.
-(defprotocol ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener])
- (left-retract [node join-bindings tokens memory transport listener])
- (description [node])
- (get-join-keys [node]))
-
-;; Right activation protocol to insert new facts, connecting alpha nodes
-;; and beta nodes.
-(defprotocol IRightActivate
- (right-activate [node join-bindings elements memory transport listener])
- (right-retract [node join-bindings elements memory transport listener]))
-
-;; Specialized right activation interface for accumulator nodes,
-;; where the caller has the option of pre-reducing items
-;; to reduce the data sent to the node. This would be useful
-;; if the caller is not in the same memory space as the accumulator node itself.
-(defprotocol IAccumRightActivate
- ;; Pre-reduces elements, returning a map of bindings to reduced elements.
- (pre-reduce [node elements])
-
- ;; Right-activate the node with items reduced in the above pre-reduce step.
- (right-activate-reduced [node join-bindings reduced memory transport listener]))
-
-(defprotocol IAccumInspect
- "This protocol is expected to be implemented on accumulator nodes in the rules network.
- It is not expected that users will implement this protocol, nor will they typically call
- the protocol function directly."
- (token->matching-elements [node memory token]
- "Takes a token that was previously propagated from the node,
- or a token that is a descendant of such a token, and returns the facts in elements
- matching the token propagated from the node. During rules firing
- accumulators only propagate bindings created and the result binding
- downstream rather than all facts that were accumulated over, but there
- are use-cases in session inspection where we want to retrieve the individual facts.
-
- Example: [?min-temp <- (acc/min :temperature) :from [Temperature (= location ?loc)]]
- [?windspeed <- WindSpeed (= location ?loc)]
-
- Given a token propagated from the node for the WindSpeed condition
- we could retrieve the Temperature facts from the matching location."))
-
-;; The transport protocol for sending and retracting items between nodes.
-(defprotocol ITransport
- (send-elements [transport memory listener nodes elements])
- (send-tokens [transport memory listener nodes tokens])
- (retract-elements [transport memory listener nodes elements])
- (retract-tokens [transport memory listener nodes tokens]))
-
-(defn- propagate-items-to-nodes [transport memory listener nodes items propagate-fn]
- (doseq [node nodes
- :let [join-keys (get-join-keys node)]]
-
- (if (pos? (count join-keys))
-
- ;; Group by the join keys for the activation.
- (doseq [[join-bindings item-group] (platform/group-by-seq #(select-keys (:bindings %) join-keys) items)]
- (propagate-fn node
- join-bindings
- item-group
- memory
- transport
- listener))
-
- ;; The node has no join keys, so just send everything at once
- ;; (if there is something to send.)
- (when (seq items)
- (propagate-fn node
- {}
- items
- memory
- transport
- listener)))))
-
-;; Simple, in-memory transport.
-(deftype LocalTransport []
- ITransport
- (send-elements [transport memory listener nodes elements]
- (propagate-items-to-nodes transport memory listener nodes elements right-activate))
-
- (send-tokens [transport memory listener nodes tokens]
- (propagate-items-to-nodes transport memory listener nodes tokens left-activate))
-
- (retract-elements [transport memory listener nodes elements]
- (propagate-items-to-nodes transport memory listener nodes elements right-retract))
-
- (retract-tokens [transport memory listener nodes tokens]
- (propagate-items-to-nodes transport memory listener nodes tokens left-retract)))
-
-;; Protocol for activation of Rete alpha nodes.
-(defprotocol IAlphaActivate
- (alpha-activate [node facts memory transport listener])
- (alpha-retract [node facts memory transport listener]))
-
-;; Protocol for getting the type (e.g. :production and :query) and name of a
-;; terminal node.
-(defprotocol ITerminalNode
- (terminal-node-type [this]))
-
-;; Protocol for getting a node's condition expression.
-(defprotocol IConditionNode
- (get-condition-description [this]))
-
-(defn get-terminal-node-types
- [node]
- (->> node
- (tree-seq (comp seq :children) :children)
- (keep #(when (satisfies? ITerminalNode %)
- (terminal-node-type %)))
- (into (sorted-set))))
-
-(defn get-conditions-and-rule-names
- "Returns a map from conditions to sets of rules."
- ([node]
- (if-let [condition (when (satisfies? IConditionNode node)
- (get-condition-description node))]
- {condition (get-terminal-node-types node)}
- (->> node
- :children
- (map get-conditions-and-rule-names)
- (reduce (partial merge-with into) {})))))
-
-;; Active session during rule execution.
-(def ^:dynamic *current-session* nil)
-
-;; Note that this can hold facts directly retracted and facts logically retracted
-;; as a result of an external retraction or insertion.
-;; The value is expected to be an atom holding such facts.
-(def ^:dynamic *pending-external-retractions* nil)
-
-;; The token that triggered a rule to fire.
-(def ^:dynamic *rule-context* nil)
-
-(defn ^:private external-retract-loop
- "Retract all facts, then group and retract all facts that must be logically retracted because of these
- retractions, and so forth, until logical consistency is reached. When an external retraction causes multiple
- facts of the same type to be retracted in the same iteration of the loop this improves efficiency since they can be grouped.
- For example, if we have a rule that matches on FactA and inserts FactB, and then a later rule that accumulates on FactB,
- and we have multiple FactA external retractions, it is more efficient to logically retract all the FactB instances at once to minimize the number of times we must re-accumulate on FactB.
- This is similar to the function of the pending-updates in the fire-rules* loop."
- [get-alphas-fn memory transport listener]
- (loop []
- (let [retractions (deref *pending-external-retractions*)
- ;; We have already obtained a direct reference to the facts to be
- ;; retracted in this iteration of the loop outside the cache. Now reset
- ;; the cache. The retractions we execute may cause new retractions to be queued
- ;; up, in which case the loop will execute again.
- _ (reset! *pending-external-retractions* [])]
- (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
- root alpha-roots]
- (alpha-retract root fact-group memory transport listener))
- (when (-> *pending-external-retractions* deref not-empty)
- (recur)))))
-
-(defn- flush-updates
- "Flush all pending updates in the current session. Returns true if there were
- some items to flush, false otherwise"
- [current-session]
- (letfn [(flush-all [current-session flushed-items?]
- (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} current-session
- pending-updates (-> current-session :pending-updates uc/get-updates-and-reset!)]
-
- (if (empty? pending-updates)
- flushed-items?
- (do
- (doseq [partition pending-updates
- :let [facts (mapcat :facts partition)]
- [alpha-roots fact-group] (get-alphas-fn facts)
- root alpha-roots]
-
- (if (= :insert (:type (first partition)))
- (alpha-activate root fact-group transient-memory transport listener)
- (alpha-retract root fact-group transient-memory transport listener)))
-
- ;; There may be new pending updates due to the flush just
- ;; made. So keep flushing until there are none left. Items
- ;; were flushed though, so flush-items? is now true.
- (flush-all current-session true)))))]
-
- (flush-all current-session false)))
-
-(defn insert-facts!
- "Place facts in a stateful cache to be inserted into the session
- immediately after the RHS of a rule fires."
- [facts unconditional]
- (if unconditional
- (swap! (:batched-unconditional-insertions *rule-context*) into facts)
- (swap! (:batched-logical-insertions *rule-context*) into facts)))
-
-(defn rhs-retract-facts!
- "Place all facts retracted in the RHS in a buffer to be retracted after
- the eval'ed RHS function completes."
- [facts]
- (swap! (:batched-rhs-retractions *rule-context*) into facts))
-
-(defn ^:private flush-rhs-retractions!
- "Retract all facts retracted in the RHS after the eval'ed RHS function completes.
- This should only be used for facts explicitly retracted in a RHS.
- It should not be used for retractions that occur as part of automatic truth maintenance."
- [facts]
- (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} *current-session*
- {:keys [node token]} *rule-context*]
- ;; Update the count so the rule engine will know when we have normalized.
- (swap! insertions + (count facts))
-
- (when listener
- (l/retract-facts! listener node token facts))
-
- (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
- root alpha-roots]
-
- (alpha-retract root fact-group transient-memory transport listener))))
-
-(defn ^:private flush-insertions!
- "Perform the actual fact insertion, optionally making them unconditional. This should only
- be called once per rule activation for logical insertions."
- [facts unconditional]
- (let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} *current-session*
- {:keys [node token]} *rule-context*]
-
- ;; Update the insertion count.
- (swap! insertions + (count facts))
-
- ;; Track this insertion in our transient memory so logical retractions will remove it.
- (if unconditional
- (l/insert-facts! listener node token facts)
- (do
- (mem/add-insertions! transient-memory node token facts)
- (l/insert-facts-logical! listener node token facts)))
-
- (-> *current-session* :pending-updates (uc/add-insertions! facts))))
-
-(defn retract-facts!
- "Perform the fact retraction."
- [facts]
- (-> *current-session* :pending-updates (uc/add-retractions! facts)))
-
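;; Context sketch with hypothetical rule and fact types (`cold-alert`,
;; `Temperature`, `->ColdAlert`): the insertion and retraction helpers above are
;; what the clara.rules RHS helpers insert!, insert-unconditional! and retract!
;; ultimately delegate to when called from a rule's right-hand side.
(comment
  (defrule cold-alert
    [Temperature (< temperature 0) (= ?t temperature)]
    =>
    (insert! (->ColdAlert ?t))))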
-;; Record for the production node in the Rete network.
-(defrecord ProductionNode [id production rhs]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
-
- ;; Provide listeners information on all left-activate calls,
- ;; but we don't store these tokens in the beta-memory since the production-memory
- ;; and activation-memory collectively contain all information that ProductionNode
- ;; needs. See https://github.com/cerner/clara-rules/issues/386
- (l/left-activate! listener node tokens)
-
- ;; Fire the rule if it's not a no-loop rule, or if the rule is not
- ;; active in the current context.
- (when (or (not (get-in production [:props :no-loop]))
- (not (= production (get-in *rule-context* [:node :production]))))
-
- (let [activations (platform/eager-for [token tokens]
- (->Activation node token))]
-
- (l/add-activations! listener node activations)
-
- ;; The production matched, so add the tokens to the activation list.
- (mem/add-activations! memory production activations))))
-
- (left-retract [node join-bindings tokens memory transport listener]
-
- ;; Provide listeners information on all left-retract calls for passivity,
- ;; but we don't store these tokens in the beta-memory since the production-memory
- ;; and activation-memory collectively contain all information that ProductionNode
- ;; needs. See https://github.com/cerner/clara-rules/issues/386
- (l/left-retract! listener node tokens)
-
- ;; Remove pending activations triggered by the retracted tokens.
- (let [activations (platform/eager-for [token tokens]
- (->Activation node token))
-
- ;; We attempt to remove a pending activation for all tokens retracted, but our expectation
- ;; is that each token may remove a pending activation
- ;; or logical insertions from a previous rule activation but not both.
- ;; We first attempt to use each token to remove a pending activation but keep track of which
- ;; tokens were not used to remove an activation.
- [removed-activations unremoved-activations]
- (mem/remove-activations! memory production activations)
-
- _ (l/remove-activations! listener node removed-activations)
-
- unremoved-tokens (mapv :token unremoved-activations)
-
- ;; Now use each token that was not used to remove a pending activation to remove
- ;; the logical insertions from a previous activation if the truth maintenance system
- ;; has a matching previous activation.
- token-insertion-map (mem/remove-insertions! memory node unremoved-tokens)]
-
- (when-let [insertions (seq (apply concat (vals token-insertion-map)))]
- ;; If there is current session with rules firing, add these items to the queue
- ;; to be retracted so they occur in the same order as facts being inserted.
- (cond
-
- ;; Both logical retractions resulting from rule network activity and manual RHS retractions
- ;; expect *current-session* to be bound since both happen in the context of a fire-rules call.
- *current-session*
- ;; Retract facts that have become untrue, unless they became untrue
- ;; because of an activation of the current rule that is :no-loop
- (when (or (not (get-in production [:props :no-loop]))
- (not (= production (get-in *rule-context* [:node :production]))))
- (do
- ;; Notify the listener of logical retractions.
- ;; Note that this notification happens immediately, while the
- ;; alpha-retract notification on matching alpha nodes will happen when the
- ;; retraction is actually removed from the buffer and executed in the rules network.
- (doseq [[token token-insertions] token-insertion-map]
- (l/retract-facts-logical! listener node token token-insertions))
- (retract-facts! insertions)))
-
- ;; Any session implementation is required to bind this during external retractions and insertions.
- *pending-external-retractions*
- (do
- (doseq [[token token-insertions] token-insertion-map]
- (l/retract-facts-logical! listener node token token-insertions))
- (swap! *pending-external-retractions* into insertions))
-
- :else
- (throw (ex-info (str "Attempting to retract from a ProductionNode when neither *current-session* nor "
- "*pending-external-retractions* is bound is illegal.")
- {:node node
- :join-bindings join-bindings
- :tokens tokens}))))))
-
- (get-join-keys [node] [])
-
- (description [node] "ProductionNode")
-
- ITerminalNode
- (terminal-node-type [this] [:production (:name production)]))
-
-;; The QueryNode is a terminal node that stores the
-;; state that can be queried by a rule user.
-(defrecord QueryNode [id query param-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- (l/left-activate! listener node tokens)
- (mem/add-tokens! memory node join-bindings tokens))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (mem/remove-tokens! memory node join-bindings tokens))
-
- (get-join-keys [node] param-keys)
-
- (description [node] (str "QueryNode -- " query))
-
- ITerminalNode
- (terminal-node-type [this] [:query (:name query)]))
-
-(defn node-rule-names
- [child-type node]
- (->> node
- (tree-seq (comp seq :children) :children)
- (keep child-type)
- (map :name)
- (distinct)
- (sort)))
-
-(defn- list-of-names
- "Returns formatted string with correctly pluralized header and
- list of names. Returns nil if no such node is found."
- [singular plural prefix names]
- (let [msg-for-unnamed (str " An unnamed " singular ", provide names to your "
- plural " if you want them to be identified here.")
- names-string (->> names
- (sort)
- (map #(if (nil? %) msg-for-unnamed %))
- (map #(str prefix " " %))
- (string/join "\n"))]
- (if (pos? (count names))
- (str prefix plural ":\n" names-string "\n"))))
-
-(defn- single-condition-message
- [condition-number [condition-definition terminals]]
- (let [productions (->> terminals
- (filter (comp #{:production} first))
- (map second))
- queries (->> terminals
- (filter (comp #{:query} first))
- (map second))
- production-section (list-of-names "rule" "rules" " " productions)
- query-section (list-of-names "query" "queries" " " queries)]
- (string/join
- [(str (inc condition-number) ". " condition-definition "\n")
- production-section
- query-section])))
-
-(defn- throw-condition-exception
- "Adds a useful error message when executing a constraint node raises an exception."
- [{:keys [cause node fact env bindings] :as args}]
- (let [bindings-description (if (empty? bindings)
- "with no bindings"
- (str "with bindings\n " bindings))
- facts-description (if (contains? args :fact)
- (str "when processing fact\n " (pr-str fact))
- "with no fact")
- message-header (string/join ["Condition exception raised.\n"
- (str facts-description "\n")
- (str bindings-description "\n")
- "Conditions:\n"])
- conditions-and-rules (get-conditions-and-rule-names node)
- condition-messages (->> conditions-and-rules
- (map-indexed single-condition-message)
- (string/join "\n"))
- message (str message-header "\n" condition-messages)]
- (throw (ex-info message
- {:fact fact
- :bindings bindings
- :env env
- :conditions-and-rules conditions-and-rules}
- cause))))
-
-(defn- alpha-node-matches
- [facts env activation node]
- (platform/eager-for [fact facts
- :let [bindings (try
- (activation fact env)
- (catch Exception e
- (throw-condition-exception {:cause e
- :node node
- :fact fact
- :env env})))]
- :when bindings] ; FIXME: add env.
- [fact bindings]))
-
-;; Record representing alpha nodes in the Rete network,
-;; each of which evaluates a single condition and
-;; propagates matches to its children.
-(defrecord AlphaNode [id env children activation fact-type]
-
- IAlphaActivate
- (alpha-activate [node facts memory transport listener]
- (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
- (l/alpha-activate! listener node (map first fact-binding-pairs))
- (send-elements
- transport
- memory
- listener
- children
- (platform/eager-for [[fact bindings] fact-binding-pairs]
- (->Element fact bindings)))))
-
- (alpha-retract [node facts memory transport listener]
- (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
- (l/alpha-retract! listener node (map first fact-binding-pairs))
- (retract-elements
- transport
- memory
- listener
- children
- (platform/eager-for [[fact bindings] fact-binding-pairs]
- (->Element fact bindings))))))
-
-(defrecord RootJoinNode [id condition children binding-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener])
- ;; This specialized root node doesn't need to deal with the
- ;; empty token, so do nothing.
-
- (left-retract [node join-bindings tokens memory transport listener])
- ;; The empty token can't be retracted from the root node,
- ;; so do nothing.
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "RootJoinNode -- " (:text condition)))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
-
- (l/right-activate! listener node elements)
-
- ;; Add elements to the working memory to support analysis tools.
- (mem/add-elements! memory node join-bindings elements)
- ;; Simply create tokens and send them downstream.
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [{:keys [fact bindings] :as element} elements]
- (->Token [[fact (:id node)]] bindings))))
-
- (right-retract [node join-bindings elements memory transport listener]
-
- (l/right-retract! listener node elements)
-
- ;; Remove matching elements and send the retraction downstream.
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)]
- (->Token [[fact (:id node)]] bindings))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [type constraints]} condition]
- (into [type] constraints))))
-
-;; Record for the join node, a type of beta node in the rete network. This node performs joins
-;; between left and right activations, creating new tokens when joins match and sending them to
-;; its descendants.
-(defrecord HashJoinNode [id condition children binding-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- ;; Add token to the node's working memory for future right activations.
- (mem/add-tokens! memory node join-bindings tokens)
- (l/left-activate! listener node tokens)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [element (mem/get-elements memory node join-bindings)
- token tokens
- :let [fact (:fact element)
- fact-binding (:bindings element)]]
- (->Token (conj (:matches token) [fact id]) (conj fact-binding (:bindings token))))))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
- element (mem/get-elements memory node join-bindings)
- :let [fact (:fact element)
- fact-bindings (:bindings element)]]
- (->Token (conj (:matches token) [fact id]) (conj fact-bindings (:bindings token))))))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "JoinNode -- " (:text condition)))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
- (mem/add-elements! memory node join-bindings elements)
- (l/right-activate! listener node elements)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
- {:keys [fact bindings] :as element} elements]
- (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
-
- (right-retract [node join-bindings elements memory transport listener]
- (l/right-retract! listener node elements)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
- token (mem/get-tokens memory node join-bindings)]
- (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [type constraints]} condition]
- (into [type] constraints))))
-
-(defn- join-node-matches
- [node join-filter-fn token fact fact-bindings env]
- (let [beta-bindings (try (join-filter-fn token fact fact-bindings {})
- (catch Exception e
- (throw-condition-exception {:cause e
- :node node
- :fact fact
- :env env
- :bindings (merge (:bindings token)
- fact-bindings)})))]
- beta-bindings))
-
-(defrecord ExpressionJoinNode [id condition join-filter-fn children binding-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- ;; Add token to the node's working memory for future right activations.
- (mem/add-tokens! memory node join-bindings tokens)
- (l/left-activate! listener node tokens)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [element (mem/get-elements memory node join-bindings)
- token tokens
- :let [fact (:fact element)
- fact-binding (:bindings element)
- beta-bindings (join-node-matches node join-filter-fn token fact fact-binding {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj fact-binding (:bindings token) beta-bindings)))))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
- element (mem/get-elements memory node join-bindings)
- :let [fact (:fact element)
- fact-bindings (:bindings element)
- beta-bindings (join-node-matches node join-filter-fn token fact fact-bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj fact-bindings (:bindings token) beta-bindings)))))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "JoinNode -- " (:text condition)))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
- (mem/add-elements! memory node join-bindings elements)
- (l/right-activate! listener node elements)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
- {:keys [fact bindings] :as element} elements
- :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj (:bindings token) bindings beta-bindings)))))
-
- (right-retract [node join-bindings elements memory transport listener]
- (l/right-retract! listener node elements)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
- token (mem/get-tokens memory node join-bindings)
- :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj (:bindings token) bindings beta-bindings)))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [type constraints original-constraints]} condition
- full-constraints (if (seq original-constraints)
- original-constraints
- constraints)]
- (into [type] full-constraints))))
-
-;; The NegationNode is a beta node in the Rete network that simply
-;; negates the incoming tokens from its ancestors. It sends tokens
- ;; to its descendants only if the negated condition or join fails (is false).
-(defrecord NegationNode [id condition children binding-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- ;; Add token to the node's working memory for future right activations.
- (l/left-activate! listener node tokens)
- (mem/add-tokens! memory node join-bindings tokens)
- (when (empty? (mem/get-elements memory node join-bindings))
- (send-tokens transport memory listener children tokens)))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (mem/remove-tokens! memory node join-bindings tokens)
- (when (empty? (mem/get-elements memory node join-bindings))
- (retract-tokens transport memory listener children tokens)))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "NegationNode -- " (:text condition)))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
- ;; Immediately evaluate whether there are previous elements since mem/get-elements
- ;; returns a mutable list with a LocalMemory on the JVM currently.
- (let [previously-empty? (empty? (mem/get-elements memory node join-bindings))]
- (l/right-activate! listener node elements)
- (mem/add-elements! memory node join-bindings elements)
- ;; Retract tokens that matched the activation if no element matched the negation previously.
- ;; If an element matched the negation already then no elements were propagated and there is
- ;; nothing to retract.
- (when previously-empty?
- (retract-tokens transport memory listener children (mem/get-tokens memory node join-bindings)))))
-
- (right-retract [node join-bindings elements memory transport listener]
- (l/right-retract! listener node elements)
- (mem/remove-elements! memory node join-bindings elements)
- (when (empty? (mem/get-elements memory node join-bindings))
- (send-tokens transport memory listener children (mem/get-tokens memory node join-bindings))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [type constraints]} condition]
- [:not (into [type] constraints)])))
-
-(defn- matches-some-facts?
- "Returns true if the given token matches one or more of the given elements."
- [node token elements join-filter-fn condition]
- (some (fn [{:keys [fact bindings]}]
- (join-node-matches node join-filter-fn token fact bindings (:env condition)))
- elements))
-
-;; A specialization of the NegationNode that supports additional tests
-;; that have to occur on the beta side of the network. The key difference between this and the simple
-;; negation node is the join-filter-fn, which allows negation tests to
-;; be applied with the parent token in context, rather than just a simple test of the non-existence
-;; on the alpha side.
-(defrecord NegationWithJoinFilterNode [id condition join-filter-fn children binding-keys]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- ;; Add token to the node's working memory for future right activations.
- (l/left-activate! listener node tokens)
- (mem/add-tokens! memory node join-bindings tokens)
-
- (send-tokens transport
- memory
- listener
- children
- (let [elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token tokens
- :when (not (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition))]
- token))))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (mem/remove-tokens! memory node join-bindings tokens)
- (retract-tokens transport
- memory
- listener
- children
-
- ;; Retract only if it previously had no matches in the negation node,
- ;; and therefore had an activation.
- (let [elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token tokens
- :when (not (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition))]
- token))))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "NegationWithJoinFilterNode -- " (:text condition)))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
- (l/right-activate! listener node elements)
- (let [previous-elements (mem/get-elements memory node join-bindings)]
- ;; Retract tokens that matched the activation, since they are no longer negated.
- (retract-tokens transport
- memory
- listener
- children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
-
- ;; Retract downstream if the token now has matching elements and didn't before.
- ;; We check the new elements first in the expectation that the new elements will be
- ;; smaller than the previous elements most of the time
- ;; and that the time to check the elements will be proportional
- ;; to the number of elements.
- :when (and (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition)
- (not (matches-some-facts? node
- token
- previous-elements
- join-filter-fn
- condition)))]
- token))
- ;; Adding the elements will mutate the previous-elements since, on the JVM, the LocalMemory
- ;; currently returns a mutable List from get-elements after changes in issue 184. We need to use the
- ;; new and old elements in the logic above as separate collections. Therefore we need to delay updating the
- ;; memory with the new elements until after we are done with previous-elements.
- (mem/add-elements! memory node join-bindings elements)))
-
- (right-retract [node join-bindings elements memory transport listener]
-
- (l/right-retract! listener node elements)
- (mem/remove-elements! memory node join-bindings elements)
-
- (send-tokens transport
- memory
- listener
- children
- (let [remaining-elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
-
- ;; Propagate tokens when some of the retracted facts joined
- ;; but none of the remaining facts do.
- :when (and (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition)
- (not (matches-some-facts? node
- token
- remaining-elements
- join-filter-fn
- condition)))]
- token))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [type constraints original-constraints]} condition
- full-constraints (if (seq original-constraints)
- original-constraints
- constraints)]
- [:not (into [type] full-constraints)])))
-
-(defn- test-node-matches
- [node test-handler env token]
- (let [test-result (try
- (test-handler token env)
- (catch Exception e
- (throw-condition-exception {:cause e
- :node node
- :env env
- :bindings (:bindings token)})))]
- test-result))
-
-;; The test node represents a Rete extension in which an arbitrary test condition is run
-;; against bindings from ancestor nodes. Since this node
-;; performs no joins it does not accept right activations or retractions.
-(defrecord TestNode [id env test children]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- (l/left-activate! listener node tokens)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for
- [token tokens
- :when (test-node-matches node (:handler test) env token)]
- token)))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (retract-tokens transport memory listener children tokens))
-
- (get-join-keys [node] [])
-
- (description [node] (str "TestNode -- " (:text test)))
-
- IConditionNode
- (get-condition-description [this]
- (into [:test] (:constraints test))))
-
-(defn- do-accumulate
- "Runs the actual accumulation. Returns the accumulated value."
- [accumulator facts]
- (r/reduce (:reduce-fn accumulator)
- (:initial-value accumulator)
- facts))
-
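For illustration, a minimal sketch of the accumulator structure consumed by do-accumulate; the sum-style accumulator map and the facts are hypothetical, not taken from this file. :initial-value seeds the fold and :reduce-fn folds each matching fact into the running value.

(comment
  (let [sum-accum {:initial-value 0
                   :reduce-fn (fn [total fact] (+ total (:amount fact)))}]
    ;; Reduce three illustrative facts into the initial value.
    (do-accumulate sum-accum [{:amount 1} {:amount 2} {:amount 3}])))
;; => 6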
-(defn- retract-accumulated
- "Helper function to retract an accumulated value."
- [node accum-condition accumulator result-binding token converted-result fact-bindings transport memory listener]
- (let [new-facts (conj (:matches token) [converted-result (:id node)])
- new-bindings (merge (:bindings token)
- fact-bindings
- (when result-binding
- {result-binding
- converted-result}))]
-
- (retract-tokens transport memory listener (:children node)
- [(->Token new-facts new-bindings)])))
-
-(defn- send-accumulated
- "Helper function to send the result of an accumulated value to the node's children."
- [node accum-condition accumulator result-binding token converted-result fact-bindings transport memory listener]
- (let [new-bindings (merge (:bindings token)
- fact-bindings
- (when result-binding
- {result-binding
- converted-result}))
-
- ;; This is to check that the produced accumulator result is
- ;; consistent with any variable from another rule condition
- ;; that has the same binding. If another condition binds something
- ;; to ?x, only the accumulator results that match that would propagate.
- ;; We can do this safely because previous states get retracted.
- previous-result (get (:bindings token) result-binding ::no-previous-result)]
-
- (when (or (= previous-result ::no-previous-result)
- (= previous-result converted-result))
-
- (send-tokens transport memory listener (:children node)
- [(->Token (conj (:matches token) [converted-result (:id node)]) new-bindings)]))))
-
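The ::no-previous-result check above only propagates an accumulated value that agrees with an existing binding of the same name on the token. A hedged sketch with hypothetical bindings:

(comment
  ;; Suppose another condition already bound ?total to 6 on the incoming token.
  (let [token {:bindings {:?total 6} :matches []}
        result-binding :?total]
    ;; A converted result of 6 matches the existing binding, so the token is sent to children;
    ;; a converted result of 7 would differ, and send-accumulated would propagate nothing.
    (get (:bindings token) result-binding ::no-previous-result)))
;; => 6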
-;; The AccumulateNode hosts Accumulators, a Rete extension described above, in the Rete network.
-;; It behaves similarly to a JoinNode, but performs an accumulation function on the incoming
-;; working-memory elements before sending a new token to its descendants.
-(defrecord AccumulateNode [id accum-condition accumulator result-binding children binding-keys new-bindings]
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
- (l/left-activate! listener node tokens)
- (let [previous-results (mem/get-accum-reduced-all memory node join-bindings)
- convert-return-fn (:convert-return-fn accumulator)
- has-matches? (seq previous-results)
- initial-value (when-not has-matches?
- (:initial-value accumulator))
- initial-converted (when (some? initial-value)
- (convert-return-fn initial-value))]
-
- (mem/add-tokens! memory node join-bindings tokens)
-
- (cond
- ;; If there are previously accumulated results to propagate, use them. If this is the
- ;; first time there are matching tokens, then the reduce will have to happen for the
- ;; first time. However, this reduce operation is independent of the specific tokens
- ;; since the elements join to the tokens via pre-computed hash join bindings for this
-        ;; node. So we only reduce once per group of binding-grouped facts, for all tokens. This includes
- ;; all bindings, not just the join bindings.
- has-matches?
- (doseq [[fact-bindings [previous previous-reduced]] previous-results
- :let [first-reduce? (= ::not-reduced previous-reduced)
- previous-reduced (if first-reduce?
- ;; Need to accumulate since this is the first time we have
- ;; tokens matching so we have not accumulated before.
- (do-accumulate accumulator previous)
- previous-reduced)
- accum-reduced (when first-reduce?
- ^::accum-node [previous previous-reduced])
- converted (when (some? previous-reduced)
- (convert-return-fn previous-reduced))]]
-
- ;; Newly accumulated results need to be added to memory.
- (when first-reduce?
- (l/add-accum-reduced! listener node join-bindings accum-reduced fact-bindings)
- (mem/add-accum-reduced! memory node join-bindings accum-reduced fact-bindings))
-
- (when (some? converted)
- (doseq [token tokens]
- (send-accumulated node accum-condition accumulator result-binding token converted fact-bindings
- transport memory listener))))
-
- ;; There are no previously accumulated results, but we still may need to propagate things
- ;; such as a sum of zero items.
- ;; If an initial value is provided and the converted value is non-nil, we can propagate
- ;; the converted value as the accumulated item.
- (and (some? initial-converted)
- (empty? new-bindings))
-
- ;; Note that this is added to memory a single time for all matching tokens because the memory
- ;; location doesn't depend on bindings from individual tokens.
-
- (let [accum-reduced ^::accum-node [[] initial-value]]
- ;; The fact-bindings are normally a superset of the join-bindings. We have no fact-bindings
- ;; that are not join-bindings in this case since we have verified that new-bindings is empty.
- ;; Therefore the join-bindings and fact-bindings are exactly equal.
- (l/add-accum-reduced! listener node join-bindings accum-reduced join-bindings)
- (mem/add-accum-reduced! memory node join-bindings accum-reduced join-bindings)
-
- ;; Send the created accumulated item to the children for each token.
- (doseq [token tokens]
- (send-accumulated node accum-condition accumulator result-binding token initial-converted {}
- transport memory listener)))
-
- ;; Propagate nothing if the above conditions don't apply.
- :else
- nil)))
-
- (left-retract [node join-bindings tokens memory transport listener]
- (l/left-retract! listener node tokens)
- (doseq [:let [removed-tokens (mem/remove-tokens! memory node join-bindings tokens)
- remaining-tokens (mem/get-tokens memory node join-bindings)
-
- ;; Note: Memory *must* be read here before the memory is potentially cleared in the
- ;; following lines.
- previous-results (mem/get-accum-reduced-all memory node join-bindings)
-
- ;; If there are no new bindings created by the accumulator condition then
- ;; a left-activation can create a new binding group in the accumulator memory.
- ;; If this token is later removed without corresponding elements having been added,
- ;; we remove the binding group from the accum memory. Otherwise adding and then retracting
-                  ;; tokens could force bindings to be retained for the duration of the JVM, regardless of whether
- ;; the backing facts were garbage collectable. This would be a memory leak.
- _ (when (and (empty? remaining-tokens)
- (empty? new-bindings)
- (let [current (mem/get-accum-reduced memory node join-bindings join-bindings)]
- (and
- ;; If there is nothing under these bindings already in the memory then there is no
- ;; need to take further action.
- (not= current ::mem/no-accum-reduced)
- ;; Check to see if there are elements under this binding group.
- ;; If elements are present we must keep the binding group regardless of the
- ;; presence or absence of tokens.
- (-> current first empty?))))
- (mem/remove-accum-reduced! memory node join-bindings join-bindings))]
- ;; There is nothing to do if no tokens were removed.
- :when (seq removed-tokens)
-            ;; Note that this will cause a Cartesian join between tokens and element groups where the token
- ;; and element group share the same join bindings, but the element groups may have additional bindings
- ;; that come from their alpha nodes. Keep in mind that these element groups need elements to be created
- ;; and cannot come from initial values if they have bindings that are not shared with tokens.
- [fact-bindings [previous previous-reduced]] previous-results
- :let [;; If there were tokens before that are now removed, the value would have been accumulated already.
- ;; This means there is no need to check for ::not-reduced here.
- previous-converted (when (some? previous-reduced)
- ((:convert-return-fn accumulator) previous-reduced))]
- ;; A nil previous result should not have been propagated before.
- :when (some? previous-converted)
- token removed-tokens]
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted fact-bindings
- transport memory listener)))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "AccumulateNode -- " accumulator))
-
- IAccumRightActivate
- (pre-reduce [node elements]
-    ;; Return a seq of tuples of the form [binding-group facts-from-group-elements].
- (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
- [bindings (mapv :fact element-group)]))
-
- (right-activate-reduced [node join-bindings fact-seq memory transport listener]
-
- ;; Combine previously reduced items together, join to matching tokens, and emit child tokens.
- (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
- ;; Note that we want to iterate over all tokens with the desired join bindings later
- ;; independently of the fact binding groups created by elements; that is, a token
- ;; can join with multiple groups of fact bindings when the accumulator condition
- ;; creates new bindings.
- matched-tokens (mem/get-tokens memory node join-bindings)
- has-matches? (seq matched-tokens)]
- [bindings facts] fact-seq
- :let [previous (mem/get-accum-reduced memory node join-bindings bindings)
- has-previous? (not= ::mem/no-accum-reduced previous)
- [previous previous-reduced] (if has-previous?
- previous
- [::mem/no-accum-reduced ::not-reduced])
- combined (if has-previous?
- (into previous facts)
- facts)
- combined-reduced
- (cond
- ;; Reduce all of the combined items for the first time if there are
- ;; now matches, and nothing was reduced before.
- (and has-matches?
- (= ::not-reduced previous-reduced))
- (do-accumulate accumulator combined)
-
- ;; There are matches, a previous reduced value for the previous items and a
- ;; :combine-fn is given. Use the :combine-fn on both the previously reduced
- ;; and the newly reduced results.
- (and has-matches?
- (:combine-fn accumulator))
- ((:combine-fn accumulator) previous-reduced (do-accumulate accumulator facts))
-
- ;; There are matches and there is a previous reduced value for the previous
- ;; items. So just add the new items to the accumulated value.
- has-matches?
- (do-accumulate (assoc accumulator :initial-value previous-reduced) facts)
-
- ;; There are no matches right now. So do not perform any accumulations.
- ;; If there are never matches, time will be saved by never reducing.
- :else
- ::not-reduced)
-
- converted (when (and (some? combined-reduced)
- (not= ::not-reduced combined-reduced))
- (convert-return-fn combined-reduced))
-
- previous-converted (when (and has-previous?
- (some? previous-reduced)
- (not= ::not-reduced previous-reduced))
- (convert-return-fn previous-reduced))
-
- accum-reduced ^::accum-node [combined combined-reduced]]]
-
- ;; Add the combined results to memory.
- (l/add-accum-reduced! listener node join-bindings accum-reduced bindings)
- (mem/add-accum-reduced! memory node join-bindings accum-reduced bindings)
-
- (cond
-
- ;; Do nothing when the result was nil before and after.
- (and (nil? previous-converted)
- (nil? converted))
- nil
-
- (nil? converted)
- (doseq [token matched-tokens]
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
- transport memory listener))
-
- (nil? previous-converted)
- (doseq [token matched-tokens]
- (send-accumulated node accum-condition accumulator result-binding token converted bindings
- transport memory listener))
-
-        ;; If there are previous results, then propagate downstream if the new result differs from
-        ;; the previous result. If the new result is equal to the previous result don't do
-        ;; anything. Note that the memory has already been updated with the new combined value,
-        ;; which may be needed if elements in memory change later.
- (not= converted previous-converted)
- ;; There is no requirement that we doseq over all retractions then doseq over propagations; we could
- ;; just as easily doseq over tokens at the top level and retract and propagate for each token in turn.
- ;; In the absence of hard evidence either way, doing it this way is just an educated guess as to
- ;; which is likely to be more performant.
- (do
- (doseq [token matched-tokens]
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
- transport memory listener))
- (doseq [token matched-tokens]
- (send-accumulated node accum-condition accumulator result-binding token converted bindings
- transport memory listener))))))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
-
- (l/right-activate! listener node elements)
-    ;; The simple right-activate implementation simply defers to
-    ;; accumulator-specific logic.
- (right-activate-reduced
- node
- join-bindings
- (pre-reduce node elements)
- memory
- transport
- listener))
-
- (right-retract [node join-bindings elements memory transport listener]
-
- (l/right-retract! listener node elements)
-
- (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
- ;; As in right-activate-reduced, a token can match with multiple groupings of elements
- ;; by their bindings.
- matched-tokens (mem/get-tokens memory node join-bindings)
- has-matches? (seq matched-tokens)]
- [bindings elements] (platform/group-by-seq :bindings elements)
-
- :let [previous (mem/get-accum-reduced memory node join-bindings bindings)
- has-previous? (not= ::mem/no-accum-reduced previous)
- [previous previous-reduced] (if has-previous?
- previous
- ^::accum-node [::mem/no-accum-reduced ::not-reduced])]
-
- ;; No need to retract anything if there were no previous items.
- :when has-previous?
-
- ;; Compute the new version with the retracted information.
- :let [facts (mapv :fact elements)
- [removed retracted] (mem/remove-first-of-each facts previous)
- all-retracted? (empty? retracted)
-                  ;; If there is a previous value and there are matches, a value would
-                  ;; have been accumulated and propagated, so there is something to
-                  ;; retract and re-accumulate in its place.
-                  ;; Otherwise, no reduce is needed right now.
- retracted-reduced (if (and has-matches?
- (not all-retracted?))
- ;; Use the provided :retract-fn if one is provided.
- ;; Otherwise, just re-accumulate based on the
- ;; remaining items after retraction.
- (if-let [retract-fn (:retract-fn accumulator)]
- (r/reduce retract-fn previous-reduced removed)
- (do-accumulate accumulator retracted))
- ::not-reduced)
-
-                  ;; It is possible that either the retracted or the previous reduced value is ::not-reduced
- ;; at this point if there are no matching tokens. has-matches? indicates this. If
- ;; this is the case, there are no converted values to calculate. However, memory still
- ;; will be updated since the facts left after this retraction still need to be stored
- ;; for later possible activations.
- retracted-converted (when (and (some? retracted-reduced)
- (not= ::not-reduced retracted-reduced))
- (convert-return-fn retracted-reduced))
- previous-converted (when (and (some? previous-reduced)
- (not= ::not-reduced previous-reduced))
- (convert-return-fn previous-reduced))]]
-
- (if all-retracted?
- (do
- ;; When everything has been retracted we need to remove the accumulated results from memory.
- (l/remove-accum-reduced! listener node join-bindings bindings)
- (mem/remove-accum-reduced! memory node join-bindings bindings)
-
- (doseq [:when (some? previous-converted)
- token matched-tokens]
- ;; Retract the previous token.
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
- transport memory listener))
-
- (let [initial-value (:initial-value accumulator)
-
- initial-converted (when initial-value
- (convert-return-fn initial-value))]
-
- (when (and (some? initial-converted)
- (empty? new-bindings))
-
- (doseq [token matched-tokens]
- (l/add-accum-reduced! listener node join-bindings ^::accum-node [[] initial-value] join-bindings)
- (mem/add-accum-reduced! memory node join-bindings ^::accum-node [[] initial-value] join-bindings)
- (send-accumulated node accum-condition accumulator result-binding token initial-converted {}
- transport memory listener)))))
- (do
- ;; Add our newly retracted information to our node.
- (l/add-accum-reduced! listener node join-bindings ^::accum-node [retracted retracted-reduced] bindings)
- (mem/add-accum-reduced! memory node join-bindings ^::accum-node [retracted retracted-reduced] bindings)
-
- (cond
- (and (nil? previous-converted)
- (nil? retracted-converted))
- nil
-
- (nil? previous-converted)
- (doseq [token matched-tokens]
- (send-accumulated node accum-condition accumulator result-binding token retracted-converted bindings
- transport memory listener))
-
- (nil? retracted-converted)
- (doseq [token matched-tokens]
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
- transport memory listener))
-
- (not= retracted-converted previous-converted)
- ;; There is no requirement that we doseq over all retractions then doseq over propagations; we could
- ;; just as easily doseq over tokens at the top level and retract and propagate for each token in turn.
- ;; In the absence of hard evidence either way, doing it this way is just an educated guess as to
- ;; which is likely to be more performant.
- (do
- (doseq [token matched-tokens]
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings
- transport memory listener))
- (doseq [token matched-tokens]
- (send-accumulated node accum-condition accumulator result-binding token retracted-converted bindings
- transport memory listener))))))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [accumulator from]} accum-condition
- {:keys [type constraints]} from
- condition (into [type] constraints)
- result-symbol (symbol (name result-binding))]
- [result-symbol '<- accumulator :from condition]))
-
- IAccumInspect
- (token->matching-elements [this memory token]
- ;; Tokens are stored in the memory keyed on join bindings with previous nodes and new bindings
- ;; introduced in this node. Each of these sets of bindings is known at the time of rule network
- ;; compilation. It is expected that this function will receive tokens that were propagated from this
- ;; node to its children and may have had other bindings added in the process. The bindings map entries
-    ;; in the tokens created by descendants based on tokens propagated from ancestors are supersets of the bindings
-    ;; in each ancestor. Put differently, if token T1 is passed to a child that creates a token T2 based on it
- ;; and passes it to its children, the following statement is true:
-    ;; (= (select-keys (:bindings t2) (-> t1 :bindings keys))
- ;; (:bindings t1))
- ;; This being the case, we can use the downstream token to find out what binding key-value pairs were used
- ;; to create the token "stream" of which it is part.
- (let [join-bindings (-> token :bindings (select-keys (get-join-keys this)))
- fact-bindings (-> token :bindings (select-keys new-bindings))]
- (first (mem/get-accum-reduced memory this join-bindings (merge join-bindings fact-bindings))))))
-
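A small, hypothetical illustration of the binding relationship that token->matching-elements relies on: a token created by a descendant carries every binding of the ancestor token it was derived from, so restricting its bindings to the ancestor's keys recovers the ancestor's bindings.

(comment
  (let [t1 {:bindings {:?a 1}}          ;; token propagated from this node
        t2 {:bindings {:?a 1 :?b 2}}]   ;; token a descendant derived from t1
    (= (select-keys (:bindings t2) (keys (:bindings t1)))
       (:bindings t1))))
;; => true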
-(defn- filter-accum-facts
- "Run a filter on elements against a given token for constraints that are not simple hash joins."
- [node join-filter-fn token candidate-facts bindings]
- (filter #(join-node-matches node join-filter-fn token % bindings {}) candidate-facts))
-
-;; A specialization of the AccumulateNode that supports additional tests
-;; that have to occur on the beta side of the network. The key difference between this and the simple
-;; accumulate node is the join-filter-fn, which accepts a token and a fact and filters out facts that
-;; are not consistent with the given token.
-(defrecord AccumulateWithJoinFilterNode [id accum-condition accumulator join-filter-fn
- result-binding children binding-keys new-bindings]
-
- ILeftActivate
- (left-activate [node join-bindings tokens memory transport listener]
-
- (l/left-activate! listener node tokens)
-
-    ;; Facts that are candidates for matching the token are used in this accumulator node;
-    ;; they must be filtered before running the accumulation.
- (let [convert-return-fn (:convert-return-fn accumulator)
- grouped-candidate-facts (mem/get-accum-reduced-all memory node join-bindings)]
- (mem/add-tokens! memory node join-bindings tokens)
-
- (cond
-
- (seq grouped-candidate-facts)
- (doseq [token tokens
- [fact-bindings candidate-facts] grouped-candidate-facts
-
- ;; Filter to items that match the incoming token, then apply the accumulator.
- :let [filtered-facts (filter-accum-facts node join-filter-fn token candidate-facts fact-bindings)]
-
- :when (or (seq filtered-facts)
-                          ;; Even if there are no filtered facts, if there are no new bindings we may
- ;; have an initial value to propagate.
- (and (some? (:initial-value accumulator))
- (empty? new-bindings)))
-
- :let [accum-result (do-accumulate accumulator filtered-facts)
- converted-result (when (some? accum-result)
- (convert-return-fn accum-result))]
-
- :when (some? converted-result)]
-
- (send-accumulated node accum-condition accumulator result-binding token
- converted-result fact-bindings transport memory listener))
-
- ;; There are no previously accumulated results, but we still may need to propagate things
- ;; such as a sum of zero items.
- ;; If all variables in the accumulated item are bound and an initial
- ;; value is provided, we can propagate the initial value as the accumulated item.
-
- ;; We need to not propagate nil initial values, regardless of whether the convert-return-fn
- ;; makes them non-nil, in order to not break existing code; this is discussed more in the
- ;; right-activate-reduced implementation.
- (and (some? (:initial-value accumulator))
- (empty? new-bindings)) ; An initial value exists that we can propagate.
- (let [initial-value (:initial-value accumulator)
-              ;; Note that we check that the :initial-value is non-nil above, which is why we
- ;; don't need (when initial-value (convert-return-fn initial-value)) here.
- converted-result (convert-return-fn initial-value)]
-
- (when (some? converted-result)
- ;; Send the created accumulated item to the children.
- (doseq [token tokens]
- (send-accumulated node accum-condition accumulator result-binding token
- converted-result join-bindings transport memory listener))))
-
- ;; Propagate nothing if the above conditions don't apply.
- :default nil)))
-
- (left-retract [node join-bindings tokens memory transport listener]
-
- (l/left-retract! listener node tokens)
-
- (let [;; Even if the accumulator didn't propagate anything before we still need to remove the tokens
- ;; in case they would have otherwise been used in the future.
- tokens (mem/remove-tokens! memory node join-bindings tokens)
- convert-return-fn (:convert-return-fn accumulator)
- grouped-candidate-facts (mem/get-accum-reduced-all memory node join-bindings)]
-
- (cond
-
- (seq grouped-candidate-facts)
- (doseq [token tokens
- [fact-bindings candidate-facts] grouped-candidate-facts
-
- :let [filtered-facts (filter-accum-facts node join-filter-fn token candidate-facts fact-bindings)]
-
- :when (or (seq filtered-facts)
-                          ;; Even if there are no filtered facts, if there are no new bindings an initial value
-                          ;; may have been propagated, and if so we need to retract it.
- (and (some? (:initial-value accumulator))
- (empty? new-bindings)))
-
- :let [accum-result (do-accumulate accumulator filtered-facts)
- retracted-converted (when (some? accum-result)
- (convert-return-fn accum-result))]
-
- ;; A nil retracted previous result should not have been propagated before.
- :when (some? retracted-converted)]
-
- (retract-accumulated node accum-condition accumulator result-binding token
- retracted-converted fact-bindings transport memory listener))
-
- (and (some? (:initial-value accumulator))
- (empty? new-bindings))
- (let [initial-value (:initial-value accumulator)
-              ;; Note that we check that the :initial-value is non-nil above, which is why we
- ;; don't need (when initial-value (convert-return-fn initial-value)) here.
- converted-result (convert-return-fn initial-value)]
-
- (when (some? converted-result)
- (doseq [token tokens]
- (retract-accumulated node accum-condition accumulator result-binding token
- converted-result join-bindings transport memory listener))))
-
- :else nil)))
-
- (get-join-keys [node] binding-keys)
-
- (description [node] (str "AccumulateWithBetaPredicateNode -- " accumulator))
-
- IAccumRightActivate
- (pre-reduce [node elements]
- ;; Return a map of bindings to the candidate facts that match them. This accumulator
- ;; depends on the values from parent facts, so we defer actually running the accumulator
- ;; until we have a token.
- (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
- [bindings (map :fact element-group)]))
-
- (right-activate-reduced [node join-bindings binding-candidates-seq memory transport listener]
-
- ;; Combine previously reduced items together, join to matching tokens,
- ;; and emit child tokens.
- (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
- matched-tokens (mem/get-tokens memory node join-bindings)]
- [bindings candidates] binding-candidates-seq
- :let [previous-candidates (mem/get-accum-reduced memory node join-bindings bindings)
- previously-reduced? (not= ::mem/no-accum-reduced previous-candidates)
- previous-candidates (when previously-reduced? previous-candidates)]]
-
- ;; Combine the newly reduced values with any previous items. Ensure that new items are always added to the end so that
- ;; we have a consistent order for retracting results from accumulators such as acc/all whose results can be in any order. Making this
- ;; ordering consistent allows us to skip the filter step on previous elements on right-activations.
- (let [combined-candidates (into []
- cat
- [previous-candidates candidates])]
-
- (l/add-accum-reduced! listener node join-bindings combined-candidates bindings)
-
- (mem/add-accum-reduced! memory node join-bindings combined-candidates bindings))
-
- (doseq [token matched-tokens
-
- :let [new-filtered-facts (filter-accum-facts node join-filter-fn token candidates bindings)]
-
- ;; If no new elements matched the token, we don't need to do anything for this token
- ;; since the final result is guaranteed to be the same.
- :when (seq new-filtered-facts)
-
- :let [previous-filtered-facts (filter-accum-facts node join-filter-fn token previous-candidates bindings)
-
- previous-accum-result-init (cond
- (seq previous-filtered-facts)
- (do-accumulate accumulator previous-filtered-facts)
-
- (and (-> accumulator :initial-value some?)
- (empty? new-bindings))
- (:initial-value accumulator)
-
- ;; Allow direct determination later of whether there was a previous value
- ;; as determined by the preceding cond conditions.
- :else ::no-previous-value)
-
- previous-accum-result (when (not= previous-accum-result-init ::no-previous-value)
- previous-accum-result-init)
-
- ;; Since the new elements are added onto the end of the previous elements in the accum-memory
- ;; accumulating using the new elements on top of the previous result is an accumulation in the same
- ;; order as the elements are present in memory. As a result, future accumulations on the contents of the accum memory
- ;; prior to further modification of that memory will return the same result as here. This is important since if we use
-                      ;; something like acc/all to accumulate and propagate [A B], then if B is retracted we need to retract [A B], not [B A]; the latter won't
- ;; actually retract anything, which would be invalid.
- accum-result (let [accum-previous-init (if (not= previous-accum-result-init ::no-previous-value)
- ;; If there was a previous result, use it as the initial value.
- (assoc accumulator :initial-value previous-accum-result)
- ;; If there was no previous result, use the default initial value.
- ;; Note that if there is a non-nil initial value but there are new binding
- ;; groups we consider there to have been no previous value, but we still want
- ;; to use the actual initial value, not nil.
- accumulator)]
- (do-accumulate accum-previous-init new-filtered-facts))
-
- previous-converted (when (some? previous-accum-result)
- (convert-return-fn previous-accum-result))
-
- new-converted (when (some? accum-result)
- (convert-return-fn accum-result))]]
-
- (cond
-
- ;; When both the new and previous result were nil do nothing.
- (and (nil? previous-converted)
- (nil? new-converted))
- nil
-
- (nil? new-converted)
- (retract-accumulated node accum-condition accumulator result-binding token
- previous-converted bindings transport memory listener)
-
- (nil? previous-converted)
- (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener)
-
- (not= new-converted previous-converted)
- (do
- (retract-accumulated node accum-condition accumulator result-binding token
- previous-converted bindings transport memory listener)
- (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener))))))
-
- IRightActivate
- (right-activate [node join-bindings elements memory transport listener]
-
- (l/right-activate! listener node elements)
-
-    ;; The simple right-activate implementation simply defers to
-    ;; accumulator-specific logic.
- (right-activate-reduced
- node
- join-bindings
- (pre-reduce node elements)
- memory
- transport
- listener))
-
- (right-retract [node join-bindings elements memory transport listener]
-
- (l/right-retract! listener node elements)
-
- (doseq [:let [convert-return-fn (:convert-return-fn accumulator)
- matched-tokens (mem/get-tokens memory node join-bindings)]
- [bindings elements] (platform/group-by-seq :bindings elements)
- :let [previous-candidates (mem/get-accum-reduced memory node join-bindings bindings)]
-
- ;; No need to retract anything if there was no previous item.
- :when (not= ::mem/no-accum-reduced previous-candidates)
-
- :let [facts (mapv :fact elements)
- new-candidates (second (mem/remove-first-of-each facts previous-candidates))]]
-
- ;; Add the new candidates to our node.
- (l/add-accum-reduced! listener node join-bindings new-candidates bindings)
- (mem/add-accum-reduced! memory node join-bindings new-candidates bindings)
-
- (doseq [;; Get all of the previously matched tokens so we can retract and re-send them.
- token matched-tokens
-
- :let [previous-facts (filter-accum-facts node join-filter-fn token previous-candidates bindings)
-
- new-facts (filter-accum-facts node join-filter-fn token new-candidates bindings)]
-
- ;; The previous matching elements are a superset of the matching elements after retraction.
- ;; Therefore, if the counts before and after are equal nothing retracted actually matched
- ;; and we don't need to do anything else here since the end result shouldn't change.
- :when (not= (count previous-facts)
- (count new-facts))
-
- :let [;; We know from the check above that matching elements existed previously,
- ;; since if there were no previous matching elements the count of matching
- ;; elements before and after a right-retraction cannot be different.
- previous-result (do-accumulate accumulator previous-facts)
-
- ;; TODO: Can we use the retract-fn here if present to improve performance? We'd also potentially
- ;; avoid needing to filter facts twice above, since elements present both before and after retraction
-                    ;; will be given to the join-filter-fn twice (once when creating previous-facts and once when creating new-facts).
- ;; Note that any future optimizations here must ensure that the result propagated here is equal to the result
- ;; that will be recreated as the previous result in right activate, and that this can be dependent on the order
- ;; of candidates in the memory, since, for example (acc/all) can return both [A B] and [B A] but these are not equal.
-
- new-result (cond
-
- (seq new-facts)
- (do-accumulate accumulator new-facts)
-
- (and (-> accumulator :initial-value some?)
- (empty? new-bindings))
- (:initial-value accumulator)
-
- :else nil)
-
- previous-converted (when (some? previous-result)
- (convert-return-fn previous-result))
-
- new-converted (when (some? new-result)
- (convert-return-fn new-result))]]
-
- (cond
-
- ;; When both the previous and new results are nil do nothing.
- (and (nil? previous-converted)
- (nil? new-converted))
- nil
-
- (nil? new-converted)
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings transport memory listener)
-
- (nil? previous-converted)
- (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener)
-
- (not= previous-converted new-converted)
- (do
- (retract-accumulated node accum-condition accumulator result-binding token previous-converted bindings transport memory listener)
- (send-accumulated node accum-condition accumulator result-binding token new-converted bindings transport memory listener))))))
-
- IConditionNode
- (get-condition-description [this]
- (let [{:keys [accumulator from]} accum-condition
- {:keys [type constraints original-constraints]} from
- result-symbol (symbol (name result-binding))
- full-constraints (if (seq original-constraints)
- original-constraints
- constraints)
- condition (into [type] full-constraints)]
- [result-symbol '<- accumulator :from condition]))
-
- ;; The explanation of the implementation of token->matching-elements on AccumulateNode applies here as well.
- ;; Note that since we store all facts propagated from the alpha network to this condition in the accum memory,
- ;; regardless of whether they meet the join condition with upstream facts from the beta network, we rerun the
-  ;; join filter function. Since the :matches are not used in the join filter function and the bindings in the
- ;; token will contain all bindings used in the "ancestor token" to join with these same facts, we can just pass the token
- ;; as-is to the join filter.
- IAccumInspect
- (token->matching-elements [this memory token]
- (let [join-bindings (-> token :bindings (select-keys (get-join-keys this)))
- fact-bindings (-> token :bindings (select-keys new-bindings))
- unfiltered-facts (mem/get-accum-reduced memory this join-bindings (merge join-bindings fact-bindings))]
- ;; The functionality to throw conditions with meaningful information assumes that all bindings in the token
- ;; are meaningful to the join, which is not the case here since the token passed is from a descendant of this node, not
- ;; this node. The generated error message also wouldn't make much sense in the context of session inspection.
- ;; We could create specialized error handling here, but in reality most cases that cause errors here would also cause
- ;; errors at rule firing time so the benefit would be limited. Nevertheless there would be some benefit and it is
-      ;; possible that we will do it in the future.
- (filter (fn [fact] (join-filter-fn token fact fact-bindings {}))
- unfiltered-facts))))
-
-;; This lives here as it is both close to the node that it represents, and is accessible to both clj and cljs
-(def node-type->abbreviated-type
-  "To minimize function name length and to help prevent issues with filename length, we can use these abbreviations to
- shorten the node types. Used during compilation of the rules network."
- {"AlphaNode" "AN"
- "TestNode" "TN"
- "AccumulateNode" "AccN"
- "AccumulateWithJoinFilterNode" "AJFN"
- "ProductionNode" "PN"
- "NegationWithJoinFilterNode" "NJFN"
- "ExpressionJoinNode" "EJN"})
-
-(defn variables-as-keywords
- "Returns symbols in the given s-expression that start with '?' as keywords"
- [expression]
- (into #{} (platform/eager-for [item (tree-seq coll? seq expression)
- :when (and (symbol? item)
- (= \? (first (name item))))]
- (keyword item))))
-
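For example, with a hypothetical constraint expression, every symbol beginning with '?' is collected and keywordized:

(comment
  (variables-as-keywords '(= ?name (:name this))))
;; => #{:?name}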
-(defn conj-rulebases
- "DEPRECATED. Simply concat sequences of rules and queries.
-
- Conjoin two rulebases, returning a new one with the same rules."
- [base1 base2]
- (concat base1 base2))
-
-(defn fire-rules*
- "Fire rules for the given nodes."
- [rulebase nodes transient-memory transport listener get-alphas-fn update-cache]
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener listener}]
-
- (loop [next-group (mem/next-activation-group transient-memory)
- last-group nil]
-
- (if next-group
-
- (if (and last-group (not= last-group next-group))
-
- ;; We have changed groups, so flush the updates from the previous
- ;; group before continuing.
- (do
- (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group)))
-
- (do
-
- ;; If there are activations, fire them.
- (when-let [{:keys [node token] :as activation} (mem/pop-activation! transient-memory)]
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- (let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])]
- (binding [*rule-context* {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]
-
- ;; Fire the rule itself.
- (try
- ((:rhs node) token (:env (:production node)))
- ;; Don't do anything if a given insertion type has no corresponding
-                    ;; facts to avoid complicating traces. Note that since no RHS's of
-                    ;; downstream rules are fired here, everything is governed by truth maintenance.
- ;; Therefore, the reordering of retractions and insertions should have no impact
- ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
- ;; of the rules engine.
- ;;
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [retrieved-unconditional-insertions @batched-unconditional-insertions
- retrieved-logical-insertions @batched-logical-insertions
- retrieved-rhs-retractions @batched-rhs-retractions]
- (l/fire-activation! listener
- activation
- {:unconditional-insertions retrieved-unconditional-insertions
- :logical-insertions retrieved-logical-insertions
- :rhs-retractions retrieved-rhs-retractions})
- (when-let [batched (seq retrieved-unconditional-insertions)]
- (flush-insertions! batched true))
- (when-let [batched (seq retrieved-logical-insertions)]
- (flush-insertions! batched false))
- (when-let [batched (seq retrieved-rhs-retractions)]
- (flush-rhs-retractions! batched)))
- (catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e)))))
-
- ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
- ;; will be in context for child rules.
- (when (some-> node :production :props :no-loop)
- (flush-updates *current-session*)))))
-
- (recur (mem/next-activation-group transient-memory) next-group)))
-
- ;; There were no items to be activated, so flush any pending
- ;; updates and recur with a potential new activation group
- ;; since a flushed item may have triggered one.
- (when (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group)))))))
-
-(deftype LocalSession [rulebase memory transport listener get-alphas-fn pending-operations]
- ISession
- (insert [session facts]
-
- (let [new-pending-operations (conj pending-operations (uc/->PendingUpdate :insertion
- ;; Preserve the behavior prior to https://github.com/cerner/clara-rules/issues/268
- ;; , particularly for the Java API, where the caller could freely mutate a
- ;; collection of facts after passing it to Clara for the constituent
- ;; facts to be inserted or retracted. If the caller passes a persistent
- ;; Clojure collection don't do any additional work.
- (if (coll? facts)
- facts
- (into [] facts))))]
-
- (LocalSession. rulebase
- memory
- transport
- listener
- get-alphas-fn
- new-pending-operations)))
-
- (retract [session facts]
-
- (let [new-pending-operations (conj pending-operations (uc/->PendingUpdate :retraction
- ;; As in insert above defend against facts being a mutable collection.
- (if (coll? facts)
- facts
- (into [] facts))))]
-
- (LocalSession. rulebase
- memory
- transport
- listener
- get-alphas-fn
- new-pending-operations)))
-
- ;; Prior to issue 249 we only had a one-argument fire-rules method. clara.rules/fire-rules will always call the two-argument method now
- ;; but we kept a one-argument version of the fire-rules in case anyone is calling the fire-rules protocol function or method on the session directly.
- (fire-rules [session] (fire-rules session {}))
- (fire-rules [session opts]
-
- (let [transient-memory (mem/to-transient memory)
- transient-listener (l/to-transient listener)]
-
- (if-not (:cancelling opts)
- ;; We originally performed insertions and retractions immediately after the insert and retract calls,
- ;; but this had the downside of making a pattern like "Retract facts, insert other facts, and fire the rules"
- ;; perform at least three transitions between a persistent and transient memory. Delaying the actual execution
- ;; of the insertions and retractions until firing the rules allows us to cut this down to a single transition
- ;; between persistent and transient memory. There is some cost to the runtime dispatch on operation types here,
- ;; but this is presumably less significant than the cost of memory transitions.
- ;;
- ;; We perform the insertions and retractions in the same order as they were applied to the session since
- ;; if a fact is not in the session, retracted, and then subsequently inserted it should be in the session at
- ;; the end.
- (do
- (doseq [{op-type :type facts :facts} pending-operations]
-
- (case op-type
-
- :insertion
- (do
- (l/insert-facts! transient-listener nil nil facts)
-
- (binding [*pending-external-retractions* (atom [])]
- ;; Bind the external retractions cache so that any logical retractions as a result
- ;; of these insertions can be cached and executed as a batch instead of eagerly realizing
- ;; them. An external insertion of a fact that matches
- ;; a negation or accumulator condition can cause logical retractions.
- (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))
-
- :retraction
- (do
- (l/retract-facts! transient-listener nil nil facts)
-
- (binding [*pending-external-retractions* (atom facts)]
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))))
-
- (fire-rules* rulebase
- (:production-nodes rulebase)
- transient-memory
- transport
- transient-listener
- get-alphas-fn
- (uc/get-ordered-update-cache)))
-
- (let [insertions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :insertion)))
- (mapcat :facts))
- pending-operations)
-
- retractions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :retraction)))
- (mapcat :facts))
- pending-operations)
-
- update-cache (ca/get-cancelling-update-cache)]
-
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener transient-listener}]
-
- ;; Insertions should come before retractions so that if we insert and then retract the same
- ;; fact that is not already in the session the end result will be that the session won't have that fact.
- ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
- ;; and then later we would insert the fact.
- (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
-
- (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
- root alpha-roots]
- (alpha-retract root fact-group transient-memory transport transient-listener))
-
- (fire-rules* rulebase
- (:production-nodes rulebase)
- transient-memory
- transport
- transient-listener
- get-alphas-fn
- ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
- ;; If this is suboptimal for some workflows we can revisit this.
- update-cache))))
-
- (LocalSession. rulebase
- (mem/to-persistent! transient-memory)
- transport
- (l/to-persistent! transient-listener)
- get-alphas-fn
- [])))
-
- (query [session query params]
- (let [query-node (get-in rulebase [:query-nodes query])]
- (when (= nil query-node)
- (platform/throw-error (str "The query " query " is invalid or not included in the rule base.")))
- (when-not (= (into #{} (keys params)) ;; nil params should be equivalent to #{}
- (:param-keys query-node))
- (platform/throw-error (str "The query " query " was not provided with the correct parameters, expected: "
- (:param-keys query-node) ", provided: " (set (keys params)))))
-
- (->> (mem/get-tokens memory query-node params)
-
-         ;; Get the bindings for each token and filter generated symbols.
- (map (fn [{bindings :bindings}]
-
- ;; Filter generated symbols. We check first since this is an uncommon flow.
- (if (some #(re-find #"__gen" (name %)) (keys bindings))
-
- (into {} (remove (fn [[k v]] (re-find #"__gen" (name k)))
- bindings))
- bindings))))))
-
- (components [session]
- {:rulebase rulebase
- :memory memory
- :transport transport
- :listeners (l/flatten-listener listener)
- :get-alphas-fn get-alphas-fn}))
-
-(defn assemble
- "Assembles a session from the given components, which must be a map
- containing the following:
-
-   :rulebase A record matching the clara.rules.compiler/Rulebase structure.
- :memory An implementation of the clara.rules.memory/IMemoryReader protocol
- :transport An implementation of the clara.rules.engine/ITransport protocol
- :listeners A vector of listeners implementing the clara.rules.listener/IPersistentListener protocol
- :get-alphas-fn The function used to return the alpha nodes for a fact of the given type."
-
- [{:keys [rulebase memory transport listeners get-alphas-fn]}]
- (LocalSession. rulebase
- memory
- transport
- (if (> (count listeners) 0)
- (l/delegating-listener listeners)
- l/default-listener)
- get-alphas-fn
- []))
-
-(defn with-listener
- "Return a new session with the listener added to the provided session,
- in addition to all listeners previously on the session."
- [session listener]
- (let [{:keys [listeners] :as components} (components session)]
- (assemble (assoc components
- :listeners
- (conj listeners
- listener)))))
-
-(defn remove-listeners
- "Return a new session with all listeners matching the predicate removed"
- [session pred]
- (let [{:keys [listeners] :as components} (components session)]
- (if (some pred listeners)
- (assemble (assoc components
- :listeners
- (into [] (remove pred) listeners)))
- session)))
-
-(defn find-listeners
- "Return all listeners on the session matching the predicate."
- [session pred]
- (let [{:keys [listeners]} (components session)]
- (filterv pred listeners)))
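A hedged usage sketch of the three listener helpers above; some-session and tracing-listener are hypothetical values assumed to exist elsewhere. All three helpers round-trip the session through components and assemble.

(comment
  (let [with-tracing (with-listener some-session tracing-listener)]
    [(find-listeners with-tracing (partial identical? tracing-listener))
     (remove-listeners with-tracing (partial identical? tracing-listener))]))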
-
-(defn local-memory
- "Returns a local, in-process working memory."
- [rulebase transport activation-group-sort-fn activation-group-fn alphas-fn]
- (let [memory (mem/to-transient (mem/local-memory rulebase activation-group-sort-fn activation-group-fn alphas-fn))]
- (doseq [beta-node (:beta-roots rulebase)]
- (left-activate beta-node {} [empty-token] memory transport l/default-listener))
- (mem/to-persistent! memory)))
-
-(defn options->activation-group-sort-fn
- "Given the map of options for a session, construct an activation group sorting
- function that takes into account the user-provided salience and internal salience.
- User-provided salience is considered first. Under normal circumstances this function should
- only be called by Clara itself."
- [options]
- (let [user-activation-group-sort-fn (or (get options :activation-group-sort-fn)
- ;; Default to sort by descending numerical order.
- >)]
-
- ;; Compare user-provided salience first, using either the provided salience function or the default,
- ;; then use the internal salience if the former does not provide an ordering between the two salience values.
- (fn [salience1 salience2]
- (let [forward-result (user-activation-group-sort-fn (nth salience1 0)
- (nth salience2 0))]
- (if (number? forward-result)
- (if (= 0 forward-result)
- (> (nth salience1 1)
- (nth salience2 1))
-
- forward-result)
- (let [backward-result (user-activation-group-sort-fn (nth salience2 0)
- (nth salience1 0))
- forward-bool (boolean forward-result)
- backward-bool (boolean backward-result)]
- ;; Since we just use Clojure functions, for example >, equality may be implied
- ;; by returning false for comparisons in both directions rather than by returning 0.
- ;; Furthermore, ClojureScript will use truthiness semantics rather than requiring a
- ;; boolean (unlike Clojure), so we use the most permissive semantics between Clojure
- ;; and ClojureScript.
- (if (not= forward-bool backward-bool)
- forward-bool
- (> (nth salience1 1)
- (nth salience2 1)))))))))
-
-(def ^:private internal-salience-levels {:default 0
- ;; Extracted negations need to be prioritized over their original
- ;; rules since their original rule could fire before the extracted condition.
- ;; This is a problem if the original rule performs an unconditional insertion
- ;; or has other side effects not controlled by truth maintenance.
- :extracted-negation 1})
-
-(defn options->activation-group-fn
- "Given a map of options for a session, construct a function that takes a production
- and returns the activation group to which it belongs, considering both user-provided
- and internal salience. Under normal circumstances this function should only be called by
- Clara itself."
- [options]
- (let [rule-salience-fn (or (:activation-group-fn options)
- (fn [production] (or (some-> production :props :salience)
- 0)))]
-
- (fn [production]
- [(rule-salience-fn production)
- (internal-salience-levels (or (some-> production :props :clara-rules/internal-salience)
- :default))])))
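A brief sketch of how the two option-derived functions above cooperate, using a hypothetical production map: the activation-group function yields a [user-salience internal-salience] pair, and the sort function orders those pairs with user-provided salience taking precedence.

(comment
  (let [group-fn (options->activation-group-fn {})
        sort-fn  (options->activation-group-sort-fn {})]
    [(group-fn {:props {:salience 10}})  ;; => [10 0]
     (sort-fn [10 0] [5 0])]))           ;; => true, so salience 10 activates first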
diff --git a/build/clara/rules/java.clj b/build/clara/rules/java.clj
deleted file mode 100644
index 533409cd..00000000
--- a/build/clara/rules/java.clj
+++ /dev/null
@@ -1,50 +0,0 @@
-(ns clara.rules.java
-  "This namespace provides Java support. It is for internal use and may move in the future.
-  Users should use the Java API, or the clara.rules namespace from Clojure."
- (:require [clara.rules :as clara]
- [clara.rules.engine :as eng]
- [clara.rules.compiler :as com])
- (:refer-clojure :exclude [==])
- (:import [clara.rules WorkingMemory QueryResult]))
-
-(deftype JavaQueryResult [result]
- QueryResult
- (getResult [_ fieldName]
- (get result (keyword fieldName)))
- Object
- (toString [_]
- (.toString result)))
-
-(defn- run-query [session name args]
- (let [query-var (or (resolve (symbol name))
- (throw (IllegalArgumentException.
- (str "Unable to resolve symbol to query: " name))))
-
- ;; Keywordize string keys from Java.
- keyword-args (into {}
- (for [[k v] args]
- [(keyword k) v]))
- results (eng/query session (deref query-var) keyword-args)]
- (map #(JavaQueryResult. %) results)))
-
-(deftype JavaWorkingMemory [session]
- WorkingMemory
-
- (insert [this facts]
- (JavaWorkingMemory. (apply clara/insert session facts)))
-
- (retract [this facts]
- (JavaWorkingMemory. (apply clara/retract session facts)))
-
- (fireRules [this]
- (JavaWorkingMemory. (clara/fire-rules session)))
-
- (query [this name args]
- (run-query session name args))
-
- (query [this name]
- (run-query session name {})))
-
-(defn mk-java-session [rulesets]
- (JavaWorkingMemory.
- (com/mk-session (map symbol rulesets))))
diff --git a/build/clara/rules/listener.clj b/build/clara/rules/listener.clj
deleted file mode 100644
index a5b1368d..00000000
--- a/build/clara/rules/listener.clj
+++ /dev/null
@@ -1,176 +0,0 @@
-(ns clara.rules.listener
-  "Event listeners for analyzing the flow through Clara. This is primarily for use by
- tooling, but advanced users may use this to analyze sessions.")
-
-(defprotocol IPersistentEventListener
- (to-transient [listener]))
-
-;; TODO: Handle add-accum-reduced
-(defprotocol ITransientEventListener
- (left-activate! [listener node tokens])
- (left-retract! [listener node tokens])
- (right-activate! [listener node elements])
- (right-retract! [listener node elements])
- (insert-facts! [listener node token facts])
- (alpha-activate! [listener node facts])
- (insert-facts-logical! [listener node token facts])
- (retract-facts! [listener node token facts])
- (alpha-retract! [listener node facts])
- (retract-facts-logical! [listener node token facts])
- (add-accum-reduced! [listener node join-bindings result fact-bindings])
- (remove-accum-reduced! [listener node join-bindings fact-bindings])
- (add-activations! [listener node activations])
- (remove-activations! [listener node activations])
- (fire-activation! [listener activation resulting-operations])
- (fire-rules! [listener node])
- (activation-group-transition! [listener original-group new-group])
- (to-persistent! [listener]))
-
-;; A listener that does nothing.
-(deftype NullListener []
- ITransientEventListener
- (left-activate! [listener node tokens]
- listener)
- (left-retract! [listener node tokens]
- listener)
- (right-activate! [listener node elements]
- listener)
- (right-retract! [listener node elements]
- listener)
- (insert-facts! [listener node token facts]
- listener)
- (alpha-activate! [listener node facts]
- listener)
- (insert-facts-logical! [listener node token facts]
- listener)
- (retract-facts! [listener node token facts]
- listener)
- (alpha-retract! [listener node facts]
- listener)
- (retract-facts-logical! [listener node token facts]
- listener)
- (add-accum-reduced! [listener node join-bindings result fact-bindings]
- listener)
- (remove-accum-reduced! [listener node join-bindings fact-bindings]
- listener)
- (add-activations! [listener node activations]
- listener)
- (remove-activations! [listener node activations]
- listener)
- (fire-activation! [listener activation resulting-operations]
- listener)
- (fire-rules! [listener node]
- listener)
- (activation-group-transition! [listener original-group new-group]
- listener)
- (to-persistent! [listener]
- listener)
-
- IPersistentEventListener
- (to-transient [listener]
- listener))
-
-(declare delegating-listener)
-
-;; A listener that simply delegates to others
-(deftype DelegatingListener [children]
- ITransientEventListener
- (left-activate! [listener node tokens]
- (doseq [child children]
- (left-activate! child node tokens)))
-
- (left-retract! [listener node tokens]
- (doseq [child children]
- (left-retract! child node tokens)))
-
- (right-activate! [listener node elements]
- (doseq [child children]
- (right-activate! child node elements)))
-
- (right-retract! [listener node elements]
- (doseq [child children]
- (right-retract! child node elements)))
-
- (insert-facts! [listener node token facts]
- (doseq [child children]
- (insert-facts! child node token facts)))
-
- (alpha-activate! [listener node facts]
- (doseq [child children]
- (alpha-activate! child node facts)))
-
- (insert-facts-logical! [listener node token facts]
- (doseq [child children]
- (insert-facts-logical! child node token facts)))
-
- (retract-facts! [listener node token facts]
- (doseq [child children]
- (retract-facts! child node token facts)))
-
- (alpha-retract! [listener node facts]
- (doseq [child children]
- (alpha-retract! child node facts)))
-
- (retract-facts-logical! [listener node token facts]
- (doseq [child children]
- (retract-facts-logical! child node token facts)))
-
- (add-accum-reduced! [listener node join-bindings result fact-bindings]
- (doseq [child children]
- (add-accum-reduced! child node join-bindings result fact-bindings)))
-
- (remove-accum-reduced! [listener node join-bindings fact-bindings]
- (doseq [child children]
- (remove-accum-reduced! child node join-bindings fact-bindings)))
-
- (add-activations! [listener node activations]
- (doseq [child children]
- (add-activations! child node activations)))
-
- (remove-activations! [listener node activations]
- (doseq [child children]
- (remove-activations! child node activations)))
-
- (fire-activation! [listener activation resulting-operations]
- (doseq [child children]
- (fire-activation! child activation resulting-operations)))
-
- (fire-rules! [listener node]
- (doseq [child children]
- (fire-rules! child node)))
-
- (activation-group-transition! [listener original-group new-group]
- (doseq [child children]
- (activation-group-transition! child original-group new-group)))
-
- (to-persistent! [listener]
- (delegating-listener (map to-persistent! children))))
-
-(deftype PersistentDelegatingListener [children]
- IPersistentEventListener
- (to-transient [listener]
- (DelegatingListener. (map to-transient children))))
-
-(defn delegating-listener
- "Returns a listener that delegates to its children."
- [children]
- (PersistentDelegatingListener. children))
-
-(defn null-listener?
- "Returns true if the given listener is the null listener, false otherwise."
- [listener]
- (instance? NullListener listener))
-
-(defn get-children
- "Returns the children of a delegating listener."
- [^PersistentDelegatingListener listener]
- (.-children listener))
-
-;; Default listener.
-(def default-listener (NullListener.))
-
-(defn ^:internal ^:no-doc flatten-listener
- [listener]
- (if (null-listener? listener)
- []
- (get-children listener)))
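As a point of reference, a small usage sketch (not part of this patch) showing how the helpers above compose listeners:

(require '[clara.rules.listener :as l])

;; The default listener is the null listener, which ignores every event.
(l/null-listener? l/default-listener)          ;; => true

;; delegating-listener wraps children so they can later be made transient
;; and receive events as a group; get-children returns what was passed in.
(def combined (l/delegating-listener [l/default-listener]))
(l/get-children combined)                      ;; => the vector of children passed in

;; flatten-listener yields [] for the null listener, else the children.
(l/flatten-listener l/default-listener)        ;; => []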
diff --git a/build/clara/rules/memory.clj b/build/clara/rules/memory.clj
deleted file mode 100644
index 958234ba..00000000
--- a/build/clara/rules/memory.clj
+++ /dev/null
@@ -1,909 +0,0 @@
-(ns clara.rules.memory
- "This namespace is for internal use and may move in the future.
- Specification and default implementation of working memory"
- (:import [java.util
- Collections
- LinkedList
- NavigableMap
- PriorityQueue
- TreeMap]))
-
-(defprotocol IPersistentMemory
- (to-transient [memory]))
-
-(defprotocol IMemoryReader
- ;; Returns the rulebase associated with the given memory.
- (get-rulebase [memory])
-
- ;; Returns a function that produces a map of alpha nodes to
- ;; facts that match the type of the node
- (get-alphas-fn [memory])
-
-  ;; Returns the elements associated with the given node.
- (get-elements [memory node bindings])
-
- ;; Returns all elements associated with the given node, regardless of bindings.
- (get-elements-all [memory node])
-
- ;; Returns the tokens associated with the given node.
- (get-tokens [memory node bindings])
-
- ;; Returns all tokens associated with the given node, regardless of bindings
- (get-tokens-all [memory node])
-
- ;; Returns the reduced form of objects processed by an accumulator node
- ;; for facts that match the given bindings.
- (get-accum-reduced [memory node join-bindings fact-bindings])
-
- ;; Returns all reduced results for the given node that matches
- ;; the given join bindings, independent of the individual fact-bindings
- ;; created by the accumulator's condition.
- (get-accum-reduced-all [memory node join-bindings])
-
- ;; Returns a tuple of [join-bindings fact-bindings result] for all
- ;; accumulated items on this node.
- (get-accum-reduced-complete [memory node])
-
- ;; Returns insertions that occurred at the given node for the given token.
- ;; Returns a sequence of the form
- ;; [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]
- (get-insertions [memory node token])
-
- ;; Returns all insertions that occurred in the given node's RHS; this takes the form
- ;; {token [facts-inserted-for-one-rule-activation facts-inserted-for-another-rule-activation]}
- (get-insertions-all [memory node])
-
- ;; Returns a map of nodes with pending activations to the activations themselves.
- (get-activations [memory]))
-
-(defprotocol ITransientMemory
-
- ;; Adds working memory elements to the given working memory at the given node.
- (add-elements! [memory node join-bindings elements])
-
- ;; Remove working memory elements from the given working memory at the given node.
-  (remove-elements! [memory node join-bindings elements])
-
- ;; Add tokens to the given working memory at the given node.
- (add-tokens! [memory node join-bindings tokens])
-
- ;; Removes tokens from the given working memory at the given node.
- (remove-tokens! [memory node join-bindings tokens])
-
- ;; Adds the result of a reduced accumulator execution to the given memory and node.
- (add-accum-reduced! [memory node join-bindings accum-result fact-bindings])
-
-  ;; Removes the result of a reduced accumulator execution from the given memory and node.
- (remove-accum-reduced! [memory node join-bindings fact-bindings])
-
-  ;; Add a record that a given fact was inserted at a given node with
- ;; the given support. Used for truth maintenance.
- ;; This should be called at most once per rule activation.
- (add-insertions! [memory node token facts])
-
- ;; Removes all records of facts that were inserted at the given node
- ;; due to the given token. Used for truth maintenance.
- ;; This function returns a map of each token to the associated facts
- ;; it removed.
- (remove-insertions! [memory node token])
-
- ;; Add a sequence of activations.
- (add-activations! [memory production activations])
-
- ;; Pop an activation from the working memory. Returns nil if no
- ;; activations are pending.
- (pop-activation! [memory])
-
- ;; Returns the group of the next activation, or nil if none are pending.
- (next-activation-group [memory])
-
- ;; Remove the given activations from the working memory. This is expected
- ;; to return a tuple of the form [removed-activations unremoved-activations],
- ;; where unremoved-activations is comprised of activations passed to the memory
- ;; for removal but which were not removed because they were not present in the memory's
- ;; store of pending activations.
- (remove-activations! [memory production activations])
-
- ;; Clear all activations from the working memory
- (clear-activations! [memory])
-
- ;; Converts the transient memory to persistent form.
- (to-persistent! [memory]))
-
-(defn- coll-empty?
- "Returns true if the collection is empty. Does not call seq due to avoid
- overhead that may cause for non-persistent collection types, e.g.
- java.util.LinkedList, etc."
- [^java.util.Collection coll]
- (or (nil? coll) (.isEmpty coll)))
-
-(defn- list-remove!
- "Removes the item, to-remove, from the given list, lst. If it is found and removed,
- returns true. Otherwise returns false. Only removes the first element in the list
- that is equal to to-remove. Equality is done based on the given eq-pred function.
- If it isn't given, the default is = . If others are equal, they will not be removed.
- This is similar to java.util.List.remove(Object). lst is updated in place for performance.
- This implies that the list must support the mutable list interface, namely via the
- java.util.List.listIterator()."
- ([^java.util.List lst to-remove]
- (list-remove! lst to-remove =))
- ([^java.util.List lst to-remove eq-pred]
- (if-not (coll-empty? lst)
- (let [li (.listIterator lst)]
- (loop [x (.next li)]
- (cond
- (eq-pred to-remove x)
- (do
- (.remove li)
- true)
-
- (.hasNext li)
- (recur (.next li))
-
- :else
- false)))
- false)))
-
-(defn- add-all!
- "Adds all items from source to the destination dest collection
- destructively. Avoids using Collection.addAll() due to unnecessary
- performance overhead of calling Collection.toArray() on the
- incoming source. Returns dest."
- [^java.util.Collection dest source]
- (doseq [x source]
- (.add dest x))
- dest)
-
-(defn- ->linked-list
- "Creates a new java.util.LinkedList from the coll, but avoids using
- Collection.addAll(Collection) since there is unnecessary overhead
- in this of calling Collection.toArray() on coll."
- ^java.util.List [coll]
- (if (instance? LinkedList coll)
- coll
- (add-all! (LinkedList.) coll)))
-
-(defn- remove-first-of-each!
- "Remove the first instance of each item in the given remove-seq that
- appears in the collection coll. coll is updated in place for
- performance. This implies that the coll must support the mutable
- collection interface method Collection.remove(Object). Returns a tuple
- of the form [remove-seq-items-removed remove-seq-items-not-removed].
- An optional compare-fn can be given to specify how to compare remove-seq
- items against items in coll. The default compare-fn is = .
- For immutable collection removal, use the non-destructive remove-first-of-each
- defined below."
- ([remove-seq ^java.util.List coll]
- (remove-first-of-each! remove-seq coll =))
-
- ([remove-seq ^java.util.List coll compare-fn]
- ;; Optimization for special case of one item to remove,
- ;; which occurs frequently.
- (if (= 1 (count remove-seq))
- (let [to-remove (first remove-seq)]
- (if (list-remove! coll to-remove compare-fn)
- [remove-seq []]
- [[] remove-seq]))
-
- ;; Otherwise, perform a linear search for items to remove.
- (loop [to-remove (first remove-seq)
- remove-seq (next remove-seq)
- removed (transient [])
- not-removed (transient [])]
- (if to-remove
- (let [found? (list-remove! coll to-remove compare-fn)
- removed (if found?
- (conj! removed to-remove)
- removed)
- not-removed (if found?
- not-removed
- (conj! not-removed to-remove))]
- (recur (first remove-seq)
- (next remove-seq)
- removed
- not-removed))
-          ;; If this is expensive, using a mutable collection may be good to
- ;; consider here in a future optimization.
- [(persistent! removed) (persistent! not-removed)])))))
-
-(defn remove-first-of-each
- "Remove the first instance of each item in the given remove-seq that
- appears in the collection. This also tracks which items were found
- and removed. Returns a tuple of the form:
- [items-removed coll-with-items-removed items-not-removed]
-   This function does so eagerly because working memories with large
-   numbers of insertions and retractions can cause lazy sequences to
-   become deeply nested."
- [remove-seq coll]
- (cond
-
- ;; There is nothing to remove.
-    (empty? remove-seq) [[] coll []]
-
- ;; Otherwise, perform a linear search for items to remove.
- :else (loop [f (first coll)
- r (rest coll)
- [remove-seq items-removed result] [remove-seq (transient []) (transient [])]]
-
- (if f
- (recur (first r)
- (rest r)
-
- ;; Determine if f matches any of the items to remove.
- (loop [to-remove (first remove-seq)
- remove-seq (rest remove-seq)
- ;; Remember what is left to remove for later.
- left-to-remove (transient [])]
-
- ;; Try to find if f matches anything to-remove.
- (if to-remove
- (if (= to-remove f)
-
- ;; Found a match, so the search is done.
- [(persistent! (reduce conj! left-to-remove remove-seq))
- (conj! items-removed to-remove)
- result]
-
- ;; Keep searching for a match.
- (recur (first remove-seq)
- (rest remove-seq)
- (conj! left-to-remove to-remove)))
-
- ;; No matches found.
- [(persistent! left-to-remove)
- items-removed
- (conj! result f)])))
-
- [(persistent! items-removed) (persistent! result) remove-seq]))))
-
-(defn fast-token-compare [compare-fn token other-token]
- ;; Fastest path is if the two tokens are truly identical.
- (or (identical? token other-token)
- ;; Assumption is that both arguments given are tokens already.
- (and (let [bindings (:bindings token)
- other-bindings (:bindings other-token)]
- ;; Calling `count` on these Clojure maps shows up as a bottleneck
- ;; even with clojure.lang.IPersistentMap being clojure.lang.Counted unfortunately.
- (and (= (.size ^java.util.Map bindings)
- (.size ^java.util.Map other-bindings))
- ;; `every?` is too slow for a performance critical place like this. It
- ;; calls `seq` too many times on the underlying maps. Instead `seq` one
- ;; time and keep using that same seq.
- ;; Also avoiding Clojure destructuring since even that is not as fast
- ;; pre-1.9.0.
- (if-let [^clojure.lang.ISeq entries (.seq ^clojure.lang.Seqable bindings)]
- ;; Type hint to Indexed vs MapEntry just because MapEntry seems to be a
- ;; less stable impl detail to rely on.
- (loop [^clojure.lang.Indexed entry (.first entries)
- entries (.next entries)]
- (let [k (some-> entry (.nth 0))
- v (some-> entry (.nth 1))]
- (if (and k
- ;; other-bindings will always be persistent map so invoke
- ;; it directly. It is faster than `get`.
- (compare-fn v (other-bindings k)))
- (recur (some-> entries .first)
- (some-> entries .next))
- ;; If there is no k left, then every entry matched. If there is a k,
- ;; that means the comparison failed, so the maps aren't equal.
- (not k))))
- ;; Empty bindings on both sides.
- true)))
-
- ;; Check the :matches on each token. :matches need to be in the same order on both
- ;; tokens to be considered the same.
- (let [^clojure.lang.Indexed matches (:matches token)
- ^clojure.lang.Indexed other-matches (:matches other-token)
- count-matches (.size ^java.util.List matches)]
- (and (= count-matches
- (.size ^java.util.List other-matches))
- (loop [i 0]
- (cond
- (= i count-matches)
- true
-
-                   ;; Compare node-ids first, falling back to comparing facts. This will
-                   ;; very likely be the most expensive part to execute.
- (let [^clojure.lang.Indexed m (.nth matches i)
- ^clojure.lang.Indexed om (.nth other-matches i)]
- ;; A token :matches tuple is of the form [fact node-id].
- (and (= (.nth m 1) (.nth om 1))
- (compare-fn (.nth m 0) (.nth om 0))))
- (recur (inc i))
-
- :else
- false)))))))
-
-(defprotocol IdentityComparable
- (using-token-identity! [this bool]))
-
-(deftype RuleOrderedActivation [node-id
- token
- activation
- rule-load-order
- ^:unsynchronized-mutable use-token-identity?]
- IdentityComparable
- ;; NOTE! This should never be called on a RuleOrderedActivation instance that has been stored
- ;; somewhere in local memory because it could cause interference across threads that have
- ;; multiple versions of local memories that are sharing some of their state. This is only intended
- ;; to be called by ephemeral, only-local references to RuleOrderedActivation instances used to
- ;; search for activations to remove from memory when performing `remove-activations!` operations.
- ;; The reason this mutable state exists at all is to "flip" a single instance of a RuleOrderedActivation
- ;; from identity to value equality based comparable when doing the "two-pass" removal search operation
- ;; of `remove-activations!`. This avoids having to create different instances for each pass.
- (using-token-identity! [this bool]
- (set! use-token-identity? bool)
- this)
- Object
- ;; Two RuleOrderedActivation instances should be equal if and only if their
- ;; activation is equal. Note that if the node of two activations is the same,
- ;; their rule-load-order must be the same. Using a deftype wrapper allows us to
- ;; use Clojure equality to determine this while placing the wrapper in a Java data
- ;; structure that uses Java equality; the Java equality check will simply end up calling
- ;; Clojure equality checks.
- (equals [this other]
- ;; Note that the .equals method is only called by PriorityQueue.remove, and the object provided
- ;; to the .remove method will never be identical to any object in the queue. A short-circuiting
- ;; check for reference equality would therefore be pointless here because it would never be true.
- (boolean
- (when (instance? RuleOrderedActivation other)
- (let [^RuleOrderedActivation other other]
- (and
- ;; If the node-id of two nodes is equal then we can assume that the nodes are equal.
- (= node-id
- (.node-id other))
-
- ;; We check with identity based semantics on the other when the use-token-identity? field
- ;; indicates to do so.
- (if (or use-token-identity? (.-use-token-identity? other))
- (fast-token-compare identical? token (.-token other))
- (fast-token-compare = token (.-token other)))))))))
-
-(defn- ->rule-ordered-activation
- "Take an activation from the engine and wrap it in a map that includes information
- on the rule load order. In Clojure, as opposed to ClojureScript, each activation should
- be wrapped in this way exactly once (that is, the value of the :activation key should
- be an activation from the engine.)"
- ([activation]
- (->rule-ordered-activation activation false))
- ([activation use-token-identity?]
- (let [node (:node activation)]
- (RuleOrderedActivation. (:id node)
- (:token activation)
- activation
- (or (-> node
- :production
- meta
- :clara.rules.compiler/rule-load-order)
- 0)
- use-token-identity?))))
-
-(defn- queue-activations!
- "Add activations to a queue. The wrap-rule-order? option should be true
- unless the activations in question have previously been wrapped."
- ([^java.util.Queue pq activations]
- (queue-activations! pq activations true))
- ([^java.util.Queue pq activations wrap-rule-order?]
- (if wrap-rule-order?
- (doseq [act activations]
- (.add pq (->rule-ordered-activation act)))
- (doseq [act activations]
- (.add pq act)))
- pq))
-
-(defn- ->activation-priority-queue
- "Given activations, create a priority queue based on rule ordering.
- The activations should be wrapped by using the wrap-rule-order? option
- if they have not been wrapped already."
- ([activations]
- (->activation-priority-queue activations true))
- ([activations wrap-rule-order?]
- (let [init-cnt (count activations)
- ;; Note that 11 is the default initial value; there is no constructor
- ;; for PriorityQueue that takes a custom comparator and does not require
- ;; an initial size to be passed.
- pq (PriorityQueue. (if (pos? init-cnt) init-cnt 11)
- (fn [^RuleOrderedActivation x
- ^RuleOrderedActivation y]
- (compare (.rule-load-order x)
- (.rule-load-order y))))]
- (queue-activations! pq activations wrap-rule-order?))))
-
-(declare ->PersistentLocalMemory)
-
-;;; Transient local memory implementation. Typically only persistent memory will be visible externally.
-
-(deftype TransientLocalMemory [rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- ^:unsynchronized-mutable alpha-memory
- ^:unsynchronized-mutable beta-memory
- ^:unsynchronized-mutable accum-memory
- ^:unsynchronized-mutable production-memory
- ^:unsynchronized-mutable ^NavigableMap activation-map]
-
- IMemoryReader
- (get-rulebase [memory] rulebase)
-
- (get-alphas-fn [memory] alphas-fn)
-
- (get-elements [memory node bindings]
- (get (get alpha-memory (:id node) {})
- bindings
- []))
-
- (get-elements-all [memory node]
- (sequence
- cat
- (vals
- (get alpha-memory (:id node) {}))))
-
- (get-tokens [memory node bindings]
- (get (get beta-memory (:id node) {})
- bindings
- []))
-
- (get-tokens-all [memory node]
- (sequence
- cat
- (vals (get beta-memory (:id node) {}))))
-
- (get-accum-reduced [memory node join-bindings fact-bindings]
- (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
-
- (get-accum-reduced-all [memory node join-bindings]
- (get
- (get accum-memory (:id node) {})
- join-bindings))
-
- ;; TODO: rename existing get-accum-reduced-all and use something better here.
- (get-accum-reduced-complete [memory node]
- (for [[join-binding joins] (get accum-memory (:id node) {})
- [fact-binding reduced] joins]
- {:join-bindings join-binding
- :fact-bindings fact-binding
- :result reduced}))
-
- (get-insertions [memory node token]
- (get
- (get production-memory (:id node) {})
- token
- []))
-
- (get-insertions-all [memory node]
- (get production-memory (:id node) {}))
-
- (get-activations [memory]
- (into []
- (comp cat
- (map (fn [^RuleOrderedActivation a]
- (.activation a))))
- (vals activation-map)))
-
- ITransientMemory
- (add-elements! [memory node join-bindings elements]
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings)]
-
- (cond
- ;; When changing existing persistent collections, just add on
- ;; the new elements.
- (coll? previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (assoc binding-element-map
- join-bindings
- (into previous-elements elements))))
-
- ;; Already mutable, so update-in-place.
- previous-elements
- (add-all! previous-elements elements)
-
- ;; No previous. We can just leave it persistent if it is
- ;; until we actually need to modify anything. This avoids
- ;; unnecessary copying.
- elements
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (assoc binding-element-map
- join-bindings
- elements))))))
-
- (remove-elements! [memory node join-bindings elements]
- ;; Do nothing when no elements to remove.
- (when-not (coll-empty? elements)
- (let [binding-element-map (get alpha-memory (:id node) {})
- previous-elements (get binding-element-map join-bindings)]
- (cond
- ;; Do nothing when no previous elements to remove from.
- (coll-empty? previous-elements)
- []
-
- ;; Convert persistent collection to a mutable one prior to calling remove-first-of-each!
- ;; alpha-memory needs to be updated this time since there is now going to be a mutable
- ;; collection associated in this memory location instead.
- (coll? previous-elements)
- (let [remaining-elements (->linked-list previous-elements)
- removed-elements (first (remove-first-of-each! elements remaining-elements))]
- ;; If there are no remaining elements under a binding group for the node remove the binding group.
- ;; This allows these binding values to be garbage collected.
- (let [new-bindings-map (if (.isEmpty ^java.util.List remaining-elements)
- (dissoc binding-element-map join-bindings)
- (assoc binding-element-map
- join-bindings
- remaining-elements))]
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- new-bindings-map))
- removed-elements))
-
- ;; Already mutable, so we do not need to re-associate to alpha-memory.
- previous-elements
- (let [removed-elements (first (remove-first-of-each! elements previous-elements))]
- (when (.isEmpty ^java.util.List previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- (:id node)
- (dissoc binding-element-map join-bindings))))
- removed-elements)))))
-
- (add-tokens! [memory node join-bindings tokens]
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- (coll? previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (assoc binding-token-map
- join-bindings
- (into previous-tokens tokens))))
-
- previous-tokens
- (add-all! previous-tokens tokens)
-
- tokens
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (assoc binding-token-map
- join-bindings
- tokens))))))
-
- (remove-tokens! [memory node join-bindings tokens]
- ;; The reasoning here is the same as remove-elements!
- (when-not (coll-empty? tokens)
- (let [binding-token-map (get beta-memory (:id node) {})
- previous-tokens (get binding-token-map join-bindings)]
- (if
- (coll-empty? previous-tokens)
- []
-
-          (let [;; Attempt to remove tokens using the faster identity-based equality first since
- ;; most of the time this is all we need and it can be much faster. Any token that
- ;; wasn't removed via identity, has to be "retried" with normal value-based
- ;; equality though since those semantics are supported within the engine. This
- ;; slower path should be rare for any heavy retraction flows - such as those that come
- ;; via truth maintenance.
- two-pass-remove! (fn [remaining-tokens tokens]
- (let [[removed-tokens not-removed-tokens]
- (remove-first-of-each! tokens
- remaining-tokens
- (fn [t1 t2]
- (fast-token-compare identical? t1 t2)))]
-
- (if-let [other-removed (and (seq not-removed-tokens)
- (-> not-removed-tokens
- (remove-first-of-each! remaining-tokens
- (fn [t1 t2]
- (fast-token-compare = t1 t2)))
- first
- seq))]
- (into removed-tokens other-removed)
- removed-tokens)))]
- (cond
- (coll? previous-tokens)
- (let [remaining-tokens (->linked-list previous-tokens)
- removed-tokens (two-pass-remove! remaining-tokens tokens)
- new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
- (dissoc binding-token-map join-bindings)
- (assoc binding-token-map join-bindings remaining-tokens))]
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- new-tokens-map))
- removed-tokens)
-
- previous-tokens
- (let [removed-tokens (two-pass-remove! previous-tokens tokens)]
- (when (.isEmpty ^java.util.List previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- (:id node)
- (dissoc binding-token-map join-bindings))))
-
- removed-tokens)))))))
-
- (add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
- (set! accum-memory
- (assoc! accum-memory
- (:id node)
- (assoc-in (get accum-memory (:id node) {})
- [join-bindings fact-bindings]
- accum-result))))
-
- (remove-accum-reduced! [memory node join-bindings fact-bindings]
- (let [node-id (:id node)
- node-id-mem (get accum-memory node-id {})
- join-mem (dissoc (get node-id-mem join-bindings) fact-bindings)
- node-id-mem (if (empty? join-mem)
- (dissoc node-id-mem join-bindings)
- (assoc node-id-mem join-bindings join-mem))]
- (set! accum-memory
- (if (empty? node-id-mem)
- (dissoc! accum-memory
- node-id)
- (assoc! accum-memory
- node-id
- node-id-mem)))))
-
- ;; The value under each token in the map should be a sequence
- ;; of sequences of facts, with each inner sequence coming from a single
- ;; rule activation.
- (add-insertions! [memory node token facts]
- (let [token-facts-map (get production-memory (:id node) {})]
- (set! production-memory
- (assoc! production-memory
- (:id node)
- (update token-facts-map token conj facts)))))
-
- (remove-insertions! [memory node tokens]
-
- ;; Remove the facts inserted from the given token.
- (let [token-facts-map (get production-memory (:id node) {})
- ;; Get removed tokens for the caller.
- [results
- new-token-facts-map]
-
- (loop [results (transient {})
- token-map (transient token-facts-map)
- to-remove tokens]
- (if-let [head-token (first to-remove)]
- ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
- (if-let [token-insertions (get token-map head-token)]
- (let [;; There is no particular significance in removing the
- ;; first group; we just need to remove exactly one.
- [removed-facts & remaining-facts] token-insertions
- removed-insertion-map (if (not-empty remaining-facts)
- (assoc! token-map head-token remaining-facts)
- (dissoc! token-map head-token))
- prev-token-result (get results head-token [])]
- (recur (assoc! results head-token (into prev-token-result removed-facts))
- removed-insertion-map
- (rest to-remove)))
- ;; If the token isn't present in the insertions just try the next one.
- (recur results token-map (rest to-remove)))
- [(persistent! results)
- (persistent! token-map)]))]
-
- ;; Clear the tokens and update the memory.
- (set! production-memory
- (if (not-empty new-token-facts-map)
- (assoc! production-memory
- (:id node)
- new-token-facts-map)
- (dissoc! production-memory (:id node))))
- results))
-
- (add-activations!
- [memory production new-activations]
- (let [activation-group (activation-group-fn production)
- previous (.get activation-map activation-group)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- previous
- (queue-activations! previous new-activations)
-
- (not (coll-empty? new-activations))
- (.put activation-map
- activation-group
- (->activation-priority-queue new-activations)))))
-
- (pop-activation!
- [memory]
- (when (not (.isEmpty activation-map))
- (let [entry (.firstEntry activation-map)
- key (.getKey entry)
- ^java.util.Queue value (.getValue entry)
-
-          ;; An empty queue here would be illegal; any action that empties a queue
-          ;; (e.g. a remove-activations! call) is expected to remove it from the map.
- ^RuleOrderedActivation activation (.remove value)]
-
- ;; This activation group is empty now, so remove it from
- ;; the map entirely.
- (when (.isEmpty value)
- (.remove activation-map key))
-
- ;; Return the selected activation.
- (.activation activation))))
-
- (next-activation-group
- [memory]
- (when (not (.isEmpty activation-map))
- (let [entry (.firstEntry activation-map)]
- (.getKey entry))))
-
- (remove-activations! [memory production to-remove]
- (when-not (coll-empty? to-remove)
- (let [activation-group (activation-group-fn production)
- ^java.util.Queue activations (.get activation-map activation-group)
- removed-activations (LinkedList.)
- unremoved-activations (LinkedList.)]
-
- (if (coll-empty? activations)
- ;; If there are no activations present under the group
- ;; then we can't remove any.
- [[] to-remove]
- ;; Remove as many activations by identity as possible first.
- (let [not-removed (loop [to-remove-item (first to-remove)
- to-remove (next to-remove)
- not-removed (transient [])]
- (if to-remove-item
- (let [^RuleOrderedActivation act (->rule-ordered-activation to-remove-item true)]
- (if (.remove activations act)
- (do
- (.add removed-activations (.activation act))
- ;; The activations that are not removed in the identity checking sweep
- ;; are a superset of the activations that are removed after the equality
- ;; sweep finishes so we can just create the list of unremoved activations
- ;; during that sweep.
- (recur (first to-remove) (next to-remove) not-removed))
- (recur (first to-remove) (next to-remove) (conj! not-removed act))))
- (persistent! not-removed)))]
-
- ;; There may still be activations not removed since the removal may be based on value-based
- ;; equality semantics. Retractions in the engine do not require that identical object references
- ;; are given to remove object values that are equal.
- (doseq [^RuleOrderedActivation act not-removed]
- (if (.remove activations (using-token-identity! act false))
- (.add removed-activations (.activation act))
- (.add unremoved-activations (.activation act))))
-
- (when (coll-empty? activations)
- (.remove activation-map activation-group))
-
- [(Collections/unmodifiableList removed-activations)
- (Collections/unmodifiableList unremoved-activations)])))))
-
- (clear-activations!
- [memory]
- (.clear activation-map))
-
- (to-persistent! [memory]
- (let [->persistent-coll #(if (coll? %)
- %
- (seq %))
- update-vals (fn [m update-fn]
- (->> m
- (reduce-kv (fn [m k v]
- (assoc! m k (update-fn v)))
- (transient m))
- persistent!))
- persistent-vals #(update-vals % ->persistent-coll)]
- (->PersistentLocalMemory rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (update-vals (persistent! alpha-memory) persistent-vals)
- (update-vals (persistent! beta-memory) persistent-vals)
- (persistent! accum-memory)
- (persistent! production-memory)
- (into {}
- (map (juxt key (comp ->persistent-coll val)))
- activation-map)))))
-
-(defrecord PersistentLocalMemory [rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- alpha-memory
- beta-memory
- accum-memory
- production-memory
- activation-map]
- IMemoryReader
- (get-rulebase [memory] rulebase)
-
- (get-alphas-fn [memory] alphas-fn)
-
- (get-elements [memory node bindings]
- (get (get alpha-memory (:id node) {})
- bindings
- []))
-
- (get-elements-all [memory node]
- (sequence
- cat
- (vals
- (get alpha-memory (:id node) {}))))
-
- (get-tokens [memory node bindings]
- (get (get beta-memory (:id node) {})
- bindings
- []))
-
- (get-tokens-all [memory node]
- (sequence
- cat
- (vals (get beta-memory (:id node) {}))))
-
- (get-accum-reduced [memory node join-bindings fact-bindings]
- ;; nil is a valid previously reduced value that can be found in the map.
- ;; Return ::no-accum-reduced instead of nil when there is no previously
- ;; reduced value in memory.
- (get-in accum-memory [(:id node) join-bindings fact-bindings] ::no-accum-reduced))
-
- (get-accum-reduced-all [memory node join-bindings]
- (get
- (get accum-memory (:id node) {})
- join-bindings))
-
- (get-accum-reduced-complete [memory node]
- (for [[join-binding joins] (get accum-memory (:id node) {})
- [fact-binding reduced] joins]
- {:join-bindings join-binding
- :fact-bindings fact-binding
- :result reduced}))
-
- (get-insertions [memory node token]
- (get
- (get production-memory (:id node) {})
- token
- []))
-
- (get-insertions-all [memory node]
- (get production-memory (:id node) {}))
-
- (get-activations [memory]
- (into []
- (comp cat
- (map (fn [^RuleOrderedActivation a]
- (.activation a))))
- (vals activation-map)))
-
- IPersistentMemory
- (to-transient [memory]
- (TransientLocalMemory. rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- (transient alpha-memory)
- (transient beta-memory)
- (transient accum-memory)
- (transient production-memory)
- (let [treemap (TreeMap. ^java.util.Comparator activation-group-sort-fn)]
- (doseq [[activation-group activations] activation-map]
- (.put treemap
- activation-group
- (->activation-priority-queue activations false)))
- treemap))))
-
-(defn local-memory
- "Creates an persistent local memory for the given rule base."
- [rulebase activation-group-sort-fn activation-group-fn alphas-fn]
-
- (->PersistentLocalMemory rulebase
- activation-group-sort-fn
- activation-group-fn
- alphas-fn
- {}
- {}
- {}
- {}
- {}))
diff --git a/build/clara/rules/platform.clj b/build/clara/rules/platform.clj
deleted file mode 100644
index 36a88b55..00000000
--- a/build/clara/rules/platform.clj
+++ /dev/null
@@ -1,93 +0,0 @@
-(ns clara.rules.platform
- "This namespace is for internal use and may move in the future.
-   Platform-unified code for Clojure and ClojureScript."
- (:import [java.lang IllegalArgumentException]
- [java.util LinkedHashMap]))
-
-(defn throw-error
- "Throw an error with the given description string."
- [^String description]
- (throw (IllegalArgumentException. description)))
-
-(defn query-param
- "Coerces a query param to a parameter keyword such as :?param, if an unsupported type is
- supplied then an exception will be thrown"
- [p]
- (cond
- (keyword? p) p
- (symbol? p) (keyword p)
- :else
- (throw-error (str "Query bindings must be specified as a keyword or symbol: " p))))
-
-;; This class wraps Clojure objects to ensure Clojure's equality and hash
-;; semantics are visible to Java code. This allows these Clojure objects
-;; to be safely used in things like Java Sets or Maps.
-;; This class also accepts and stores the hash code, since it will almost always
-;; be used at least once and generally more than once.
-(deftype JavaEqualityWrapper [wrapped ^int hash-code]
-
- Object
- (equals [this other]
- (and (instance? JavaEqualityWrapper other)
- (= wrapped (.wrapped ^JavaEqualityWrapper other))))
-
- (hashCode [this]
- hash-code))
-
-(defn group-by-seq
- "Groups the items of the given coll by f to each item. Returns a seq of tuples of the form
- [f-val xs] where xs are items from the coll and f-val is the result of applying f to any of
- those xs. Each x in xs has the same value (f x). xs will be in the same order as they were
- found in coll.
-   The behavior is similar to calling `(seq (group-by f coll))`. However, the returned seq will
-   always have consistent ordering from process to process. The ordering is insertion order
-   as new (f x) values are found traversing the given coll in its seq order. The
- returned order is made consistent to ensure that relevant places within the rules engine that
- use this grouping logic have deterministic behavior across different processes."
- [f coll]
- (let [^java.util.Map m (reduce (fn [^java.util.Map m x]
- (let [k (f x)
- ;; Use Java's hashcode for performance reasons as
- ;; discussed at https://github.com/cerner/clara-rules/issues/393
- wrapper (JavaEqualityWrapper. k
- (if (nil? k)
- (int 0)
- (int (.hashCode ^Object k))))
- xs (or (.get m wrapper)
- (transient []))]
- (.put m wrapper (conj! xs x)))
- m)
- (LinkedHashMap.)
- coll)
- it (.iterator (.entrySet m))]
- ;; Explicitly iterate over a Java iterator in order to avoid running into issues as
- ;; discussed in http://dev.clojure.org/jira/browse/CLJ-1738
- (loop [coll (transient [])]
- (if (.hasNext it)
- (let [^java.util.Map$Entry e (.next it)]
- (recur (conj! coll [(.wrapped ^JavaEqualityWrapper (.getKey e)) (persistent! (.getValue e))])))
- (persistent! coll)))))
-
-(defmacro thread-local-binding
- "Wraps given body in a try block, where it sets each given ThreadLocal binding
- and removes it in finally block."
- [bindings & body]
- (when-not (vector? bindings)
- (throw (ex-info "Binding needs to be a vector."
- {:bindings bindings})))
- (when-not (even? (count bindings))
- (throw (ex-info "Needs an even number of forms in binding vector"
- {:bindings bindings})))
- (let [binding-pairs (partition 2 bindings)]
- `(try
- ~@(for [[tl v] binding-pairs]
- `(.set ~tl ~v))
- ~@body
- (finally
- ~@(for [[tl] binding-pairs]
- `(.remove ~tl))))))
-
-(defmacro eager-for
- "A for wrapped with a doall to force realisation. Usage is the same as regular for."
- [& body]
- `(doall (for ~@body)))
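A brief sketch of the deterministic grouping described in the group-by-seq docstring, contrasted with clojure.core/group-by:

(require '[clara.rules.platform :as platform])

;; Groups come back as [key items] tuples in key insertion order, so the
;; result is stable across processes (unlike seq'ing a hash map).
(platform/group-by-seq odd? [1 2 3 4 5])
;; => [[true [1 3 5]] [false [2 4]]]

;; eager-for is simply `for` wrapped in doall.
(platform/eager-for [x (range 3)] (* x x))
;; => (0 1 4)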
diff --git a/build/clara/rules/schema.clj b/build/clara/rules/schema.clj
deleted file mode 100644
index 50669daf..00000000
--- a/build/clara/rules/schema.clj
+++ /dev/null
@@ -1,200 +0,0 @@
-(ns clara.rules.schema
- "Schema definition of Clara data structures using Prismatic's Schema library. This includes structures for rules and queries, as well as the schema
- for the underlying Rete network itself. This can be used by tools or other libraries working with rules."
- (:require [schema.core :as s]))
-
-(s/defn condition-type :- (s/enum :or :not :and :exists :fact :accumulator :test)
- "Returns the type of node in a LHS condition expression."
- [condition]
- (if (map? condition) ; Leaf nodes are maps, per the schema
-
- (cond
- (:type condition) :fact
- (:accumulator condition) :accumulator
- :else :test)
-
-    ;; Otherwise the node must be a sequential that starts with the boolean operator.
- (first condition)))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; Rule and query structure schema.
-
-(def SExpr
- (s/pred seq? "s-expression"))
-
-(def FactCondition
- {:type s/Any ;(s/either s/Keyword (s/pred symbol?))
- :constraints [SExpr]
- ;; Original constraints preserved for tooling in case a transformation was applied to the condition.
- (s/optional-key :original-constraints) [SExpr]
- (s/optional-key :fact-binding) s/Keyword
- (s/optional-key :args) s/Any})
-
-(def AccumulatorCondition
- {:accumulator s/Any
- :from FactCondition
- (s/optional-key :result-binding) s/Keyword})
-
-(def TestCondition
- {:constraints [SExpr]})
-
-(def LeafCondition
- (s/conditional
- :type FactCondition
- :accumulator AccumulatorCondition
- :else TestCondition))
-
-(declare Condition)
-
-(def BooleanCondition
- [(s/one (s/enum :or :not :and :exists) "operator")
- (s/recursive #'Condition)])
-
-(def Condition
- (s/conditional
- sequential? BooleanCondition
- map? LeafCondition))
-
-(def Rule
- {;; :ns-name is currently used to eval the :rhs form of a rule in the same
- ;; context that it was originally defined in. It is optional and only used
- ;; when given. It may be used for other purposes in the future.
- (s/optional-key :ns-name) s/Symbol
- (s/optional-key :name) (s/cond-pre s/Str s/Keyword)
- (s/optional-key :doc) s/Str
- (s/optional-key :props) {s/Keyword s/Any}
- (s/optional-key :env) {s/Keyword s/Any}
- :lhs [Condition]
- :rhs s/Any})
-
-(def Query
- {(s/optional-key :name) (s/cond-pre s/Str s/Keyword)
- (s/optional-key :doc) s/Str
- (s/optional-key :props) {s/Keyword s/Any}
- (s/optional-key :env) {s/Keyword s/Any}
- :lhs [Condition]
- :params #{s/Keyword}})
-
-(def Production
- (s/conditional
- :rhs Rule
- :else Query))
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
-;; Schema for the Rete network itself.
-
-(def ConditionNode
- {:node-type (s/enum :join :negation :test :accumulator)
- :condition LeafCondition
-
-   ;; Captured environment in which the condition was defined, such as closed-over variables.
- ;; Most rules (such as those defined by defrule) have no surrounding
- ;; environment, but user generated rules might.
- (s/optional-key :env) {s/Keyword s/Any}
-
- ;; Variables used to join to other expressions in the network.
- (s/optional-key :join-bindings) #{s/Keyword}
-
- ;; Variable bindings used by expressions in this node.
- :used-bindings #{s/Keyword}
-
- ;; Variable bindings used in the constraints that are not present in the ancestors of this node.
- :new-bindings #{s/Keyword}
-
- ;; An expression used to filter joined data.
- (s/optional-key :join-filter-expressions) LeafCondition
-
- ;; Bindings used to perform non-hash joins in the join filter expression.
- ;; this is a subset of :used-bindings.
- (s/optional-key :join-filter-join-bindings) #{s/Keyword}
-
- ;; The expression to create the accumulator.
- (s/optional-key :accumulator) s/Any
-
- ;; The optional fact or accumulator result binding.
- (s/optional-key :result-binding) s/Keyword})
-
-(def ProductionNode
- {:node-type (s/enum :production :query)
-
- ;; Rule for rule nodes.
- (s/optional-key :production) Rule
-
- ;; Query for query nodes.
- (s/optional-key :query) Query
-
- ;; Bindings used in the rule right-hand side.
- (s/optional-key :bindings) #{s/Keyword}})
-
-;; Alpha network schema.
-(def AlphaNode
- {:id s/Int
- :condition FactCondition
-   ;; Optional environment for the alpha node.
- (s/optional-key :env) {s/Keyword s/Any}
- ;; IDs of the beta nodes that are the children.
- :beta-children [s/Num]})
-
-;; A graph representing the beta side of the rete network.
-(def BetaGraph
- {;; Edges from parent to child nodes.
- :forward-edges {s/Int #{s/Int}}
-
- ;; Edges from child to parent nodes.
- :backward-edges {s/Int #{s/Int}}
-
- ;; Map of identifier to condition nodes.
- :id-to-condition-node {s/Int (s/cond-pre (s/eq :clara.rules.compiler/root-condition)
- ConditionNode)}
-
- ;; Map of identifier to query or rule nodes.
- :id-to-production-node {s/Int ProductionNode}
-
- ;; Map of identifier to new bindings created by the corresponding node.
- :id-to-new-bindings {s/Int #{s/Keyword}}})
-
-(defn tuple
- "Given `items`, a list of schemas, will generate a schema to validate that a vector contains and is in the order provided
- by `items`."
- [& items]
- (s/constrained [s/Any]
- (fn [tuple-vals]
- (and (= (count tuple-vals)
- (count items))
- (every? nil? (map s/check items tuple-vals))))
- "tuple"))
-
-(def NodeCompilationValue
- (s/constrained {s/Keyword s/Any}
- (fn [compilation]
- (let [expr-keys #{:alpha-expr :action-expr :join-filter-expr :test-expr :accum-expr}]
- (some expr-keys (keys compilation))))
- "node-compilation-value"))
-
-(def NodeCompilationContext
- (s/constrained NodeCompilationValue
- (fn [compilation]
- (let [xor #(and (or %1 %2)
- (not (and %1 %2)))]
- (and (contains? compilation :compile-ctx)
- (contains? (:compile-ctx compilation) :msg)
- (xor (contains? (:compile-ctx compilation) :condition)
- (contains? (:compile-ctx compilation) :production)))))
- "node-compilation-context"))
-
-;; A map of [<node-id> <field-name>] to SExpression, used in compilation of the rulebase.
-(def NodeExprLookup
-  ;; The schema should be NodeCompilationContext in standard compilation,
-  ;; but during serde it might be either, since :compile-ctx is only used for compilation failures
-  ;; and can be disabled after compilation.
- {(tuple s/Int s/Keyword) (tuple SExpr (s/conditional :compile-ctx NodeCompilationContext
- :else NodeCompilationValue))})
-
-;; An evaluated version of the schema mentioned above.
-(def NodeFnLookup
- ;; This schema uses a relaxed version of NodeCompilationContext as once the expressions
- ;; have been eval'd there is technically no need for compile-ctx to be maintained except for
- ;; deserialization. In such events the compile-ctx would only be valuable when the environment
- ;; where the Session is being deserialized doesn't match that of the serialization, ie functions
- ;; and symbols cannot be resolved on the deserialization side.
- {(tuple s/Int s/Keyword) (tuple (s/pred ifn? "ifn?") NodeCompilationValue)})
\ No newline at end of file
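For reference, a small sketch of validating a data-defined production against the schema above; the rule content here is illustrative only:

(require '[clara.rules.schema :as schema]
         '[schema.core :as s])

;; s/validate throws if the value does not conform and returns it unchanged otherwise.
(s/validate schema/Production
            {:name "example/always-hot"
             :lhs [{:type :temperature
                    :constraints '[(> temperature 90)]}]
             :rhs '(println "it is hot")})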
diff --git a/build/clara/rules/test_rules_data.clj b/build/clara/rules/test_rules_data.clj
deleted file mode 100644
index 8239fb4a..00000000
--- a/build/clara/rules/test_rules_data.clj
+++ /dev/null
@@ -1,37 +0,0 @@
-;;; This namespace exists for testing purposes only, and is temporarily placed under src/main/clojure/clara
-;;; due to issues with the CLJS test environment. Move to test/common/clara when this issue is resolved
-;;; and tests can be compiled and run with this file in that location.
-;;; See issue #288 for further info (https://github.com/cerner/clara-rules/issues/388).
-
-(ns clara.rules.test-rules-data
- (:require [clara.rules]
- [clara.rules.testfacts]))
-
-(def the-rules
- [{:doc "Rule to determine whether it is indeed cold and windy."
- :name "clara.rules.test-rules-data/is-cold-and-windy-data"
- :lhs [{:type clara.rules.testfacts.Temperature
- :constraints '[(< temperature 20)
- (== ?t temperature)]}
- {:type clara.rules.testfacts.WindSpeed
- :constraints '[(> windspeed 30)
- (== ?w windspeed)]}]
- :rhs '(clara.rules/insert! (clara.rules.testfacts/->ColdAndWindy ?t ?w))}
-
- {:name "clara.rules.test-rules-data/find-cold-and-windy-data"
- :lhs [{:fact-binding :?fact
- :type clara.rules.testfacts.ColdAndWindy
- :constraints []}]
- :params #{}}])
-
-(defn weather-rules
- "Return some weather rules"
- []
- the-rules)
-
-(def the-rules-with-keyword-names (mapv #(update % :name keyword) the-rules))
-
-(defn weather-rules-with-keyword-names
- "Return some weather rules using keyword names"
- []
- the-rules-with-keyword-names)
diff --git a/build/clara/rules/testfacts.clj b/build/clara/rules/testfacts.clj
deleted file mode 100644
index f11b2ac9..00000000
--- a/build/clara/rules/testfacts.clj
+++ /dev/null
@@ -1,24 +0,0 @@
-(ns clara.rules.testfacts
- "This namespace exists primary for testing purposes, working around the fact that we cannot AOT compile test classes. This should be moved to the tests once a workaround for this is solved.")
-
-;; Reflection against records requires them to be compiled AOT, so we temporarily
-;; place them here as leiningen won't AOT compile test resources.
-(defrecord Temperature [temperature location])
-(defrecord WindSpeed [windspeed location])
-(defrecord Cold [temperature])
-(defrecord Hot [temperature])
-(defrecord ColdAndWindy [temperature windspeed])
-(defrecord LousyWeather [])
-(defrecord TemperatureHistory [temperatures])
-
-;; Test facts for chained rules.
-(defrecord First [])
-(defrecord Second [])
-(defrecord Third [])
-(defrecord Fourth [])
-
-;; Record utilizing clj flexible field names.
-(defrecord FlexibleFields [it-works?
- a->b
- x+y
- bang!])
diff --git a/build/clara/rules/update_cache/cancelling.clj b/build/clara/rules/update_cache/cancelling.clj
deleted file mode 100644
index de506c3d..00000000
--- a/build/clara/rules/update_cache/cancelling.clj
+++ /dev/null
@@ -1,146 +0,0 @@
-(ns clara.rules.update-cache.cancelling
- (:require [clara.rules.update-cache.core :as uc])
- (:import [java.util
- List
- Map
- LinkedList
- LinkedHashMap
- Collections]))
-
-;;; We need a wrapper to use Clojure equality semantics inside
-;;; a Java collection. Furthermore, since we know we will need to do
-;;; a hash operation for each such wrapper created anyway we can ensure
-;;; that if the hashes of two facts are not equal that the equals implementation
-;;; here will quickly return false.
-(deftype FactWrapper [fact ^int fact-hash]
-
- Object
- (equals [this other]
-
- (cond
-
- ;; There are some cases where the inserted and retracted facts could be identical, particularly
- ;; if user code in the RHS has caches, so we go ahead and check for identity as a first-pass check,
-     ;; but there probably aren't enough cases where the facts are identical to make doing a full sweep
- ;; on identity first worthwhile, particularly since in practice the hash check will make the vast majority
- ;; of .equals calls that return false quite fast.
- (identical? fact (.fact ^FactWrapper other))
- true
-
- (not (== fact-hash (.fact_hash ^FactWrapper other)))
- false
-
- :else (= fact (.fact ^FactWrapper other))))
-
- (hashCode [this] fact-hash))
-
-;;; These functions essentially allow us to use a Java map to create a set that stores
-;;; the frequency of its items. Note that when multiple instances of a fact are added
-;;; we keep both instances as distinct objects. We don't strictly speaking need to do this
-;;; but we expect it to perform better. The memory will retain both distinct references
-;;; and future updates are expected to be faster if these references are maintained since
-;;; memory operations look for matches on identity first in tokens before falling back to matching
-;;; on equality.
-(defn inc-fact-count! [^Map m fact]
- (let [wrapper (FactWrapper. fact (hash fact))
- ^List current-val (.get m wrapper)]
- (if current-val
- (.add current-val fact)
- (.put m wrapper (LinkedList. [fact])))))
-
-(defn dec-fact-count! [^Map m fact]
- (let [wrapper (FactWrapper. fact (hash fact))
- ;; Note that we specifically use a LinkedList type hint here since we
- ;; use methods from multiple interfaces here, namely List and Deque.
- ^LinkedList current-val (.get m wrapper)]
- (if current-val
- (do
- (if (= (.size current-val) 1)
- (.remove m wrapper)
- ;;; Since as noted above, the facts are equal, we don't actually care which one we remove.
- ;;; We remove the first here to avoid any work checking equality and since this is a constant-time
- ;;; operation on LinkedList. Since the insertions will be newly inserted facts we probably won't
- ;;; have many identical retractions, so doing a sweep for identical facts first probably wouldn't
- ;;; have enough hits to be worth the cost.
- (.removeFirst current-val))
- true)
- false)))
-
-(defn map->vals-concated
- [^Map m]
- (let [val-list (java.util.LinkedList.)
- it (.iterator (.entrySet m))]
- (loop []
- (when (.hasNext it)
- (do (let [^java.util.Map$Entry e (.next it)
- fact (.fact ^FactWrapper (.getKey e))
- ^Iterable facts-in-val (.getValue e)
- fact-iter (.iterator facts-in-val)]
- (loop []
- (when (.hasNext fact-iter)
- (do
- (.add val-list (.next fact-iter))
- (recur)))))
- (recur))))
- ;; This list will never be exposed to the user; it is simply iterated over
- ;; by the engine and then discarded. This being the case there is no
- ;; need to return a persistent data structure rather than an unmodifiable one.
- (Collections/unmodifiableList val-list)))
-
-;;; This is a pending updates cache that allows
-;;; retractions and insertions of equal facts
-;;; to cancel each other out.
-;;; More formally, for i insertions and r retractions
-;;; of a fact f, it will:
-;;; - If i = r, no operations will be performed.
-;;; - If i > r, f will be returned for insertion i - r times.
-;;; - If r > i, f will be returned for retraction r - i times.
-(deftype CancellingUpdateCache [^Map ^:unsynchronized-mutable insertions
- ^Map ^:unsynchronized-mutable retractions]
-
- uc/UpdateCache
-
- (add-insertions! [this facts]
- (let [fact-iter (.iterator ^Iterable facts)]
- (loop []
- (when (.hasNext fact-iter)
- (let [fact (.next fact-iter)]
- (when-not (dec-fact-count! retractions fact)
- (inc-fact-count! insertions fact))
- (recur))))))
-
- (add-retractions! [this facts]
- (let [fact-iter (.iterator ^Iterable facts)]
- (loop []
- (when (.hasNext fact-iter)
- (let [fact (.next fact-iter)]
- (when-not (dec-fact-count! insertions fact)
- (inc-fact-count! retractions fact))
- (recur))))))
-
- (get-updates-and-reset! [this]
- (let [retractions-update (when (-> retractions .size pos?)
- (uc/->PendingUpdate :retract (map->vals-concated retractions)))
- insertions-update (when (-> insertions .size pos?)
- (uc/->PendingUpdate :insert (map->vals-concated insertions)))]
- (set! insertions (LinkedHashMap.))
- (set! retractions (LinkedHashMap.))
-
- (cond
-
- (and insertions-update retractions-update)
- ;; This could be ordered to have insertions before retractions if we ever
- ;; found that that performs better on average. Either ordering should
- ;; be functionally correct.
- [[retractions-update] [insertions-update]]
-
- insertions-update
- [[insertions-update]]
-
- retractions-update
- [[retractions-update]]))))
-
-;; We use LinkedHashMap so that the ordering of the pending updates will be deterministic.
-(defn get-cancelling-update-cache
- []
- (CancellingUpdateCache. (LinkedHashMap.) (LinkedHashMap.)))
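A short sketch of the cancellation semantics spelled out in the comments above:

(require '[clara.rules.update-cache.core :as uc]
         '[clara.rules.update-cache.cancelling :as cancelling])

(let [cache (cancelling/get-cancelling-update-cache)]
  ;; One insertion and one retraction of an equal fact cancel each other out,
  ;; so no pending updates remain.
  (uc/add-insertions! cache [{:temperature 10}])
  (uc/add-retractions! cache [{:temperature 10}])
  (uc/get-updates-and-reset! cache))
;; => nil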
diff --git a/build/clara/rules/update_cache/core.clj b/build/clara/rules/update_cache/core.clj
deleted file mode 100644
index 80073ea6..00000000
--- a/build/clara/rules/update_cache/core.clj
+++ /dev/null
@@ -1,32 +0,0 @@
-(ns clara.rules.update-cache.core)
-
-;; Record indicating pending insertion or removal of a sequence of facts.
-(defrecord PendingUpdate [type facts])
-
-;; This is expected to be used while activating rules in a given salience group
-;; to store updates before propagating those updates to the alpha nodes as a group.
-(defprotocol UpdateCache
- (add-insertions! [this facts])
- (add-retractions! [this facts])
- (get-updates-and-reset! [this]))
-
-;; This cache replicates the behavior prior to https://github.com/cerner/clara-rules/issues/249,
-;; just in a stateful object rather than a persistent data structure.
-(deftype OrderedUpdateCache [updates]
-
- UpdateCache
-
- (add-insertions! [this facts]
- (swap! updates into [(->PendingUpdate :insert facts)]))
-
- (add-retractions! [this facts]
- (swap! updates into [(->PendingUpdate :retract facts)]))
-
- (get-updates-and-reset! [this]
- (let [current-updates @updates]
- (reset! updates [])
- (partition-by :type current-updates))))
-
-(defn get-ordered-update-cache
- []
- (OrderedUpdateCache. (atom [])))
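And a companion sketch for the simpler ordered cache just above, which keeps every update and returns them partitioned by type:

(require '[clara.rules.update-cache.core :as uc])

(let [cache (uc/get-ordered-update-cache)]
  (uc/add-insertions! cache [:a :b])
  (uc/add-retractions! cache [:c])
  (uc/get-updates-and-reset! cache))
;; => PendingUpdate groups partitioned by :type, e.g.
;;    (({:type :insert, :facts [:a :b]}) ({:type :retract, :facts [:c]}))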
diff --git a/build/clara/tools/fact_graph.clj b/build/clara/tools/fact_graph.clj
deleted file mode 100644
index dacc7455..00000000
--- a/build/clara/tools/fact_graph.clj
+++ /dev/null
@@ -1,96 +0,0 @@
-(ns clara.tools.fact-graph
- (:require [clara.tools.inspect :as i]
- [schema.core :as sc]))
-
-;; This node will have either facts or results of accumulations as its parents.
-;; Its children will be facts that the rule inserted.
-(sc/defrecord RuleActivationNode [rule-name :- sc/Str
- id :- sc/Int])
-
-;; The parents of this node are facts over which an accumulation was run.
-;; It will have a single child, the result of the accumulation. So, for example,
-;; with the condition [?t <- (acc/min :temperature) :from [Temperature]], if we have
-;; (->Temperature 50 "MCI") and (->Temperature 30 "MCI") the child of this node will be
-;; an AccumulationResult with the :result 30 and the parents will be the two Temperature facts.
-(sc/defrecord AccumulationNode [id :- sc/Int])
-
-;; As alluded to above, this node represents the result of an accumulation. Its child will be a
-;; RuleActivationNode. Note that there will be an AccumulationResult for each distinct rule firing.
-(sc/defrecord AccumulationResultNode [id :- sc/Int
- result :- sc/Any])
-
-(def ^:private empty-fact-graph {:forward-edges {}
- :backward-edges {}})
-
-(defn ^:private add-edge [graph from to]
- (-> graph
- (update-in [:forward-edges from] (fnil conj #{}) to)
- (update-in [:backward-edges to] (fnil conj #{}) from)))
-
-(defn ^:private add-insertion-to-graph
- [original-graph id-counter fact-inserted {:keys [rule-name explanation]}]
- (let [facts-direct (sequence
- (comp (remove :facts-accumulated)
- (map :fact))
- (:matches explanation))
-
- activation-node (map->RuleActivationNode {:rule-name rule-name
- :id (swap! id-counter inc)})
-
- accum-matches (filter :facts-accumulated (:matches explanation))]
-
- (as-> original-graph graph
- (if (seq accum-matches)
- (reduce (fn [reduce-graph accum-match]
- (let [accum-node (->AccumulationNode (swap! id-counter inc))
- accum-result (->AccumulationResultNode (swap! id-counter inc) (:fact accum-match))]
- (as-> reduce-graph g
- ;; Add edges from individual facts to an AccumulationResultNode.
- (reduce (fn [g accum-element]
- (add-edge g accum-element accum-node))
- g (:facts-accumulated accum-match))
- (add-edge g accum-node accum-result)
- (add-edge g accum-result activation-node))))
- graph
- accum-matches)
- graph)
- ;; Add edges to the rule activation node from the facts that contributed
- ;; to the rule firing that were not in accumulator condition.
- (reduce (fn [g f]
- (add-edge g f activation-node))
- graph
- facts-direct)
- (add-edge graph activation-node fact-inserted))))
-
-(defn session->fact-graph
- "Given a session, return a graph structure connecting all facts to the facts
- that they caused to be logically inserted. Note that such connections will not
- be made for unconditionally inserted facts."
- [session]
- (let [id-counter (atom 0)
-        ;; Use a counter, whose value will be added to internal nodes, to ensure that
-        ;; these nodes are not equal to each other. This ensures that the number of internal
-        ;; nodes accurately reflects the cardinality of each fact in the session.
-
- ;; This function generates one of the entries in the map returned by clara.tools.inspect/inspect.
- ;; The function is private since it would be confusing for only one of the entries in clara.tools.inspect/inspect
- ;; to be accessible without generating the entire session inspection map. However, we want to reuse the functionality
- ;; here without the performance penalty of generating all of the inspection data for the session. Therefore, for now
- ;; we break the privacy of the function here. Once issue 286 is completed we should remove this private Var access.
- fact->explanations (@#'i/gen-fact->explanations session)
-
- ;; Produce tuples of the form [inserted-fact {:rule rule :explanation clara.tools.inspect.Explanation}]
- insertion-tuples (into []
- (comp
- (map (fn [[fact v]]
- (map (fn [{:keys [rule explanation]}]
- [fact {:rule-name (:name rule)
- :explanation explanation}])
- v)))
- cat)
- fact->explanations)]
-
- (reduce (fn [graph tuple]
- (apply add-insertion-to-graph graph id-counter tuple))
- empty-fact-graph
- insertion-tuples)))
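
To illustrate the graph shape described above, a small end-to-end sketch (the rule, record types, and session are invented for this example and are not part of the patch):

    (require '[clara.rules :refer [defrule mk-session insert insert! fire-rules]]
             '[clara.tools.fact-graph :as fg])

    (defrecord Temperature [temperature location])
    (defrecord Cold [location])

    (defrule cold-rule
      [Temperature (< temperature 32) (= ?loc location)]
      =>
      (insert! (->Cold ?loc)))

    (-> (mk-session [cold-rule])
        (insert (->Temperature 20 "MCI"))
        (fire-rules)
        (fg/session->fact-graph))
    ;; => {:forward-edges {...} :backward-edges {...}}, where the Temperature fact
    ;;    points to a RuleActivationNode, which in turn points to the Cold fact.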
diff --git a/build/clara/tools/inspect.clj b/build/clara/tools/inspect.clj
deleted file mode 100644
index b2831863..00000000
--- a/build/clara/tools/inspect.clj
+++ /dev/null
@@ -1,354 +0,0 @@
-(ns clara.tools.inspect
- "Tooling to inspect a rule session. The two major methods here are:
-
- * inspect, which returns a data structure describing the session that can be used by tooling.
- * explain-activations, which uses inspect and prints a human-readable description covering
- why each rule activation or query match occurred."
- (:require [clara.rules.engine :as eng]
- [clara.rules.schema :as schema]
- [clara.rules.memory :as mem]
- [clara.tools.internal.inspect :as i]
- [clojure.main :refer [demunge]]
- [schema.core :as s]
- [clojure.string :as str])
- (:import [clara.rules.engine
- ProductionNode
- RootJoinNode
- HashJoinNode
- ExpressionJoinNode
- NegationNode
- NegationWithJoinFilterNode]))
-
-(s/defschema ConditionMatch
-  "A structure associating a condition with the facts that matched it. The fields are:
- :fact - A fact propagated from this condition in a rule or query. For non-accumulator conditions,
- this will be the fact matched by the condition. For accumulator conditions, it will be the result
- of the accumulation. So, for example, if we have a condition like
-
- [?cold <- Cold]
-
- a ConditionMatch for this condition will have a Cold fact in its :fact field. If we have a condition like
-
- [?min-cold <- (acc/min :temperature) :from [Cold]]
-
- the value of :fact will be the minimum temperature returned by the accumulator.
-
- :condition - A structure representing this condition. This is the same structure used inside the structures defining
- rules and queries.
-
- :facts-accumulated (nullable) : When the condition is an accumulator condition, this will contain the individual facts over
- which the accumulator ran. For example, in the case above with the condition
-
- [?min-cold <- (acc/min :temperature) :from [Cold]]
-
- this will contain the individual Cold facts over which we accumulated, while the :fact field
- will contain the result of the accumulation."
- {:fact s/Any
- :condition schema/Condition
- (s/optional-key :facts-accumulated) [s/Any]})
-
-;; A structured explanation of why a rule or query matched.
-;; This is derived from the Rete-style tokens, but this token
-;; is designed to propagate all context needed to easily inspect
-;; the state of rules.
-(s/defrecord Explanation [matches :- [ConditionMatch]
- bindings :- {s/Keyword s/Any}]) ; Bound variables
-
-;; Schema of an inspected rule session.
-(def InspectionSchema
- {:rule-matches {schema/Rule [Explanation]}
- :query-matches {schema/Query [Explanation]}
- :condition-matches {schema/Condition [s/Any]}
- :insertions {schema/Rule [{:explanation Explanation :fact s/Any}]}})
-
-(defn- get-condition-matches
- "Returns facts matching each condition"
- [nodes memory]
- (let [node-class->node-type (fn [node]
- (get {ExpressionJoinNode :join
- HashJoinNode :join
- RootJoinNode :join
- NegationNode :negation
- NegationWithJoinFilterNode :negation} (type node)))
-
- join-node-ids (for [beta-node nodes
- :let [node-type (node-class->node-type beta-node)]
- ;; Unsupported and irrelevant node types will have a node-type of nil
- ;; since the map in node-class->node-type won't contain an entry
- ;; for them, so this check will remove them.
- :when (contains? #{:join :negation}
- node-type)]
- [(:id beta-node) (:condition beta-node) node-type])]
- (reduce
- (fn [matches [node-id condition node-type]]
- (update-in matches
- (condp = node-type
-
- :join
- [condition]
-
- ;; Negation nodes store the fact that they are a negation
- ;; in their :node-type and strip the information out of the
- ;; :condition field. We reconstruct the negation boolean condition
- ;; that is contained in rule and query data structures created by defrule
- ;; and that conforms to the Condition schema.
- :negation
- [[:not condition]])
- concat (map :fact (mem/get-elements-all memory {:id node-id}))))
- {}
- join-node-ids)))
-
-(defn- to-explanations
- "Helper function to convert tokens to explanation records."
- [session tokens]
- (let [memory (-> session eng/components :memory)
- id-to-node (get-in (eng/components session) [:rulebase :id-to-node])]
-
- (for [{:keys [matches bindings] :as token} tokens]
- (->Explanation
- ;; Convert matches to explanation structure.
- (for [[fact node-id] matches
- :let [node (id-to-node node-id)
- condition (if (:accum-condition node)
-
- {:accumulator (get-in node [:accum-condition :accumulator])
- :from {:type (get-in node [:accum-condition :from :type])
- :constraints (or (seq (get-in node [:accum-condition :from :original-constraints]))
- (get-in node [:accum-condition :from :constraints]))}}
-
- {:type (:type (:condition node))
- :constraints (or (seq (:original-constraints (:condition node)))
- (:constraints (:condition node)))})]]
- (if (:accum-condition node)
- {:fact fact
- :condition condition
- :facts-accumulated (eng/token->matching-elements node memory token)}
- {:fact fact
- :condition condition}))
-
- ;; Remove generated bindings from user-facing explanation.
- (into {} (remove (fn [[k v]]
- (.startsWith (name k) "?__gen__"))
- bindings))))))
-
-(defn ^:private gen-all-rule-matches
- [session]
- (when-let [activation-info (i/get-activation-info session)]
- (let [grouped-info (group-by #(-> % :activation :node) activation-info)]
- (into {}
- (map (fn [[k v]]
- [(:production k)
- (to-explanations session (map #(-> % :activation :token) v))]))
- grouped-info))))
-
-(defn ^:private gen-fact->explanations
- [session]
-
- (let [{:keys [memory rulebase]} (eng/components session)
- {:keys [productions production-nodes query-nodes]} rulebase
- rule-to-rule-node (into {} (for [rule-node production-nodes]
- [(:production rule-node) rule-node]))]
- (apply merge-with into
- (for [[rule rule-node] rule-to-rule-node
- token (keys (mem/get-insertions-all memory rule-node))
- insertion-group (mem/get-insertions memory rule-node token)
- insertion insertion-group]
- {insertion [{:rule rule
- :explanation (first (to-explanations session [token]))}]}))))
-
-(def ^{:doc "Return a new session on which information will be gathered for optional inspection keys.
- This can significantly increase memory consumption since retracted facts
- cannot be garbage collected as normally."}
- with-full-logging i/with-activation-listening)
-
-(def ^{:doc "Return a new session without information gathering on this session for optional inspection keys.
- This new session will not retain references to any such information previously gathered."}
- without-full-logging i/without-activation-listening)
-
-(s/defn inspect
- " Returns a representation of the given rule session useful to understand the
- state of the underlying rules.
-
- The returned structure always includes the following keys:
-
- * :rule-matches -- a map of rule structures to their matching explanations.
- Note that this only includes rule matches with corresponding logical
- insertions after the rules finished firing.
- * :query-matches -- a map of query structures to their matching explanations.
- * :condition-matches -- a map of conditions pulled from each rule to facts they match.
- * :insertions -- a map of rules to a sequence of {:explanation E, :fact F} records
- to allow inspection of why a given fact was inserted.
- * :fact->explanations -- a map of facts inserted to a sequence
- of maps of the form {:rule rule-structure :explanation explanation},
- where each such map justifies a single insertion of the fact.
-
- And additionally includes the following keys for operations
- performed after a with-full-logging call on the session:
-
-  * :unfiltered-rule-matches -- a map of rule structures to their matching explanations.
- This includes all rule activations, regardless of whether they led to insertions or if
- they were ultimately retracted. This should be considered low-level information primarily
- useful for debugging purposes rather than application control logic, although legitimate use-cases
- for the latter do exist if care is taken. Patterns of insertion and retraction prior to returning to
- the caller are internal implementation details of Clara unless explicitly controlled by the user.
-
- Users may inspect the entire structure for troubleshooting or explore it
- for specific cases. For instance, the following code snippet could look
- at all matches for some example rule:
-
- (defrule example-rule ... )
-
- ...
-
- (get-in (inspect example-session) [:rule-matches example-rule])
-
- ...
-
- The above segment will return matches for the rule in question."
- [session] :- InspectionSchema
- (let [{:keys [memory rulebase]} (eng/components session)
- {:keys [productions production-nodes query-nodes id-to-node]} rulebase
-
- ;; Map of queries to their nodes in the network.
- query-to-nodes (into {} (for [[query-name query-node] query-nodes]
- [(:query query-node) query-node]))
-
- ;; Map of rules to their nodes in the network.
- rule-to-nodes (into {} (for [rule-node production-nodes]
- [(:production rule-node) rule-node]))
-
- base-info {:rule-matches (into {}
- (for [[rule rule-node] rule-to-nodes]
- [rule (to-explanations session
- (keys (mem/get-insertions-all memory rule-node)))]))
-
- :query-matches (into {}
- (for [[query query-node] query-to-nodes]
- [query (to-explanations session
- (mem/get-tokens-all memory query-node))]))
-
- :condition-matches (get-condition-matches (vals id-to-node) memory)
-
- :insertions (into {}
- (for [[rule rule-node] rule-to-nodes]
- [rule
- (for [token (keys (mem/get-insertions-all memory rule-node))
- insertion-group (get (mem/get-insertions-all memory rule-node) token)
- insertion insertion-group]
- {:explanation (first (to-explanations session [token])) :fact insertion})]))
-
- :fact->explanations (gen-fact->explanations session)}]
-
- (if-let [unfiltered-rule-matches (gen-all-rule-matches session)]
- (assoc base-info :unfiltered-rule-matches unfiltered-rule-matches)
- base-info)))
-
-(defn- explain-activation
- "Prints a human-readable explanation of the facts and conditions that created the Rete token."
- ([explanation] (explain-activation explanation ""))
- ([explanation prefix]
- (doseq [{:keys [fact condition]} (:matches explanation)]
- (if (:from condition)
- ;; Explain why the accumulator matched.
- (let [{:keys [accumulator from]} condition]
- (println prefix fact)
- (println prefix " accumulated with" accumulator)
- (println prefix " from" (:type from))
- (println prefix " where" (:constraints from)))
-
- ;; Explain why a condition matched.
- (let [{:keys [type constraints]} condition]
- (println prefix fact)
- (println prefix " is a" type)
- (println prefix " where" constraints))))))
-
-(defn explain-activations
- "Prints a human-friendly explanation of why rules and queries matched in the given session.
-  A caller may optionally pass a :rule-filter-fn, which is a predicate
-
- (clara.tools.inspect/explain-activations session
- :rule-filter-fn (fn [rule] (re-find my-rule-regex (:name rule))))"
-
- [session & {:keys [rule-filter-fn] :as options}]
- (let [filter-fn (or rule-filter-fn (constantly true))]
-
- (doseq [[rule explanations] (:rule-matches (inspect session))
- :when (filter-fn rule)
- :when (seq explanations)]
- (println "rule" (or (:name rule) (str "<" (:lhs rule) ">")))
- (println " executed")
- (println " " (:rhs rule))
- (doseq [explanation explanations]
- (println " with bindings")
- (println " " (:bindings explanation))
- (println " because")
- (explain-activation explanation " "))
- (println))
-
- (doseq [[rule explanations] (:query-matches (inspect session))
- :when (filter-fn rule)
- :when (seq explanations)]
- (println "query" (or (:name rule) (str "<" (:lhs rule) ">")))
- (doseq [explanation explanations]
- (println " with bindings")
- (println " " (:bindings explanation))
- (println " qualified because")
- (explain-activation explanation " "))
- (println))))
-
-(let [inverted-type-lookup (zipmap (vals eng/node-type->abbreviated-type)
- (keys eng/node-type->abbreviated-type))]
- (defn node-fn-name->production-name
- "A helper function for retrieving the name or names of rules that a generated function belongs to.
-
- 'session' - a LocalSession from which a function was retrieved
- 'node-fn' - supports the following types:
- 1. String - expected to be in the format '/__'.
- Expected use-case for string would be in the event that a user copy pasted this function identifier
- from an external tool, ex. a jvm profiler
- 2. Symbol - expected to be in the format '/__.
- Has the same use-case as string, just adds flexibility to the type.
- 3. Function - expected to be the actual function from the Session
- This covers a use-case where the user can capture the function being used and programmatically
- trace it back to the rules being executed."
- [session node-fn]
- (let [fn-name-str (cond
- (string? node-fn)
- node-fn
-
- (fn? node-fn)
- (str node-fn)
-
- (symbol? node-fn)
- (str node-fn)
-
- :else
- (throw (ex-info "Unsupported type for 'node-fn-name->production-name'"
- {:type (type node-fn)
- :supported-types ["string" "symbol" "fn"]})))
- fn-name-str (-> fn-name-str demunge (str/split #"/") last)
-
- simple-fn-name (-> (or (re-find #"(.+)--\d+" fn-name-str) ;; anonymous function
- (re-find #"(.+)" fn-name-str)) ;; regular function
- last)
-
- [node-abr node-id _] (str/split simple-fn-name #"-")]
- ;; used as a sanity check that the fn provided is in the form expected, ie. --
- (if (contains? inverted-type-lookup node-abr)
- (if-let [node (-> (eng/components session)
- :rulebase
- :id-to-node
- (get (Long/valueOf ^String node-id)))]
- (if (= ProductionNode (type node))
- [(-> node :production :name)]
- (if-let [production-names (seq (eng/node-rule-names (some-fn :production :query) node))]
- production-names
-          ;; This should be unreachable, but I am leaving it here in case the rulebase is somehow corrupted
- (throw (ex-info "Unable to determine suitable name from node"
- {:node node}))))
- (throw (ex-info "Node-id not found in rulebase"
- {:node-id node-id
- :simple-name simple-fn-name})))
- (throw (ex-info "Unable to determine node from function"
- {:name node-fn
- :simple-name simple-fn-name}))))))
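
A short usage sketch for the inspection API above (illustrative; fired-session stands for any session on which fire-rules has already run, and cold-rule is the rule from the earlier sketch):

    (require '[clara.tools.inspect :as inspect])

    ;; Print a human-readable explanation of why each rule matched.
    (inspect/explain-activations fired-session)

    ;; Or pull the raw explanations for a single rule out of the inspection map.
    (get-in (inspect/inspect fired-session) [:rule-matches cold-rule])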
diff --git a/build/clara/tools/internal/inspect.clj b/build/clara/tools/internal/inspect.clj
deleted file mode 100644
index a6b33498..00000000
--- a/build/clara/tools/internal/inspect.clj
+++ /dev/null
@@ -1,80 +0,0 @@
-(ns clara.tools.internal.inspect
- "Internal implementation details of session inspection. Nothing in this namespace
- should be directly referenced by callers outside of the clara-rules project."
- (:require [clara.rules.listener :as l]
- [clara.rules.engine :as eng]))
-
-(declare to-persistent-listener)
-
-(deftype TransientActivationListener [activations]
- l/ITransientEventListener
- (fire-activation! [listener activation resulting-operations]
- (swap! (.-activations listener) conj {:activation activation
- :resulting-operations resulting-operations})
- listener)
- (to-persistent! [listener]
- (to-persistent-listener @(.-activations listener)))
-
- ;; The methods below don't do anything; they aren't needed for this functionality.
- (left-activate! [listener node tokens]
- listener)
- (left-retract! [listener node tokens]
- listener)
- (right-activate! [listener node elements]
- listener)
- (right-retract! [listener node elements]
- listener)
- (insert-facts! [listener node token facts]
- listener)
- (alpha-activate! [listener node facts]
- listener)
- (insert-facts-logical! [listener node token facts]
- listener)
- (retract-facts! [listener node token facts]
- listener)
- (alpha-retract! [listener node facts]
- listener)
- (retract-facts-logical! [listener node token facts]
- listener)
- (add-accum-reduced! [listener node join-bindings result fact-bindings]
- listener)
- (remove-accum-reduced! [listener node join-bindings fact-bindings]
- listener)
- (add-activations! [listener node activations]
- listener)
- (remove-activations! [listener node activations]
- listener)
- (activation-group-transition! [listener previous-group new-group]
- listener)
- (fire-rules! [listener node]
- listener))
-
-(deftype PersistentActivationListener [activations]
- l/IPersistentEventListener
- (to-transient [listener]
- (TransientActivationListener. (atom activations))))
-
-(defn to-persistent-listener
- [activations]
- (PersistentActivationListener. activations))
-
-(defn with-activation-listening
- [session]
- (if (empty? (eng/find-listeners session (partial instance? PersistentActivationListener)))
- (eng/with-listener session (PersistentActivationListener. []))
- session))
-
-(defn without-activation-listening
- [session]
- (eng/remove-listeners session (partial instance? PersistentActivationListener)))
-
-(defn get-activation-info
- [session]
- (let [matching-listeners (eng/find-listeners session (partial instance? PersistentActivationListener))]
- (condp = (count matching-listeners)
- 0 nil
- 1 (-> matching-listeners ^PersistentActivationListener (first) .-activations)
- (throw (ex-info "Found more than one PersistentActivationListener on session"
- {:session session})))))
-
-
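
This listener is what backs with-full-logging/without-full-logging in clara.tools.inspect. A hedged sketch of that flow (session and fact types reused from the earlier sketches):

    (require '[clara.rules :refer [insert fire-rules]]
             '[clara.tools.inspect :as inspect])

    (-> session
        (inspect/with-full-logging)   ; attaches a PersistentActivationListener
        (insert (->Temperature 20 "MCI"))
        (fire-rules)
        (inspect/inspect)
        :unfiltered-rule-matches)     ; present only while full logging is enabled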
diff --git a/build/clara/tools/loop_detector.clj b/build/clara/tools/loop_detector.clj
deleted file mode 100644
index bbe14fe4..00000000
--- a/build/clara/tools/loop_detector.clj
+++ /dev/null
@@ -1,102 +0,0 @@
-(ns clara.tools.loop-detector
- (:require [clara.rules.listener :as l]
- [clara.rules.engine :as eng]
- [clara.tools.tracing :as trace]))
-
-;; Although we use a single type here, note that the cycles-count and on-limit-delay fields
-;; will be nil during the persistent state of the listener.
-(deftype CyclicalRuleListener [cycles-count max-cycles on-limit-fn on-limit-delay]
- l/ITransientEventListener
- (left-activate! [listener node tokens]
- listener)
- (left-retract! [listener node tokens]
- listener)
- (right-activate! [listener node elements]
- listener)
- (right-retract! [listener node elements]
- listener)
- (insert-facts! [listener node token facts]
- listener)
- (alpha-activate! [listener node facts]
- listener)
- (insert-facts-logical! [listener node token facts]
- listener)
- (retract-facts! [listener node token facts]
- listener)
- (alpha-retract! [listener node facts]
- listener)
- (retract-facts-logical! [listener node token facts]
- listener)
- (add-accum-reduced! [listener node join-bindings result fact-bindings]
- listener)
- (remove-accum-reduced! [listener node join-bindings fact-bindings]
- listener)
- (add-activations! [listener node activations]
- listener)
- (remove-activations! [listener node activations]
- listener)
- (fire-activation! [listener activation resulting-operations]
- listener)
- (fire-rules! [listener node]
- listener)
- (activation-group-transition! [listener original-group new-group]
- (when (>= @cycles-count max-cycles)
- @on-limit-delay)
- (swap! cycles-count inc))
- (to-persistent! [listener]
- (CyclicalRuleListener. nil max-cycles on-limit-fn nil))
-
- l/IPersistentEventListener
- (to-transient [listener]
- ;; To-transient will be called when a call to fire-rules begins, and to-persistent! will be called when it ends.
- ;; The resetting of the cycles-count atom prevents cycles from one call of fire-rules from leaking into the count
- ;; for another. Similarly the on-limit-fn should be invoked 1 or 0 times per fire-rules call. We only call
- ;; it once, rather than each time the limit is breached, since it may not cause the call to terminate but rather log
- ;; something etc., in which case we don't want to spam the user's logs.
- (CyclicalRuleListener. (atom 0) max-cycles on-limit-fn (delay (on-limit-fn)))))
-
-(defn throw-exception-on-max-cycles
- []
- (let [trace (trace/listener->trace (l/to-persistent! (:listener eng/*current-session*)))]
-    (throw (ex-info "Reached maximum activation group transitions threshold; an infinite loop is suspected"
- (cond-> {:clara-rules/infinite-loop-suspected true}
- trace (assoc :trace trace))))))
-
-(defn ->standard-out-warning
- []
-  (println "Reached maximum activation group transitions threshold; an infinite loop is suspected"))
-
-(defn on-limit-fn-lookup
- [fn-or-keyword]
- (cond
- (= fn-or-keyword :throw-exception) throw-exception-on-max-cycles
- (= fn-or-keyword :standard-out-warning) ->standard-out-warning
- (ifn? fn-or-keyword) fn-or-keyword
- :else (throw (ex-info "The :on-error-fn must be a non-nil function value" {:clara-rules/max-cycles-exceeded-fn fn-or-keyword}))))
-
-(defn with-loop-detection
- "Detect suspected infinite loops in the session.
-
- Max-cycles is the maximum
- number of transitions permitted between different activation groups (salience levels)
- plus the number of times all rules are evaluated and their facts inserted, thus leading
- to another cycle of rules activations in the same activation group.
-
- on-limit-fn is a 0-arg function that is invoked exactly once when this limit is exceeded. It can either be
- a user-provided function or a keyword that indicates a built-in function to use. Currently supported keywords are:
-
- :throw-exception - This throws an exception when the limit is reached. If tracing is enabled, the exception will include
- the trace.
-
- :standard-out-warning - This prints a warning to standard out."
-
- [session max-cycles on-limit-fn]
-
- (let [on-limit-fn-normalized (on-limit-fn-lookup on-limit-fn)]
- (eng/with-listener
- session
- (CyclicalRuleListener.
- nil
- max-cycles
- on-limit-fn-normalized
- nil))))
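
A usage sketch for the loop detector (illustrative; session and fact types are reused from the earlier sketches):

    (require '[clara.rules :refer [insert fire-rules]]
             '[clara.tools.loop-detector :as ld])

    (-> session
        ;; Throw (including the trace when tracing is enabled) after 3000
        ;; activation-group transitions, a sign of a runaway rule loop.
        (ld/with-loop-detection 3000 :throw-exception)
        (insert (->Temperature 20 "MCI"))
        (fire-rules))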
diff --git a/build/clara/tools/testing_utils.clj b/build/clara/tools/testing_utils.clj
deleted file mode 100644
index 1c62dd25..00000000
--- a/build/clara/tools/testing_utils.clj
+++ /dev/null
@@ -1,204 +0,0 @@
-(ns clara.tools.testing-utils
- "Internal utilities for testing clara-rules and derivative projects. These should be considered experimental
- right now from the perspective of consumers of clara-rules, although it is possible that this namespace
- will be made part of the public API once its functionality has proven robust and reliable. The focus, however,
- is functionality needed to test the rules engine itself."
- (:require [clara.rules.update-cache.core :as uc]
- [clara.rules.update-cache.cancelling :as ca]
- [clara.rules.compiler :as com]
- [clara.rules.dsl :as dsl]
- [clojure.test :refer [is]]))
-
-(defmacro def-rules-test
- "This macro allows creation of rules, queries, and sessions from arbitrary combinations of rules
- and queries in a setup map without the necessity of creating a namespace or defining a session
- using defsession in both Clojure and ClojureScript. The first argument is the name of the test,
- and the second argument is a map with entries :rules, :queries, and :sessions. For example usage see
- clara.test-testing-utils. Note that sessions currently can only contain rules and queries defined
- in the setup map; supporting other rule sources such as namespaces and defrule/defquery may be possible
- in the future.
-
- Namespaces consuming this macro are expected to require clara.rules and either clojure.test or cljs.test.
-  Unfortunately, at this time we can't add inline requires for these namespaces with the macroexpanded code in
- ClojureScript; see https://anmonteiro.com/2016/10/clojurescript-require-outside-ns/ for some discussion on the
- subject. However, the test namespaces consuming this will in all likelihood have these dependencies anyway
- so this probably isn't a significant shortcoming of this macro."
- [name params & forms]
- (let [sym->rule (->> params
- :rules
- (partition 2)
- (into {}
- (map (fn [[rule-name [lhs rhs props]]]
- [rule-name (assoc (dsl/parse-rule* lhs rhs props {}) :name (str rule-name))]))))
-
- sym->query (->> params
- :queries
- (partition 2)
- (into {}
- (map (fn [[query-name [params lhs]]]
- [query-name (assoc (dsl/parse-query* params lhs {}) :name (str query-name))]))))
-
- production-syms->productions (fn [p-syms]
- (map (fn [s]
- (or (get sym->rule s)
- (get sym->query s)))
- p-syms))
-
- session-syms->session-forms (->> params
- :sessions
- (partition 3)
- (into []
- (comp (map (fn [[session-name production-syms session-opts]]
- [session-name (production-syms->productions production-syms) session-opts]))
- (map (fn [[session-name productions session-opts]]
- [session-name `(com/mk-session ~(into [(vec productions)]
- cat
- session-opts))]))
- cat)))
-
- test-form `(clojure.test/deftest
- ~name
- (let [~@session-syms->session-forms
- ~@(sequence cat sym->query)
- ~@(sequence cat sym->rule)]
- ~@forms))]
- test-form))
-
-(defn opts-fixture
- ;; For operations other than replace-facts uc/get-ordered-update-cache is currently
- ;; always used. This fixture ensures that CancellingUpdateCache is tested for a wide
-  ;; variety of different cases rather than a few cases specific to it.
- [f]
- (f)
- (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
- (f)))
-
-(defn join-filter-equals
- "Intended to be a test function that is the same as equals, but is not visible to Clara as such
- and thus forces usage of join filters instead of hash joins"
- [& args]
- (apply = args))
-
-(def side-effect-holder (atom nil))
-
-(defn side-effect-holder-fixture
- "Fixture to reset the side effect holder to nil both before and after tests.
- This should be used as a :each fixture."
- [t]
- (reset! side-effect-holder nil)
- (t)
- (reset! side-effect-holder nil))
-
-(defn time-execution
- [func]
- (let [start (System/currentTimeMillis)
- _ (func)
- stop (System/currentTimeMillis)]
- (- stop start)))
-
-(defn execute-tests
- [func iterations]
- (let [execution-times (for [_ (range iterations)]
- (time-execution func))
- sum #(reduce + %)
- mean (/ (sum execution-times) iterations)
- std (->
- (into []
- (comp
- (map #(- % mean))
- (map #(Math/pow (double %) 2.0)))
- execution-times)
- sum
- (/ iterations)
- Math/sqrt)]
- {:std (double std)
- :mean (double mean)}))
-
-(defn run-performance-test
-  "Created as a rudimentary alternative to criterium, due to assumptions criterium makes during benchmarking. Specifically,
-   criterium attempts to reach a steady state of compiled and loaded classes, which fundamentally doesn't work when the
-   metrics needed rely on compilation or evaluation."
- [form]
- (let [{:keys [description func iterations mean-assertion verbose]} form
- {:keys [std mean]} (execute-tests func iterations)]
- (when verbose
- (println (str \newline "Running Performance tests for:"))
- (println description)
- (println "==========================================")
- (println (str "Mean: " mean "ms"))
- (println (str "Standard Deviation: " std "ms" \newline)))
- (is (mean-assertion mean)
- (str "Actual mean value: " mean))
- {:mean mean
- :std std}))
-
-(defn ex-data-search
- ([^Exception e edata]
- (ex-data-search e nil edata))
- ([^Exception e emsg edata]
- (loop [non-matches []
- e e]
- (cond
- ;; Found match.
- (and (= edata
- (select-keys (ex-data e)
- (keys edata)))
- (or (= emsg
- (.getMessage e))
- (nil? emsg)))
- :success
-
- ;; Keep searching, record any non-matching ex-data.
- (.getCause e)
- (recur (if-let [ed (ex-data e)]
- (conj non-matches {(.getMessage e) ed})
- non-matches)
- (.getCause e))
-
- ;; Can't find a match.
- :else
- non-matches))))
-
-(defn get-all-ex-data
- "Walk a Throwable chain and return a sequence of all data maps
- from any ExceptionInfo instances in that chain."
- [e]
- (let [get-ex-chain (fn get-ex-chain [e]
- (if-let [cause (.getCause e)]
- (conj (get-ex-chain cause) e)
- [e]))]
-
- (map ex-data
- (filter (partial instance? clojure.lang.IExceptionInfo)
- (get-ex-chain e)))))
-
-(defmacro assert-ex-data
- ([expected-ex-data form]
- `(assert-ex-data nil ~expected-ex-data ~form))
- ([expected-ex-message expected-ex-data form]
- `(try
- ~form
- (is false
- (str "Exception expected to be thrown when evaluating: " \newline
- '~form))
- (catch Exception e#
- (let [res# (ex-data-search e# ~expected-ex-message ~expected-ex-data)]
- (is (= :success res#)
- (str "Exception msg found: " \newline
- e# \newline
- "Non matches found: " \newline
- res#)))))))
-
-(defn ex-data-maps
- "Given a throwable/exception/error `t`, return all `ex-data` maps from the stack trace cause chain in
- the order they occur traversing the chain from this `t` through the rest of the call stack."
- [t]
- (let [append-self (fn append-self
- [prior t1]
- (if t1
- (append-self (conj prior t1) (.getCause ^Throwable t1))
- prior))
- throwables (append-self [] t)]
- (into []
- (comp (map ex-data))
- throwables)))
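
For reference, a minimal def-rules-test sketch matching the setup-map shape the macro expects (the rule, query, and record types are the ones invented in the earlier sketches):

    (require '[clara.rules :refer [insert insert! fire-rules query]]
             '[clara.tools.testing-utils :refer [def-rules-test]]
             '[clojure.test :refer [is]])

    (def-rules-test test-cold-is-derived
      {:rules [cold-rule [[[Temperature (< temperature 32) (= ?loc location)]]
                          (insert! (->Cold ?loc))]]
       :queries [cold-query [[] [[Cold (= ?loc location)]]]]
       :sessions [empty-session [cold-rule cold-query] {}]}
      (let [session (-> empty-session
                        (insert (->Temperature 20 "MCI"))
                        (fire-rules))]
        (is (= [{:?loc "MCI"}]
               (query session cold-query)))))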
diff --git a/build/clara/tools/tracing.clj b/build/clara/tools/tracing.clj
deleted file mode 100644
index 4cc6d955..00000000
--- a/build/clara/tools/tracing.clj
+++ /dev/null
@@ -1,175 +0,0 @@
-(ns clara.tools.tracing
- "Support for tracing state changes in a Clara session."
- (:require [clara.rules.listener :as l]
- [clara.rules.engine :as eng]))
-
-(declare to-tracing-listener)
-
-(deftype PersistentTracingListener [trace]
- l/IPersistentEventListener
- (to-transient [listener]
- (to-tracing-listener listener)))
-
-(declare append-trace)
-
-(deftype TracingListener [trace]
- l/ITransientEventListener
- (left-activate! [listener node tokens]
- (append-trace listener {:type :left-activate :node-id (:id node) :tokens tokens}))
-
- (left-retract! [listener node tokens]
- (append-trace listener {:type :left-retract :node-id (:id node) :tokens tokens}))
-
- (right-activate! [listener node elements]
- (append-trace listener {:type :right-activate :node-id (:id node) :elements elements}))
-
- (right-retract! [listener node elements]
- (append-trace listener {:type :right-retract :node-id (:id node) :elements elements}))
-
- (insert-facts! [listener node token facts]
- (append-trace listener {:type :add-facts :node node :token token :facts facts}))
-
- (alpha-activate! [listener node facts]
- (append-trace listener {:type :alpha-activate :facts facts}))
-
- (insert-facts-logical! [listener node token facts]
- (append-trace listener {:type :add-facts-logical :node node :token token :facts facts}))
-
- (retract-facts! [listener node token facts]
- (append-trace listener {:type :retract-facts :node node :token token :facts facts}))
-
- (alpha-retract! [listener node facts]
- (append-trace listener {:type :alpha-retract :facts facts}))
-
- (retract-facts-logical! [listener node token facts]
- (append-trace listener {:type :retract-facts-logical :node node :token token :facts facts}))
-
- (add-accum-reduced! [listener node join-bindings result fact-bindings]
- (append-trace listener {:type :accum-reduced
- :node-id (:id node)
- :join-bindings join-bindings
- :result result
- :fact-bindings fact-bindings}))
-
- (remove-accum-reduced! [listener node join-bindings fact-bindings]
- (append-trace listener {:type :remove-accum-reduced
- :node-id (:id node)
- :join-bindings join-bindings
- :fact-bindings fact-bindings}))
-
- (add-activations! [listener node activations]
- (append-trace listener {:type :add-activations :node-id (:id node) :tokens (map :token activations)}))
-
- (remove-activations! [listener node activations]
- (append-trace listener {:type :remove-activations :node-id (:id node) :activations activations}))
-
- (fire-activation! [listener activation resulting-operations]
- (append-trace listener {:type :fire-activation :activation activation :resulting-operations resulting-operations}))
-
- (fire-rules! [listener node]
- (append-trace listener {:type :fire-rules :node-id (:id node)}))
-
- (activation-group-transition! [listener previous-group new-group]
- (append-trace listener {:type :activation-group-transition :new-group new-group :previous-group previous-group}))
-
- (to-persistent! [listener]
- (PersistentTracingListener. @trace)))
-
-(defn- to-tracing-listener [^PersistentTracingListener listener]
- (TracingListener. (atom (.-trace listener))))
-
-(defn- append-trace
- "Appends a trace event and returns a new listener with it."
- [^TracingListener listener event]
- (reset! (.-trace listener) (conj @(.-trace listener) event)))
-
-(defn tracing-listener
- "Creates a persistent tracing event listener"
- []
- (PersistentTracingListener. []))
-
-(defn is-tracing?
- "Returns true if the given session has tracing enabled, false otherwise."
- [session]
- (let [{:keys [listeners]} (eng/components session)]
- (boolean (some #(instance? PersistentTracingListener %) listeners))))
-
-(defn with-tracing
- "Returns a new session identical to the given one, but with tracing enabled.
- The given session is returned unmodified if tracing is already enabled."
- [session]
- (if (is-tracing? session)
- session
- (eng/with-listener session (PersistentTracingListener. []))))
-
-(defn without-tracing
-  "Returns a new session identical to the given one, but with tracing disabled.
- The given session is returned unmodified if tracing is already disabled."
- [session]
- (eng/remove-listeners session (partial instance? PersistentTracingListener)))
-
-(defn get-trace
- "Returns the trace from the given session."
- [session]
- (if-let [listener (->> (eng/components session)
- :listeners
- (filter #(instance? PersistentTracingListener %))
- (first))]
- (.-trace ^PersistentTracingListener listener)
- (throw (ex-info "No tracing listener attached to session." {:session session}))))
-
-(defn listener->trace
- [listener]
- (let [tracing-listener (cond
- (instance? PersistentTracingListener listener)
- listener
-
- (some (partial instance? PersistentTracingListener) (l/flatten-listener listener))
- (first (filter (partial instance? PersistentTracingListener) (l/flatten-listener listener))))]
- (when tracing-listener
- (.-trace ^PersistentTracingListener tracing-listener))))
-
-(defn ^:private node-id->productions
- "Given a session and a node ID return a list of the rule and query names associated
- with the node."
- [session id]
- (let [node (-> session eng/components :rulebase :id-to-node (get id))]
- (into []
- (comp
- (map second)
- cat
- (map second))
- (eng/get-conditions-and-rule-names node))))
-
-(defn ranked-productions
- "Given a session with tracing enabled, return a map of rule and query names
- to a numerical index that represents an approximation of the proportional
- amount of times Clara performed processing related to this rule. This
- is not intended to have a precise meaning, and is intended solely as a means
- to provide a rough guide to which rules and queries should be considered
- the first suspects when diagnosing performance problems in rules sessions.
- It is possible for a relatively small number of interactions to take a long
- time if those interactions are particularly costly. It is expected that
- the results may change between different versions when Clara's internals change,
- for example to optimize the rules network. Nevertheless, it is anticipated
- that this will provide useful information for a first pass at rules
- performance problem debugging. This should not be used to drive user logic.
-
- This currently returns a Clojure array map in order to conveniently have the rules
- with the most interactions printed first in the string representation of the map."
- [session]
- (let [node-ids (->> session
- get-trace
- (map :node-id))
-
- production-names (into []
- (comp
- (map (partial node-id->productions session))
- cat)
- node-ids)
-
- production-name->interactions (frequencies production-names)
-
- ranked-tuples (reverse (sort-by second production-name->interactions))]
-
- (apply array-map (into [] cat ranked-tuples))))
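
A usage sketch for the tracing listener (illustrative; session and fact types are reused from the earlier sketches):

    (require '[clara.rules :refer [insert fire-rules]]
             '[clara.tools.tracing :as tracing])

    (-> session
        (tracing/with-tracing)
        (insert (->Temperature 20 "MCI"))
        (fire-rules)
        (tracing/get-trace))
    ;; => a vector of events such as {:type :add-activations :node-id 3 ...};
    ;;    ranked-productions can then summarize which rules did the most work.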
diff --git a/build/clj-kondo.exports/clara/rules/config.edn b/build/clj-kondo.exports/clara/rules/config.edn
deleted file mode 100644
index 6b946f6a..00000000
--- a/build/clj-kondo.exports/clara/rules/config.edn
+++ /dev/null
@@ -1,7 +0,0 @@
-{:lint-as {clara.rules/defsession clojure.core/def
- clara.rules.platform/eager-for clojure.core/for}
- :hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
- clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
- clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
- clara.rules.dsl/parse-rule hooks.clara-rules/analyze-parse-rule-macro
- clara.tools.testing-utils/def-rules-test hooks.clara-rules/analyze-def-rules-test-macro}}}
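
Once clj-kondo copies these exports into a consuming project (for example via the --copy-configs flag used by the Makefile's lint target), that project references them from its own .clj-kondo/config.edn roughly as follows (the path is assumed from the export directory layout):

    {:config-paths ["clara/rules"]}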
diff --git a/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
deleted file mode 100644
index 0d5abbcc..00000000
--- a/build/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ /dev/null
@@ -1,444 +0,0 @@
-(ns hooks.clara-rules
- (:require [clj-kondo.hooks-api :as api]
- [clojure.string :as str]
- [clojure.set :as set]))
-
-(defn node-value
- [node]
- (when node
- (api/sexpr node)))
-
-(defn node-type
- [node]
- (when node
- (api/tag node)))
-
-(defn- binding-node?
-  "determine if a symbol is a clara-rules binding symbol, i.e. a symbol whose name starts with `?`"
- [node]
- (let [node-name (node-value node)]
- (and (symbol? node-name)
- (str/starts-with? (str node-name) "?"))))
-
-(defn- fact-result-node?
- [node]
- (some-> node meta ::fact-result))
-
-(defn- special-binding-node?
-  "determine if a symbol is a clara-rules special binding symbol, i.e. a symbol whose name starts with `?__` and ends with `__`"
- [node]
- (let [node-name (node-value node)
- node-name-str (str node-name)]
- (and (symbol? node-name)
- (str/starts-with? node-name-str "?__")
- (str/ends-with? node-name-str "__"))))
-
-(defn- extract-special-tokens
- [node-seq]
- (->> (reduce (fn [token-seq node]
- (cond
- (and (= :token (node-type node))
- (special-binding-node? node)
- (nil? (namespace (node-value node))))
- (cons node token-seq)
-
- (seq (:children node))
- (concat token-seq (extract-special-tokens (:children node)))
-
- :else token-seq)) [] node-seq)
- (set)
- (sort-by node-value)))
-
-(defn extract-arg-tokens
- [node-seq]
- (->> (reduce (fn [token-seq node]
- (cond
- (and (= :token (node-type node))
- (symbol? (node-value node))
- (not (binding-node? node))
- (nil? (namespace (node-value node))))
- (cons node token-seq)
-
- (seq (:children node))
- (concat token-seq (extract-arg-tokens (:children node)))
-
- :else token-seq)) [] node-seq)
- (set)
- (sort-by node-value)))
-
-(defn analyze-constraints
-  "sequentially analyzes the constraint expressions of clara rules and queries
-   defined via defrule or defquery by walking their child lhs
-   expressions and bindings."
- [fact-node condition allow-bindings? prev-bindings input-token production-args]
- (let [[condition-args constraint-seq]
- (cond
- (= :vector (node-type (first condition)))
- [(first condition) (rest condition)]
-
- (symbol? (node-value fact-node))
- [(api/vector-node (vec (extract-arg-tokens condition))) condition]
-
- :else [(api/vector-node []) condition])
- cond-binding-set (set (map node-value (:children condition-args)))
- ;;; if `this` bindings are not explicit, then add them anyways
- [this-input-bindings
- this-output-bindings] (when-not (contains? cond-binding-set 'this)
- [[[(api/token-node 'this) input-token]]
- [[(api/token-node '_) (api/token-node 'this)]]])
- args-binding-set (set (map node-value (:children production-args)))
- prev-bindings-set (->> (mapcat (comp :children first) prev-bindings)
- (filter binding-node?)
- (map node-value)
- (set))
- constraint-bindings
- (loop [[constraint-expr & more] constraint-seq
- bindings []
- bindings-set (set/union prev-bindings-set args-binding-set)]
- (if (nil? constraint-expr)
- bindings
- (let [constraint (:children constraint-expr)
- binding-nodes (let [binding-tokens (seq (filter binding-node? (rest constraint)))
- match-bindings-set (set/intersection (set (map node-value binding-tokens)) bindings-set)]
- (when (and allow-bindings?
- (contains? #{'= '==} (node-value (first constraint)))
- (or (seq (filter (complement binding-node?) (rest constraint)))
- (not-empty match-bindings-set)))
- binding-tokens))
- next-bindings-set (-> (set (map node-value binding-nodes))
- (set/difference bindings-set))
- binding-expr-nodes (seq (filter (comp next-bindings-set node-value) binding-nodes))
- [next-bindings-set next-bindings]
- (if binding-nodes
- [next-bindings-set
- (cond->> [[(api/vector-node
- (vec binding-nodes))
- constraint-expr]]
- binding-expr-nodes
- (concat [[(api/vector-node
- (vec binding-expr-nodes))
- input-token]]))]
- [#{}
- [[(api/vector-node
- [(api/token-node '_)])
- constraint-expr]]])]
- (recur more
- (concat bindings next-bindings)
- (set/union bindings-set next-bindings-set)))))
-
- input-bindings (when-not (empty? (node-value condition-args))
- [[condition-args input-token]])]
- (concat input-bindings this-input-bindings constraint-bindings this-output-bindings)))
-
-(defn analyze-conditions
-  "sequentially analyzes the condition expressions of clara rules and queries
-   defined via defrule and defquery, taking into account the optional
-   result binding and optional args bindings and walking
-   their child constraint expressions."
- [condition-seq allow-bindings? prev-bindings input-token production-args]
- (loop [[condition-expr & more] condition-seq
- bindings []]
- (if (nil? condition-expr)
- bindings
- (let [condition (:children condition-expr)
- [result-token fact-node & condition] (if (= '<- (-> condition second node-value))
- (cons (api/vector-node
- [(vary-meta
- (first condition)
- assoc ::fact-result true)]) (nnext condition))
- (cons (api/vector-node
- [(api/token-node '_)]) condition))
- condition-bindings (cond
- (nil? condition)
- []
-
- (contains? #{:not} (node-value fact-node))
- (analyze-conditions condition false (concat prev-bindings bindings) input-token production-args)
-
- (contains? #{:or :and :exists} (node-value fact-node))
- (analyze-conditions condition allow-bindings? (concat prev-bindings bindings) input-token production-args)
-
- (and (= :list (node-type fact-node))
- (= :from (-> condition first node-value)))
- (analyze-conditions (rest condition) allow-bindings? (concat prev-bindings bindings) input-token production-args)
-
- :else
- (analyze-constraints fact-node condition allow-bindings? (concat prev-bindings bindings) input-token production-args))
- condition-tokens (->> (mapcat first condition-bindings)
- (filter binding-node?))
- result-vector (api/vector-node (vec (list* fact-node condition-tokens)))
- result-bindings [[result-token result-vector]]
- output-bindings (concat condition-bindings result-bindings)
- condition-output (->> (mapcat (comp :children first) output-bindings)
- (filter binding-node?)
- (set)
- (sort-by node-value))
- output-node (api/vector-node
- (if (empty? condition-output)
- [(api/token-node '_)]
- (vec condition-output)))
- output-result-node (api/vector-node
- (if (empty? condition-output)
- [(api/token-node nil)]
- (vec condition-output)))
- next-bindings [output-node
- (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec (apply concat output-bindings)))
- output-result-node))]]
- (recur more (concat bindings [next-bindings]))))))
-
-(defn analyze-parse-query-macro
- "analyze clara-rules parse-query macro"
- [{:keys [:node]}]
- (let [input-token (api/token-node (gensym 'input))
- input-args (api/vector-node
- [input-token])
- [args conditions-node] (rest (:children node))
- condition-seq (:children conditions-node)
- special-tokens (extract-special-tokens condition-seq)
- special-args (when (seq special-tokens)
- (api/vector-node
- (vec special-tokens)))
- transformed-args (for [arg (:children args)]
- (let [v (node-value arg)]
- (if (keyword? v)
- (api/token-node (symbol v))
- arg)))
- production-args (api/vector-node
- (if (empty? transformed-args)
- [(api/token-node '_)]
- (vec transformed-args)))
- condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
- production-bindings (apply concat
- (when special-args
- [special-args input-token
- (api/token-node '_) special-args])
- [production-args input-token]
- condition-bindings)
- production-output (->> (mapcat (comp :children first) condition-bindings)
- (filter binding-node?)
- (set)
- (sort-by node-value))
- production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))))
- fn-node (api/list-node
- (list
- (api/token-node 'fn)
- input-args
- production-result))
- new-node (api/map-node
- [(api/keyword-node :production) fn-node])]
- {:node new-node}))
-
-(defn analyze-defquery-macro
- "analyze clara-rules defquery macro"
- [{:keys [:node]}]
- (let [[production-name & children] (rest (:children node))
- production-docs (when (= :token (node-type (first children)))
- (first children))
- children (if production-docs (rest children) children)
- production-opts (when (= :map (node-type (first children)))
- (first children))
- input-token (api/token-node (gensym 'input))
- input-args (api/vector-node
- [input-token])
- [args & condition-seq] (if production-opts (rest children) children)
- special-tokens (extract-special-tokens condition-seq)
- special-args (when (seq special-tokens)
- (api/vector-node
- (vec special-tokens)))
- transformed-args (for [arg (:children args)]
- (let [v (node-value arg)
- m (meta arg)]
- (if (keyword? v)
- (cond-> (api/token-node (symbol v))
- (not-empty m)
- (vary-meta merge m))
- arg)))
- production-args (api/vector-node
- (if (empty? transformed-args)
- [(api/token-node '_)]
- (vec transformed-args)))
- condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
- production-bindings (apply concat
- (when special-args
- [special-args input-token
- (api/token-node '_) special-args])
- [production-args input-token]
- condition-bindings)
- production-output (->> (mapcat (comp :children first) condition-bindings)
- (filter binding-node?)
- (set)
- (sort-by node-value))
- production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))))
- fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
- production-docs (concat [production-docs])
- :always (concat [input-args])
- production-opts (concat [production-opts])
- :always (concat [production-result])))
- new-node (vary-meta
- (api/list-node
- (list
- (api/token-node 'def) production-name
- (api/map-node
- [(api/keyword-node :production) fn-node])))
- merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
- {:node new-node}))
-
-(defn analyze-parse-rule-macro
- "analyze clara-rules parse-rule macro"
- [{:keys [:node]}]
- (let [input-token (api/token-node (gensym 'input))
- input-args (api/vector-node
- [input-token])
- empty-args (api/vector-node [])
- production-seq (rest (:children node))
- special-tokens (extract-special-tokens production-seq)
- special-args (when (seq special-tokens)
- (api/vector-node
- (vec special-tokens)))
- [conditions-node body-node] production-seq
- condition-seq (:children conditions-node)
- condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
- production-bindings (apply concat
- (when special-args
- [special-args input-token
- (api/token-node '_) special-args])
- [(api/token-node '_) input-token]
- condition-bindings)
- production-output (->> (mapcat (comp :children first) condition-bindings)
- (filter binding-node?)
- (remove fact-result-node?)
- (set)
- (sort-by node-value))
- production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))
- body-node))
- fn-node (api/list-node
- (list
- (api/token-node 'fn)
- input-args
- production-result))
- new-node (api/map-node
- [(api/keyword-node :production) fn-node])]
- {:node new-node}))
-
-(defn analyze-defrule-macro
- "analyze clara-rules defrule macro"
- [{:keys [:node]}]
- (let [[production-name & children] (rest (:children node))
- production-docs (when (= :token (node-type (first children)))
- (first children))
- children (if production-docs (rest children) children)
- production-opts (when (= :map (node-type (first children)))
- (first children))
- input-token (api/token-node (gensym 'input))
- input-args (api/vector-node
- [input-token])
- empty-args (api/vector-node [])
- production-seq (if production-opts (rest children) children)
- special-tokens (extract-special-tokens production-seq)
- special-args (when (seq special-tokens)
- (api/vector-node
- (vec special-tokens)))
- [body-seq _ condition-seq] (->> (partition-by (comp #{'=>} node-value) production-seq)
- (reverse))
- condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
- production-bindings (apply concat
- (when special-args
- [special-args input-token
- (api/token-node '_) special-args])
- [(api/token-node '_) input-token]
- condition-bindings)
- production-output (->> (mapcat (comp :children first) condition-bindings)
- (filter binding-node?)
- (remove fact-result-node?)
- (set)
- (sort-by node-value))
- production-result (api/list-node
- (list*
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))
- body-seq))
- fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
- production-docs (concat [production-docs])
- :always (concat [input-args])
- production-opts (concat [production-opts])
- :always (concat [production-result])))
- new-node (vary-meta
- (api/list-node
- (list
- (api/token-node 'def) production-name
- (api/map-node
- [(api/keyword-node :production) fn-node])))
- merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
- {:node new-node}))
-
-
-(defn analyze-def-rules-test-macro
- [{:keys [:node]}]
- (let [[test-name test-params & test-body] (rest (:children node))
- {:keys [rules
- queries
- sessions]} (->> (:children test-params)
- (partition 2)
- (map (juxt (comp api/sexpr first) last))
- (into {}))
- rules-seq (for [[name-node rule-node] (partition 2 (:children rules))]
- [name-node (analyze-parse-rule-macro
- {:node (api/list-node
- (list*
- (api/token-node 'clara.rules.dsl/parse-rule)
- (:children rule-node)))})])
- query-seq (for [[name-node query-node] (partition 2 (:children queries))]
- [name-node (analyze-parse-query-macro
- {:node (api/list-node
- (list*
- (api/token-node 'clara.rules.dsl/parse-query)
- (:children query-node)))})])
- session-seq (for [[name-node productions-node options-node] (partition 3 (:children sessions))
- :let [args-seq (concat (:children productions-node)
- (:children options-node))]]
- [name-node (api/list-node
- (list
- (api/token-node 'clara.rules.compiler/mk-session)
- (api/vector-node
- (vec args-seq))))])
- args-seq (->> (concat rules-seq query-seq session-seq)
- (apply concat))
- wrap-body (api/list-node
- (list*
- (api/token-node 'let)
- (api/vector-node
- (vec args-seq))
- test-body))
- new-node (api/list-node
- (list
- (api/token-node 'clojure.test/deftest)
- test-name
- wrap-body))]
- {:node new-node}))
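
To see what these hooks hand back to clj-kondo, they can be exercised at a REPL with clj-kondo.hooks-api/parse-string; a hedged sketch, assuming the hooks file is on the classpath (output shape paraphrased):

    (require '[clj-kondo.hooks-api :as api]
             '[hooks.clara-rules :as hooks])

    (-> {:node (api/parse-string
                "(defrule cold-rule
                   [Temperature (< temperature 32) (= ?loc location)]
                   =>
                   (insert! (->Cold ?loc)))")}
        (hooks/analyze-defrule-macro)
        :node
        api/sexpr)
    ;; => roughly (def cold-rule {:production (fn [input] (let [...] [?loc] ...))}),
    ;;    i.e. the rule's bindings become let bindings that clj-kondo can lint.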
From 83306cb62c134d928a01e0d39b7f4df082ddbc80 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:35:29 -0600
Subject: [PATCH 08/87] feat: add pom file
---
pom.xml | 46 ++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 46 insertions(+)
create mode 100644 pom.xml
diff --git a/pom.xml b/pom.xml
new file mode 100644
index 00000000..d42ad99e
--- /dev/null
+++ b/pom.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <packaging>jar</packaging>
+  <groupId>com.github.k13labs</groupId>
+  <artifactId>clara-rules</artifactId>
+  <version>0.9.0-SNAPSHOT</version>
+  <name>clara-rules</name>
+  <licenses>
+    <license>
+      <name>Apache-2.0</name>
+      <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
+    </license>
+  </licenses>
+  <dependencies>
+    <dependency>
+      <groupId>org.clojure</groupId>
+      <artifactId>clojure</artifactId>
+      <version>1.10.3</version>
+    </dependency>
+    <dependency>
+      <groupId>com.cnuernber</groupId>
+      <artifactId>ham-fisted</artifactId>
+      <version>2.014</version>
+    </dependency>
+    <dependency>
+      <groupId>prismatic</groupId>
+      <artifactId>schema</artifactId>
+      <version>1.4.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.clojure</groupId>
+      <artifactId>data.fressian</artifactId>
+      <version>1.0.0</version>
+    </dependency>
+  </dependencies>
+  <build>
+    <sourceDirectory>src/main/clojure</sourceDirectory>
+  </build>
+  <repositories>
+    <repository>
+      <id>clojars</id>
+      <url>https://repo.clojars.org/</url>
+    </repository>
+  </repositories>
+</project>
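
With these coordinates published, a consuming deps.edn would reference the artifact roughly as follows (the version shown is the SNAPSHOT from this pom):

    {:deps {com.github.k13labs/clara-rules {:mvn/version "0.9.0-SNAPSHOT"}}}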
From 04ecb7ab6d6c0c17f24e2973cd922c283b3346b7 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:36:00 -0600
Subject: [PATCH 09/87] chore: update ignore file
---
.gitignore | 1 -
1 file changed, 1 deletion(-)
diff --git a/.gitignore b/.gitignore
index 58e9522a..9785cd29 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,6 @@
/classes
/checkouts
/resources/public/js/*
-pom.xml
pom.xml.asc
*.jar
*.class
From c2986af883974014bed8497fc13535e9d6b229ed Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 20:38:04 -0600
Subject: [PATCH 10/87] feat: dont delete pom
---
Makefile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index aa534744..34a31047 100644
--- a/Makefile
+++ b/Makefile
@@ -22,7 +22,7 @@ test-config:
clojure -M:dev:test:runner --print-config
clean:
- rm -rf pom.xml target build
+ rm -rf target build
lint: compile-test-java
clojure -M:dev:test:clj-kondo --copy-configs --dependencies --parallel --lint "$(shell clojure -A:dev:test -Spath)"
From 892bf79746a48b0be8ffa8e5639ee40b64624eaa Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 26 Dec 2023 23:02:22 -0600
Subject: [PATCH 11/87] feat: make minor enhancements to cancelling cache
---
.../clara/rules/update_cache/cancelling.clj | 33 +++++--------------
1 file changed, 8 insertions(+), 25 deletions(-)
diff --git a/src/main/clojure/clara/rules/update_cache/cancelling.clj b/src/main/clojure/clara/rules/update_cache/cancelling.clj
index de506c3d..fa4e9fc4 100644
--- a/src/main/clojure/clara/rules/update_cache/cancelling.clj
+++ b/src/main/clojure/clara/rules/update_cache/cancelling.clj
@@ -1,11 +1,12 @@
(ns clara.rules.update-cache.cancelling
- (:require [clara.rules.update-cache.core :as uc])
+ (:require [clara.rules.update-cache.core :as uc]
+ [ham-fisted.api :as hf]
+ [ham-fisted.mut-map :as hm])
(:import [java.util
List
Map
LinkedList
- LinkedHashMap
- Collections]))
+ LinkedHashMap]))
;;; We need a wrapper to use Clojure equality semantics inside
;;; a Java collection. Furthermore, since we know we will need to do
@@ -43,10 +44,9 @@
;;; on equality.
(defn inc-fact-count! [^Map m fact]
(let [wrapper (FactWrapper. fact (hash fact))
- ^List current-val (.get m wrapper)]
- (if current-val
- (.add current-val fact)
- (.put m wrapper (LinkedList. [fact])))))
+ ^List result (or (.get m wrapper) (LinkedList.))]
+ (hm/compute-if-absent! m wrapper (constantly result))
+ (.add result fact)))
(defn dec-fact-count! [^Map m fact]
(let [wrapper (FactWrapper. fact (hash fact))
@@ -68,24 +68,7 @@
(defn map->vals-concated
[^Map m]
- (let [val-list (java.util.LinkedList.)
- it (.iterator (.entrySet m))]
- (loop []
- (when (.hasNext it)
- (do (let [^java.util.Map$Entry e (.next it)
- fact (.fact ^FactWrapper (.getKey e))
- ^Iterable facts-in-val (.getValue e)
- fact-iter (.iterator facts-in-val)]
- (loop []
- (when (.hasNext fact-iter)
- (do
- (.add val-list (.next fact-iter))
- (recur)))))
- (recur))))
- ;; This list will never be exposed to the user; it is simply iterated over
- ;; by the engine and then discarded. This being the case there is no
- ;; need to return a persistent data structure rather than an unmodifiable one.
- (Collections/unmodifiableList val-list)))
+ (hf/apply-concat (hm/values m)))
;;; This is a pending updates cache that allows
;; retractions and insertions of equal facts
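A minimal, illustrative sketch of how the frequency-map helpers above behave; the facts and the ham-fisted map are stand-ins for the cache's internal state:

(require '[clara.rules.update-cache.cancelling :as cancelling]
         '[ham-fisted.api :as hf])

(let [m      (hf/mut-map)
      fact-a {:temp 50}
      fact-b {:temp 50}]                  ; equal but not identical facts
  (cancelling/inc-fact-count! m fact-a)   ; creates the list under an equality-based key
  (cancelling/inc-fact-count! m fact-b)   ; equal key already present, so fact-b joins the same list
  (cancelling/map->vals-concated m))      ; both facts come back, concatenated across the map's values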
From c6e1dbef2e605b5a9eb991d441a90c1428f25ba4 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 09:07:43 -0600
Subject: [PATCH 12/87] feat: implement update cache using ham fisted mut list
---
.../clojure/clara/rules/update_cache/core.clj | 17 +++++++++--------
1 file changed, 9 insertions(+), 8 deletions(-)
diff --git a/src/main/clojure/clara/rules/update_cache/core.clj b/src/main/clojure/clara/rules/update_cache/core.clj
index 80073ea6..3849e876 100644
--- a/src/main/clojure/clara/rules/update_cache/core.clj
+++ b/src/main/clojure/clara/rules/update_cache/core.clj
@@ -1,4 +1,6 @@
-(ns clara.rules.update-cache.core)
+(ns clara.rules.update-cache.core
+ (:require [ham-fisted.api :as hf])
+ (:import [ham_fisted MutList]))
;; Record indicating pending insertion or removal of a sequence of facts.
(defrecord PendingUpdate [type facts])
@@ -12,21 +14,20 @@
;; This cache replicates the behavior prior to https://github.com/cerner/clara-rules/issues/249,
;; just in a stateful object rather than a persistent data structure.
-(deftype OrderedUpdateCache [updates]
-
+(deftype OrderedUpdateCache [^MutList ^:unsynchronized-mutable updates]
UpdateCache
(add-insertions! [this facts]
- (swap! updates into [(->PendingUpdate :insert facts)]))
+ (.add updates (->PendingUpdate :insert facts)))
(add-retractions! [this facts]
- (swap! updates into [(->PendingUpdate :retract facts)]))
+ (.add updates (->PendingUpdate :retract facts)))
(get-updates-and-reset! [this]
- (let [current-updates @updates]
- (reset! updates [])
+ (let [current-updates (hf/persistent! updates)]
+ (set! updates (hf/mut-list))
(partition-by :type current-updates))))
(defn get-ordered-update-cache
[]
- (OrderedUpdateCache. (atom [])))
+ (OrderedUpdateCache. (hf/mut-list)))
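An illustrative sketch of the cache lifecycle after this change; the keyword facts are placeholders:

(require '[clara.rules.update-cache.core :as uc])

(let [cache (uc/get-ordered-update-cache)]
  (uc/add-insertions! cache [:fact-1 :fact-2])
  (uc/add-retractions! cache [:fact-3])
  ;; Returns the pending updates partitioned by :type and resets the cache:
  ;; a group of :insert PendingUpdates followed by a group of :retract PendingUpdates.
  (uc/get-updates-and-reset! cache))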
From 3a79d0a8d01d23dc14b4ac034e4207c21f653509 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 10:55:57 -0600
Subject: [PATCH 13/87] feat: simplify multiple equality wrappers into a mostly
single JEQ wrapper
---
src/main/clojure/clara/rules/accumulators.clj | 27 +++++++------
src/main/clojure/clara/rules/compiler.clj | 40 +++++++++----------
src/main/clojure/clara/rules/dsl.clj | 6 +--
src/main/clojure/clara/rules/platform.clj | 30 ++++++++++----
.../clara/rules/update_cache/cancelling.clj | 32 ++-------------
5 files changed, 60 insertions(+), 75 deletions(-)
diff --git a/src/main/clojure/clara/rules/accumulators.clj b/src/main/clojure/clara/rules/accumulators.clj
index 37da6748..adece0b0 100644
--- a/src/main/clojure/clara/rules/accumulators.clj
+++ b/src/main/clojure/clara/rules/accumulators.clj
@@ -79,23 +79,24 @@
:convert-return-fn convert-return-fn}
combine-fn (assoc :combine-fn combine-fn)))))
-(let [grouping-fn (fnil conj [])]
- (defn grouping-by
- "Return a generic grouping accumulator. Behaves like clojure.core/group-by.
+(def ^:private grouping-fn (fnil conj []))
+
+(defn grouping-by
+ "Return a generic grouping accumulator. Behaves like clojure.core/group-by.
* `field` - required - The field of a fact to group by.
* `convert-return-fn` - optional - Converts the resulting grouped
data. Defaults to clojure.core/identity."
- ([field]
- (grouping-by field identity))
- ([field convert-return-fn]
- {:pre [(ifn? convert-return-fn)]}
- (reduce-to-accum
- (fn [m x]
- (let [v (field x)]
- (update m v grouping-fn x)))
- {}
- convert-return-fn))))
+ ([field]
+ (grouping-by field identity))
+ ([field convert-return-fn]
+ {:pre [(ifn? convert-return-fn)]}
+ (reduce-to-accum
+ (fn [m x]
+ (let [v (field x)]
+ (update m v grouping-fn x)))
+ {}
+ convert-return-fn)))
(defn- comparison-based
"Creates a comparison-based result such as min or max"
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 899c06b4..0393b489 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -4,11 +4,14 @@
Most users should use only the clara.rules namespace."
(:require [clara.rules.engine :as eng]
[clara.rules.schema :as schema]
+ [clara.rules.platform :refer [jeq-wrap] :as platform]
[clojure.set :as set]
[clojure.string :as string]
[clojure.walk :as walk]
[schema.core :as sc])
- (:import [clara.rules.engine
+ (:import [clara.rules.platform
+ JavaEqualityWrapper]
+ [clara.rules.engine
ProductionNode
QueryNode
AlphaNode
@@ -884,15 +887,13 @@
(variables-as-keywords join-filter-expressions)
nil)]
- (cond->
- {:node-type node-type
- :condition condition
- :new-bindings new-bindings
- :used-bindings (set/union cond-bindings join-filter-bindings)}
-
+ (cond-> {:node-type node-type
+ :condition condition
+ :new-bindings new-bindings
+ :used-bindings (set/union cond-bindings join-filter-bindings)}
(seq env) (assoc :env env)
- ;; Add the join bindings to join, accumulator or negation nodes.
+ ;; Add the join bindings to join, accumulator or negation nodes.
(#{:join :negation :accumulator} node-type) (assoc :join-bindings (set/intersection cond-bindings parent-bindings))
accumulator (assoc :accumulator accumulator)
@@ -1689,24 +1690,19 @@
;; Wrap the fact-type so that Clojure equality and hashcode semantics are used
;; even though this is placed in a Java map.
-(deftype AlphaRootsWrapper [fact-type ^int fact-type-hash roots]
+(deftype AlphaRootsWrapper [^JavaEqualityWrapper fact-type wrapped]
Object
(equals [this other]
(let [other ^AlphaRootsWrapper other]
- (cond
-
- (identical? fact-type (.fact-type other))
- true
-
- (not (== fact-type-hash (.fact-type-hash other)))
- false
-
- :else
- (= fact-type (.fact-type other)))))
+ (.equals fact-type (.fact_type other))))
;; Since know we will need to find the hashcode of this object in all cases just eagerly calculate it upfront
;; and avoid extra calls to hash later.
- (hashCode [this] fact-type-hash))
+ (hashCode [this] (.hash_code fact-type)))
+
+(defn alpha-roots-wrap
+ [fact-type roots]
+ (AlphaRootsWrapper. (jeq-wrap fact-type) roots))
(defn- create-get-alphas-fn
"Returns a function that given a sequence of facts,
@@ -1748,7 +1744,7 @@
;; removing groups with no alpha nodes here will improve performance on subsequent calls
;; to the get-alphas-fn with the same fact type.
(keep #(when-let [roots (not-empty (get alpha-roots %))]
- (AlphaRootsWrapper. % (hash %) roots)))
+ (alpha-roots-wrap % roots)))
;; If a user-provided ancestors-fn returns a sorted collection, for example for
;; ensuring determinism, we respect that ordering here by conj'ing on to the existing
;; collection.
@@ -1779,7 +1775,7 @@
(loop []
(when (.hasNext entries-it)
(let [^java.util.Map$Entry e (.next entries-it)]
- (.add return-list [(-> e ^AlphaRootsWrapper (.getKey) .roots)
+ (.add return-list [(-> e ^AlphaRootsWrapper (.getKey) (.wrapped))
(java.util.Collections/unmodifiableList (.getValue e))])
(recur))))
diff --git a/src/main/clojure/clara/rules/dsl.clj b/src/main/clojure/clara/rules/dsl.clj
index aa98fc40..d01dfb57 100644
--- a/src/main/clojure/clara/rules/dsl.clj
+++ b/src/main/clojure/clara/rules/dsl.clj
@@ -240,10 +240,10 @@
(cond-> rule
;; Add properties, if given.
- (not (empty? properties)) (assoc :props properties)
+ (seq properties) (assoc :props properties)
;; Add the environment, if given.
- (not (empty? env)) (assoc :env matching-env)))))
+ (seq env) (assoc :env matching-env)))))
(defn parse-query*
"Creates a query from the DSL syntax using the given environment map."
@@ -264,7 +264,7 @@
[(keyword (name sym)) sym]))]
(cond-> query
- (not (empty? env)) (assoc :env matching-env)))))
+ (seq env) (assoc :env matching-env)))))
(defmacro parse-rule
"Macro used to dynamically create a new rule using the DSL syntax."
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index 36a88b55..d02a050f 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -28,11 +28,28 @@
Object
(equals [this other]
- (and (instance? JavaEqualityWrapper other)
- (= wrapped (.wrapped ^JavaEqualityWrapper other))))
- (hashCode [this]
- hash-code))
+ (cond
+
+ ;; There are some cases where the inserted and retracted facts could be identical, particularly
+ ;; if user code in the RHS has caches, so we go ahead and check for identity as a first-pass check,
+ ;; but there probably aren't enough cases where the facts are identical to make doing a full sweep
+ ;; on identity first worthwhile, particularly since in practice the hash check will make the vast majority
+ ;; of .equals calls that return false quite fast.
+ (identical? wrapped (.wrapped ^JavaEqualityWrapper other))
+ true
+
+ (not (== hash-code (.hash_code ^JavaEqualityWrapper other)))
+ false
+
+ :else (= wrapped (.wrapped ^JavaEqualityWrapper other))))
+
+ (hashCode [this] hash-code))
+
+(defn jeq-wrap
+ "wraps the value with a JavaEqualityWrapper"
+ ^JavaEqualityWrapper [value]
+ (JavaEqualityWrapper. value (hash value)))
(defn group-by-seq
"Groups the items of the given coll by f to each item. Returns a seq of tuples of the form
@@ -49,10 +66,7 @@
(let [k (f x)
;; Use Java's hashcode for performance reasons as
;; discussed at https://github.com/cerner/clara-rules/issues/393
- wrapper (JavaEqualityWrapper. k
- (if (nil? k)
- (int 0)
- (int (.hashCode ^Object k))))
+ wrapper (jeq-wrap k)
xs (or (.get m wrapper)
(transient []))]
(.put m wrapper (conj! xs x)))
diff --git a/src/main/clojure/clara/rules/update_cache/cancelling.clj b/src/main/clojure/clara/rules/update_cache/cancelling.clj
index fa4e9fc4..74e29623 100644
--- a/src/main/clojure/clara/rules/update_cache/cancelling.clj
+++ b/src/main/clojure/clara/rules/update_cache/cancelling.clj
@@ -1,5 +1,6 @@
(ns clara.rules.update-cache.cancelling
(:require [clara.rules.update-cache.core :as uc]
+ [clara.rules.platform :refer [jeq-wrap]]
[ham-fisted.api :as hf]
[ham-fisted.mut-map :as hm])
(:import [java.util
@@ -8,33 +9,6 @@
LinkedList
LinkedHashMap]))
-;;; We need a wrapper to use Clojure equality semantics inside
-;;; a Java collection. Furthermore, since we know we will need to do
-;;; a hash operation for each such wrapper created anyway we can ensure
-;;; that if the hashes of two facts are not equal that the equals implementation
-;;; here will quickly return false.
-(deftype FactWrapper [fact ^int fact-hash]
-
- Object
- (equals [this other]
-
- (cond
-
- ;; There are some cases where the inserted and retracted facts could be identical, particularly
- ;; if user code in the RHS has caches, so we go ahead and check for identity as a first-pass check,
- ;; but there are probably aren't enough cases where the facts are identical to make doing a full sweep
- ;; on identity first worthwhile, particularly since in practice the hash check will make the vast majority
- ;; of .equals calls that return false quite fast.
- (identical? fact (.fact ^FactWrapper other))
- true
-
- (not (== fact-hash (.fact_hash ^FactWrapper other)))
- false
-
- :else (= fact (.fact ^FactWrapper other))))
-
- (hashCode [this] fact-hash))
-
;;; These functions essentially allow us to use a Java map to create a set that stores
;;; the frequency of its items. Note that when multiple instances of a fact are added
;;; we keep both instances as distinct objects. We don't strictly speaking need to do this
@@ -43,13 +17,13 @@
;;; memory operations look for matches on identity first in tokens before falling back to matching
;;; on equality.
(defn inc-fact-count! [^Map m fact]
- (let [wrapper (FactWrapper. fact (hash fact))
+ (let [wrapper (jeq-wrap fact)
^List result (or (.get m wrapper) (LinkedList.))]
(hm/compute-if-absent! m wrapper (constantly result))
(.add result fact)))
(defn dec-fact-count! [^Map m fact]
- (let [wrapper (FactWrapper. fact (hash fact))
+ (let [wrapper (jeq-wrap fact)
;; Note that we specifically use a LinkedList type hint here since we
;; use methods from multiple interfaces here, namely List and Deque.
^LinkedList current-val (.get m wrapper)]
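An illustrative sketch of what jeq-wrap provides: Clojure equality and hash semantics for keys held in plain Java maps; the vector key is a placeholder:

(require '[clara.rules.platform :refer [jeq-wrap]])

(let [m (java.util.HashMap.)]
  (.put m (jeq-wrap [1 2 3]) :found)
  ;; A structurally equal, non-identical key hashes and compares the same.
  (.get m (jeq-wrap (vector 1 2 3))))
;; => :found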
From 84b48ff1cd1e8e4b219692f5cb918cb3ab028d19 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 11:00:49 -0600
Subject: [PATCH 14/87] chore: formatting
---
src/main/clojure/clara/rules/memory.clj | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 958234ba..f9d3e8eb 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -580,8 +580,7 @@
(when-not (coll-empty? tokens)
(let [binding-token-map (get beta-memory (:id node) {})
previous-tokens (get binding-token-map join-bindings)]
- (if
- (coll-empty? previous-tokens)
+ (if (coll-empty? previous-tokens)
[]
(let [;; Attempt to remove tokens using the faster identity-based equality first since
From 32192b08744ef0afafd443c22917e0c50accf43b Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 20:24:40 -0600
Subject: [PATCH 15/87] feat: progress converting memory to mutable map/coll,
add kaocha script
---
bin/kaocha | 2 +
dev/user.clj | 7 +-
src/main/clojure/clara/rules/memory.clj | 104 +++++++++++++--------
src/test/clojure/clara/test_durability.clj | 54 +++++------
4 files changed, 99 insertions(+), 68 deletions(-)
create mode 100755 bin/kaocha
diff --git a/bin/kaocha b/bin/kaocha
new file mode 100755
index 00000000..4c19821a
--- /dev/null
+++ b/bin/kaocha
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+clojure -M:dev:test:runner $@
diff --git a/dev/user.clj b/dev/user.clj
index 7a96237a..6e867c30 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -1 +1,6 @@
-(ns user)
+(ns user
+ (:require [criterium.core :refer [report-result
+ quick-benchmark] :as crit]
+ [ham-fisted.api :as hf]
+ [ham-fisted.mut-map :as hm]
+ [clara.rules.platform :as platform]))
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index f9d3e8eb..aa631f4f 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -1,12 +1,15 @@
(ns clara.rules.memory
"This namespace is for internal use and may move in the future.
Specification and default implementation of working memory"
+ (:require [ham-fisted.api :as hf])
(:import [java.util
+ Map
Collections
LinkedList
NavigableMap
PriorityQueue
- TreeMap]))
+ TreeMap]
+ [ham_fisted MutableMap]))
(defprotocol IPersistentMemory
(to-transient [memory]))
@@ -154,15 +157,36 @@
(.add dest x))
dest)
+(defn- linked-list?
+ [coll]
+ (instance? LinkedList coll))
+
(defn- ->linked-list
"Creates a new java.util.LinkedList from the coll, but avoids using
Collection.addAll(Collection) since there is unnecessary overhead
in this of calling Collection.toArray() on coll."
^java.util.List [coll]
- (if (instance? LinkedList coll)
+ (if (linked-list? coll)
coll
(add-all! (LinkedList.) coll)))
+(defn- mutable-map?
+ [m]
+ (instance? MutableMap m))
+
+(defn- ->mutable-map
+ "Creates a new ham_fisted.MutableMap from the map, but only if necessary."
+ [m]
+ (if (mutable-map? m) m
+ (hf/mut-map m)))
+
+(defn- ->persistent-coll
+ "Creates a persistent collection from the input collection, but only if necessary"
+ [coll]
+ (if (coll? coll)
+ coll
+ (seq coll)))
+
(defn- remove-first-of-each!
"Remove the first instance of each item in the given remove-seq that
appears in the collection coll. coll is updated in place for
@@ -417,11 +441,11 @@
activation-group-sort-fn
activation-group-fn
alphas-fn
- ^:unsynchronized-mutable alpha-memory
- ^:unsynchronized-mutable beta-memory
- ^:unsynchronized-mutable accum-memory
- ^:unsynchronized-mutable production-memory
- ^:unsynchronized-mutable ^NavigableMap activation-map]
+ ^Map ^:unsynchronized-mutable alpha-memory
+ ^Map ^:unsynchronized-mutable beta-memory
+ ^Map ^:unsynchronized-mutable accum-memory
+ ^Map ^:unsynchronized-mutable production-memory
+ ^NavigableMap ^:unsynchronized-mutable activation-map]
IMemoryReader
(get-rulebase [memory] rulebase)
@@ -483,7 +507,8 @@
ITransientMemory
(add-elements! [memory node join-bindings elements]
- (let [binding-element-map (get alpha-memory (:id node) {})
+ (let [node-id (:id node)
+ binding-element-map (get alpha-memory node-id {})
previous-elements (get binding-element-map join-bindings)]
(cond
@@ -492,7 +517,7 @@
(coll? previous-elements)
(set! alpha-memory
(assoc! alpha-memory
- (:id node)
+ node-id
(assoc binding-element-map
join-bindings
(into previous-elements elements))))
@@ -507,7 +532,7 @@
elements
(set! alpha-memory
(assoc! alpha-memory
- (:id node)
+ node-id
(assoc binding-element-map
join-bindings
elements))))))
@@ -515,7 +540,8 @@
(remove-elements! [memory node join-bindings elements]
;; Do nothing when no elements to remove.
(when-not (coll-empty? elements)
- (let [binding-element-map (get alpha-memory (:id node) {})
+ (let [node-id (:id node)
+ binding-element-map (get alpha-memory node-id {})
previous-elements (get binding-element-map join-bindings)]
(cond
;; Do nothing when no previous elements to remove from.
@@ -537,7 +563,7 @@
remaining-elements))]
(set! alpha-memory
(assoc! alpha-memory
- (:id node)
+ node-id
new-bindings-map))
removed-elements))
@@ -547,19 +573,20 @@
(when (.isEmpty ^java.util.List previous-elements)
(set! alpha-memory
(assoc! alpha-memory
- (:id node)
+ node-id
(dissoc binding-element-map join-bindings))))
removed-elements)))))
(add-tokens! [memory node join-bindings tokens]
- (let [binding-token-map (get beta-memory (:id node) {})
+ (let [node-id (:id node)
+ binding-token-map (get beta-memory node-id {})
previous-tokens (get binding-token-map join-bindings)]
;; The reasoning here is the same as in add-elements! impl above.
(cond
(coll? previous-tokens)
(set! beta-memory
(assoc! beta-memory
- (:id node)
+ node-id
(assoc binding-token-map
join-bindings
(into previous-tokens tokens))))
@@ -570,7 +597,7 @@
tokens
(set! beta-memory
(assoc! beta-memory
- (:id node)
+ node-id
(assoc binding-token-map
join-bindings
tokens))))))
@@ -578,7 +605,8 @@
(remove-tokens! [memory node join-bindings tokens]
;; The reasoning here is the same as remove-elements!
(when-not (coll-empty? tokens)
- (let [binding-token-map (get beta-memory (:id node) {})
+ (let [node-id (:id node)
+ binding-token-map (get beta-memory node-id {})
previous-tokens (get binding-token-map join-bindings)]
(if (coll-empty? previous-tokens)
[]
@@ -589,18 +617,17 @@
;; equality though since those semantics are supported within the engine. This
;; slower path should be rare for any heavy retraction flows - such as those that come
;; via truth maintenance.
- two-pass-remove! (fn [remaining-tokens tokens]
+ two-pass-remove! (fn do-remove-tokens
+ [remaining-tokens tokens]
(let [[removed-tokens not-removed-tokens]
(remove-first-of-each! tokens
remaining-tokens
- (fn [t1 t2]
- (fast-token-compare identical? t1 t2)))]
+ (partial fast-token-compare identical?))]
(if-let [other-removed (and (seq not-removed-tokens)
(-> not-removed-tokens
(remove-first-of-each! remaining-tokens
- (fn [t1 t2]
- (fast-token-compare = t1 t2)))
+ (partial fast-token-compare =))
first
seq))]
(into removed-tokens other-removed)
@@ -614,7 +641,7 @@
(assoc binding-token-map join-bindings remaining-tokens))]
(set! beta-memory
(assoc! beta-memory
- (:id node)
+ node-id
new-tokens-map))
removed-tokens)
@@ -623,7 +650,7 @@
(when (.isEmpty ^java.util.List previous-tokens)
(set! beta-memory
(assoc! beta-memory
- (:id node)
+ node-id
(dissoc binding-token-map join-bindings))))
removed-tokens)))))))
@@ -785,22 +812,19 @@
(.clear activation-map))
(to-persistent! [memory]
- (let [->persistent-coll #(if (coll? %)
- %
- (seq %))
- update-vals (fn [m update-fn]
+ (let [update-vals (fn do-update-vals [update-fn m]
(->> m
(reduce-kv (fn [m k v]
(assoc! m k (update-fn v)))
(transient m))
persistent!))
- persistent-vals #(update-vals % ->persistent-coll)]
+ persistent-vals (partial update-vals ->persistent-coll)]
(->PersistentLocalMemory rulebase
activation-group-sort-fn
activation-group-fn
alphas-fn
- (update-vals (persistent! alpha-memory) persistent-vals)
- (update-vals (persistent! beta-memory) persistent-vals)
+ (update-vals persistent-vals (persistent! alpha-memory))
+ (update-vals persistent-vals (persistent! beta-memory))
(persistent! accum-memory)
(persistent! production-memory)
(into {}
@@ -882,10 +906,10 @@
activation-group-sort-fn
activation-group-fn
alphas-fn
- (transient alpha-memory)
- (transient beta-memory)
- (transient accum-memory)
- (transient production-memory)
+ (->mutable-map alpha-memory)
+ (->mutable-map beta-memory)
+ (->mutable-map accum-memory)
+ (->mutable-map production-memory)
(let [treemap (TreeMap. ^java.util.Comparator activation-group-sort-fn)]
(doseq [[activation-group activations] activation-map]
(.put treemap
@@ -901,8 +925,8 @@
activation-group-sort-fn
activation-group-fn
alphas-fn
- {}
- {}
- {}
- {}
- {}))
+ (hf/hash-map)
+ (hf/hash-map)
+ (hf/hash-map)
+ (hf/hash-map)
+ (hf/hash-map)))
diff --git a/src/test/clojure/clara/test_durability.clj b/src/test/clojure/clara/test_durability.clj
index bb14e21d..42897e9b 100644
--- a/src/test/clojure/clara/test_durability.clj
+++ b/src/test/clojure/clara/test_durability.clj
@@ -216,7 +216,7 @@
r-temp-his-res (query restored dr/temp-his)
r-temps-under-thresh-res (query restored dr/temps-under-thresh)
- facts @(:holder mem-serializer)]
+ facts (sort-by hash @(:holder mem-serializer))]
(testing "Ensure the queries return same before and after serialization"
(is (= (frequencies [{:?ws (dr/->UnpairedWindSpeed ws10)}])
@@ -268,32 +268,32 @@
;; All of these facts must have an identical? relationship (same object references)
;; as the actual facts being tested against.
- expected-facts [temp50
- temp40
- temp30
- temp20
- [temp50 temp40 temp30 temp20]
- mci
- lax
- san
- chi
- twenty
- cold20
- unpaired-ws10
- temp-his
- ws50
- ws40
- ws10
- irk
- fifty
- forty
- thirty
- thresh50
- temps-under-thresh
- hot40
- hot30
- hot50
- [temp40 temp30 temp20]]]
+ expected-facts (sort-by hash [temp50
+ temp40
+ temp30
+ temp20
+ [temp50 temp40 temp30 temp20]
+ mci
+ lax
+ san
+ chi
+ twenty
+ cold20
+ unpaired-ws10
+ temp-his
+ ws50
+ ws40
+ ws10
+ irk
+ fifty
+ forty
+ thirty
+ thresh50
+ temps-under-thresh
+ hot40
+ hot30
+ hot50
+ [temp40 temp30 temp20]])]
(is (= (count expected-facts)
(count facts))
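A minimal sketch of the ham-fisted round trip the memory now leans on: mutate in place while the session runs, then freeze with persistent!; the keys and values are placeholders:

(require '[ham-fisted.api :as hf])

(let [m (hf/mut-map {:node-1 [:element-a]})]
  (assoc! m :node-2 [:element-b]) ; updates in place, no rebinding needed as with transients
  (persistent! m))
;; => {:node-1 [:element-a], :node-2 [:element-b]}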
From 8fa2b8569511989ac0aef7645c26cec0abfb224e Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 21:00:02 -0600
Subject: [PATCH 16/87] feat: remove unnecessary mutable flags and set
statements
---
Makefile | 4 +-
deps.edn | 7 +-
src/main/clojure/clara/rules/memory.clj | 129 +++++++++++-------------
tests.edn | 2 +-
4 files changed, 67 insertions(+), 75 deletions(-)
diff --git a/Makefile b/Makefile
index 34a31047..58d1b083 100644
--- a/Makefile
+++ b/Makefile
@@ -13,10 +13,10 @@ repl: compile-test-java
clojure -M:dev:test:repl
test: compile-test-java
- clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/tap
+ clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/tap --no-capture-output
test-generative: compile-test-java
- clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/tap
+ clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/tap --no-capture-output
test-config:
clojure -M:dev:test:runner --print-config
diff --git a/deps.edn b/deps.edn
index e748f41e..884e7142 100644
--- a/deps.edn
+++ b/deps.edn
@@ -25,13 +25,16 @@
org.clojure/test.check {:mvn/version "1.1.1"}
pjstadig/humane-test-output {:mvn/version "0.10.0"}}}
- :runner {:main-opts ["-m" "kaocha.runner"]
+ :runner {:main-opts ["-e" "(println \"warn-on-reflection =\" (set! *warn-on-reflection* true))"
+ "-m" "kaocha.runner"]
:exec-fn kaocha.runner/exec-fn}
:repl {:extra-deps {nrepl/nrepl {:mvn/version "1.1.0"}
cider/cider-nrepl {:mvn/version "0.44.0"}}
- :main-opts ["-m" "nrepl.cmdline" "--interactive" "--middleware" "[\"cider.nrepl/cider-middleware\"]"]}
+ :main-opts ["-e" "(println \"warn-on-reflection =\" (set! *warn-on-reflection* true))"
+ "-m" "nrepl.cmdline" "--interactive"
+ "--middleware" "[\"cider.nrepl/cider-middleware\"]"]}
:format-fix {:extra-deps {cljfmt/cljfmt {:mvn/version "0.9.2"}}
:main-opts ["-m" "cljfmt.main" "fix" "src" "dev"]}
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index aa631f4f..fceb24ea 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -177,7 +177,8 @@
(defn- ->mutable-map
"Creates a new ham_fisted.MutableMap from the map, but only if necessary."
[m]
- (if (mutable-map? m) m
+ (if (mutable-map? m)
+ m
(hf/mut-map m)))
(defn- ->persistent-coll
@@ -441,11 +442,11 @@
activation-group-sort-fn
activation-group-fn
alphas-fn
- ^Map ^:unsynchronized-mutable alpha-memory
- ^Map ^:unsynchronized-mutable beta-memory
- ^Map ^:unsynchronized-mutable accum-memory
- ^Map ^:unsynchronized-mutable production-memory
- ^NavigableMap ^:unsynchronized-mutable activation-map]
+ ^Map alpha-memory
+ ^Map beta-memory
+ ^Map accum-memory
+ ^Map production-memory
+ ^NavigableMap activation-map]
IMemoryReader
(get-rulebase [memory] rulebase)
@@ -515,12 +516,11 @@
;; When changing existing persistent collections, just add on
;; the new elements.
(coll? previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- node-id
- (assoc binding-element-map
- join-bindings
- (into previous-elements elements))))
+ (assoc! alpha-memory
+ node-id
+ (assoc binding-element-map
+ join-bindings
+ (into previous-elements elements)))
;; Already mutable, so update-in-place.
previous-elements
@@ -530,12 +530,11 @@
;; until we actually need to modify anything. This avoids
;; unnecessary copying.
elements
- (set! alpha-memory
- (assoc! alpha-memory
- node-id
- (assoc binding-element-map
- join-bindings
- elements))))))
+ (assoc! alpha-memory
+ node-id
+ (assoc binding-element-map
+ join-bindings
+ elements)))))
(remove-elements! [memory node join-bindings elements]
;; Do nothing when no elements to remove.
@@ -561,20 +560,18 @@
(assoc binding-element-map
join-bindings
remaining-elements))]
- (set! alpha-memory
- (assoc! alpha-memory
- node-id
- new-bindings-map))
+ (assoc! alpha-memory
+ node-id
+ new-bindings-map)
removed-elements))
;; Already mutable, so we do not need to re-associate to alpha-memory.
previous-elements
(let [removed-elements (first (remove-first-of-each! elements previous-elements))]
(when (.isEmpty ^java.util.List previous-elements)
- (set! alpha-memory
- (assoc! alpha-memory
- node-id
- (dissoc binding-element-map join-bindings))))
+ (assoc! alpha-memory
+ node-id
+ (dissoc binding-element-map join-bindings)))
removed-elements)))))
(add-tokens! [memory node join-bindings tokens]
@@ -584,23 +581,21 @@
;; The reasoning here is the same as in add-elements! impl above.
(cond
(coll? previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- node-id
- (assoc binding-token-map
- join-bindings
- (into previous-tokens tokens))))
+ (assoc! beta-memory
+ node-id
+ (assoc binding-token-map
+ join-bindings
+ (into previous-tokens tokens)))
previous-tokens
(add-all! previous-tokens tokens)
tokens
- (set! beta-memory
- (assoc! beta-memory
- node-id
- (assoc binding-token-map
- join-bindings
- tokens))))))
+ (assoc! beta-memory
+ node-id
+ (assoc binding-token-map
+ join-bindings
+ tokens)))))
(remove-tokens! [memory node join-bindings tokens]
;; The reasoning here is the same as remove-elements!
@@ -639,29 +634,26 @@
new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
(dissoc binding-token-map join-bindings)
(assoc binding-token-map join-bindings remaining-tokens))]
- (set! beta-memory
- (assoc! beta-memory
- node-id
- new-tokens-map))
+ (assoc! beta-memory
+ node-id
+ new-tokens-map)
removed-tokens)
previous-tokens
(let [removed-tokens (two-pass-remove! previous-tokens tokens)]
(when (.isEmpty ^java.util.List previous-tokens)
- (set! beta-memory
- (assoc! beta-memory
- node-id
- (dissoc binding-token-map join-bindings))))
+ (assoc! beta-memory
+ node-id
+ (dissoc binding-token-map join-bindings)))
removed-tokens)))))))
(add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
- (set! accum-memory
- (assoc! accum-memory
- (:id node)
- (assoc-in (get accum-memory (:id node) {})
- [join-bindings fact-bindings]
- accum-result))))
+ (assoc! accum-memory
+ (:id node)
+ (assoc-in (get accum-memory (:id node) {})
+ [join-bindings fact-bindings]
+ accum-result)))
(remove-accum-reduced! [memory node join-bindings fact-bindings]
(let [node-id (:id node)
@@ -670,23 +662,21 @@
node-id-mem (if (empty? join-mem)
(dissoc node-id-mem join-bindings)
(assoc node-id-mem join-bindings join-mem))]
- (set! accum-memory
- (if (empty? node-id-mem)
- (dissoc! accum-memory
- node-id)
- (assoc! accum-memory
- node-id
- node-id-mem)))))
+ (if (empty? node-id-mem)
+ (dissoc! accum-memory
+ node-id)
+ (assoc! accum-memory
+ node-id
+ node-id-mem))))
;; The value under each token in the map should be a sequence
;; of sequences of facts, with each inner sequence coming from a single
;; rule activation.
(add-insertions! [memory node token facts]
(let [token-facts-map (get production-memory (:id node) {})]
- (set! production-memory
- (assoc! production-memory
- (:id node)
- (update token-facts-map token conj facts)))))
+ (assoc! production-memory
+ (:id node)
+ (update token-facts-map token conj facts))))
(remove-insertions! [memory node tokens]
@@ -718,12 +708,11 @@
(persistent! token-map)]))]
;; Clear the tokens and update the memory.
- (set! production-memory
- (if (not-empty new-token-facts-map)
- (assoc! production-memory
- (:id node)
- new-token-facts-map)
- (dissoc! production-memory (:id node))))
+ (if (not-empty new-token-facts-map)
+ (assoc! production-memory
+ (:id node)
+ new-token-facts-map)
+ (dissoc! production-memory (:id node)))
results))
(add-activations!
@@ -816,7 +805,7 @@
(->> m
(reduce-kv (fn [m k v]
(assoc! m k (update-fn v)))
- (transient m))
+ (hf/mut-map))
persistent!))
persistent-vals (partial update-vals ->persistent-coll)]
(->PersistentLocalMemory rulebase
diff --git a/tests.edn b/tests.edn
index 2fa1537c..0023dd3a 100644
--- a/tests.edn
+++ b/tests.edn
@@ -1,4 +1,4 @@
-#kaocha/v1 {:capture-output? true
+#kaocha/v1 {:capture-output? false
:kaocha/fail-fast? false
:plugins [:kaocha.plugin/profiling
:kaocha.plugin/gc-profiling
From 17980062bb97e92194ae6fca3601dad630872650 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 22:19:53 -0600
Subject: [PATCH 17/87] feat: refactor elements and token fns using mutable map
compute
---
src/main/clojure/clara/rules/memory.clj | 204 +++++++++---------------
1 file changed, 74 insertions(+), 130 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index fceb24ea..8184996f 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -1,7 +1,8 @@
(ns clara.rules.memory
"This namespace is for internal use and may move in the future.
Specification and default implementation of working memory"
- (:require [ham-fisted.api :as hf])
+ (:require [ham-fisted.api :as hf]
+ [ham-fisted.mut-map :as hm])
(:import [java.util
Map
Collections
@@ -508,145 +509,88 @@
ITransientMemory
(add-elements! [memory node join-bindings elements]
- (let [node-id (:id node)
- binding-element-map (get alpha-memory node-id {})
- previous-elements (get binding-element-map join-bindings)]
-
- (cond
- ;; When changing existing persistent collections, just add on
- ;; the new elements.
- (coll? previous-elements)
- (assoc! alpha-memory
- node-id
- (assoc binding-element-map
- join-bindings
- (into previous-elements elements)))
-
- ;; Already mutable, so update-in-place.
- previous-elements
- (add-all! previous-elements elements)
-
- ;; No previous. We can just leave it persistent if it is
- ;; until we actually need to modify anything. This avoids
- ;; unnecessary copying.
- elements
- (assoc! alpha-memory
- node-id
- (assoc binding-element-map
- join-bindings
- elements)))))
+ (hm/compute!
+ alpha-memory (:id node)
+ (fn do-add-bem
+ [_ bem]
+ (let [binding-element-map (->mutable-map bem)]
+ (hm/compute!
+ binding-element-map join-bindings
+ (fn do-add-bel
+ [_ bel]
+ (let [binding-element-list (->linked-list bel)]
+ (add-all! binding-element-list elements)
+ binding-element-list)))
+ binding-element-map))))
(remove-elements! [memory node join-bindings elements]
;; Do nothing when no elements to remove.
(when-not (coll-empty? elements)
- (let [node-id (:id node)
- binding-element-map (get alpha-memory node-id {})
- previous-elements (get binding-element-map join-bindings)]
- (cond
- ;; Do nothing when no previous elements to remove from.
- (coll-empty? previous-elements)
- []
-
- ;; Convert persistent collection to a mutable one prior to calling remove-first-of-each!
- ;; alpha-memory needs to be updated this time since there is now going to be a mutable
- ;; collection associated in this memory location instead.
- (coll? previous-elements)
- (let [remaining-elements (->linked-list previous-elements)
- removed-elements (first (remove-first-of-each! elements remaining-elements))]
- ;; If there are no remaining elements under a binding group for the node remove the binding group.
- ;; This allows these binding values to be garbage collected.
- (let [new-bindings-map (if (.isEmpty ^java.util.List remaining-elements)
- (dissoc binding-element-map join-bindings)
- (assoc binding-element-map
- join-bindings
- remaining-elements))]
- (assoc! alpha-memory
- node-id
- new-bindings-map)
- removed-elements))
-
- ;; Already mutable, so we do not need to re-associate to alpha-memory.
- previous-elements
- (let [removed-elements (first (remove-first-of-each! elements previous-elements))]
- (when (.isEmpty ^java.util.List previous-elements)
- (assoc! alpha-memory
- node-id
- (dissoc binding-element-map join-bindings)))
- removed-elements)))))
+ (let [removed-elements-result (hf/mut-list)]
+ (hm/compute-if-present!
+ alpha-memory (:id node)
+ (fn do-rem-bem
+ [_ bem]
+ (let [binding-element-map (->mutable-map bem)]
+ (hm/compute-if-present!
+ binding-element-map join-bindings
+ (fn do-rem-bel
+ [_ bel]
+ (let [binding-element-list (->linked-list bel)
+ removed-elements (first (remove-first-of-each! elements binding-element-list))]
+ (hf/add-all! removed-elements-result removed-elements)
+ (not-empty binding-element-list))))
+ (not-empty binding-element-map))))
+ (hf/persistent! removed-elements-result))))
(add-tokens! [memory node join-bindings tokens]
- (let [node-id (:id node)
- binding-token-map (get beta-memory node-id {})
- previous-tokens (get binding-token-map join-bindings)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- (coll? previous-tokens)
- (assoc! beta-memory
- node-id
- (assoc binding-token-map
- join-bindings
- (into previous-tokens tokens)))
-
- previous-tokens
- (add-all! previous-tokens tokens)
-
- tokens
- (assoc! beta-memory
- node-id
- (assoc binding-token-map
- join-bindings
- tokens)))))
+ (hm/compute!
+ beta-memory (:id node)
+ (fn do-add-btm
+ [_ btm]
+ (let [binding-token-map (->mutable-map btm)]
+ (hm/compute!
+ binding-token-map join-bindings
+ (fn do-add-btl
+ [_ btl]
+ (let [binding-token-list (->linked-list btl)]
+ (add-all! binding-token-list tokens)
+ binding-token-list)))
+ binding-token-map))))
(remove-tokens! [memory node join-bindings tokens]
;; The reasoning here is the same as remove-elements!
(when-not (coll-empty? tokens)
- (let [node-id (:id node)
- binding-token-map (get beta-memory node-id {})
- previous-tokens (get binding-token-map join-bindings)]
- (if (coll-empty? previous-tokens)
- []
-
- (let [;; Attempt to remove tokens using the faster identity-based equality first since
- ;; most of the time this is all we need and it can be much faster. Any token that
- ;; wasn't removed via identity, has to be "retried" with normal value-based
- ;; equality though since those semantics are supported within the engine. This
- ;; slower path should be rare for any heavy retraction flows - such as those that come
- ;; via truth maintenance.
- two-pass-remove! (fn do-remove-tokens
- [remaining-tokens tokens]
- (let [[removed-tokens not-removed-tokens]
- (remove-first-of-each! tokens
- remaining-tokens
- (partial fast-token-compare identical?))]
-
- (if-let [other-removed (and (seq not-removed-tokens)
- (-> not-removed-tokens
- (remove-first-of-each! remaining-tokens
- (partial fast-token-compare =))
- first
- seq))]
- (into removed-tokens other-removed)
- removed-tokens)))]
- (cond
- (coll? previous-tokens)
- (let [remaining-tokens (->linked-list previous-tokens)
- removed-tokens (two-pass-remove! remaining-tokens tokens)
- new-tokens-map (if (.isEmpty ^java.util.List remaining-tokens)
- (dissoc binding-token-map join-bindings)
- (assoc binding-token-map join-bindings remaining-tokens))]
- (assoc! beta-memory
- node-id
- new-tokens-map)
- removed-tokens)
-
- previous-tokens
- (let [removed-tokens (two-pass-remove! previous-tokens tokens)]
- (when (.isEmpty ^java.util.List previous-tokens)
- (assoc! beta-memory
- node-id
- (dissoc binding-token-map join-bindings)))
-
- removed-tokens)))))))
+ (let [removed-tokens-result (hf/mut-list)]
+ (hm/compute-if-present!
+ beta-memory (:id node)
+ (fn do-rem-btm
+ [_ btm]
+ (let [binding-token-map (->mutable-map btm)]
+ (hm/compute-if-present!
+ binding-token-map join-bindings
+ (fn do-rem-btl
+ [_ btl]
+ (let [binding-token-list (->linked-list btl)]
+ ;; Attempt to remove tokens using the faster identity-based equality first since
+ ;; most of the time this is all we need and it can be much faster. Any token that
+ ;; wasn't removed via identity, has to be "retried" with normal value-based
+ ;; equality though since those semantics are supported within the engine. This
+ ;; slower path should be rare for any heavy retraction flows - such as those that come
+ ;; via truth maintenance.
+ (let [[removed-tokens not-removed-tokens]
+ (remove-first-of-each! tokens
+ binding-token-list
+ (partial fast-token-compare identical?))]
+ (hf/add-all! removed-tokens-result removed-tokens)
+ (when (seq not-removed-tokens)
+ (let [[other-removed-tokens]
+ (remove-first-of-each! not-removed-tokens binding-token-list
+ (partial fast-token-compare =))]
+ (hf/add-all! removed-tokens-result other-removed-tokens))))
+ (not-empty binding-token-list))))
+ (not-empty binding-token-map))))
+ (hf/persistent! removed-tokens-result))))
(add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
(assoc! accum-memory
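An illustrative, self-contained sketch of the nested compute! pattern that add-elements! and add-tokens! now follow: group values under a node id and a join-bindings key, creating the inner map and list on demand. add-under!, the keys, and the elements are hypothetical names, not part of the patch:

(require '[ham-fisted.api :as hf]
         '[ham-fisted.mut-map :as hm])
(import 'java.util.LinkedList)

(defn add-under! [m outer-key inner-key xs]
  (hm/compute! m outer-key
               (fn [_ inner]
                 (let [inner-map (or inner (hf/mut-map))]
                   (hm/compute! inner-map inner-key
                                (fn [_ lst]
                                  (let [^LinkedList lst (or lst (LinkedList.))]
                                    (.addAll lst xs) ; LinkedList grows in place
                                    lst)))
                   inner-map))))

(let [m (hf/mut-map)]
  (add-under! m 1 {:?loc "MCI"} [:element-a :element-b])
  (add-under! m 1 {:?loc "MCI"} [:element-c])
  m) ; 1 now maps to {{:?loc "MCI"} <LinkedList of :element-a :element-b :element-c>}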
From 5f94ae0b2de26d8d46265138c6fbbd4caaa9a90d Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 22:28:09 -0600
Subject: [PATCH 18/87] chore: formatting and cleanup
---
src/main/clojure/clara/rules/memory.clj | 128 +++++++++++-------------
1 file changed, 61 insertions(+), 67 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 8184996f..154e9787 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -509,87 +509,81 @@
ITransientMemory
(add-elements! [memory node join-bindings elements]
- (hm/compute!
- alpha-memory (:id node)
- (fn do-add-bem
- [_ bem]
- (let [binding-element-map (->mutable-map bem)]
- (hm/compute!
- binding-element-map join-bindings
- (fn do-add-bel
- [_ bel]
- (let [binding-element-list (->linked-list bel)]
- (add-all! binding-element-list elements)
- binding-element-list)))
- binding-element-map))))
+ (hm/compute! alpha-memory (:id node)
+ (fn do-add-bem
+ [_ bem]
+ (let [binding-element-map (->mutable-map bem)]
+ (hm/compute! binding-element-map join-bindings
+ (fn do-add-bel
+ [_ bel]
+ (let [binding-element-list (->linked-list bel)]
+ (add-all! binding-element-list elements)
+ binding-element-list)))
+ binding-element-map))))
(remove-elements! [memory node join-bindings elements]
;; Do nothing when no elements to remove.
(when-not (coll-empty? elements)
(let [removed-elements-result (hf/mut-list)]
- (hm/compute-if-present!
- alpha-memory (:id node)
- (fn do-rem-bem
- [_ bem]
- (let [binding-element-map (->mutable-map bem)]
- (hm/compute-if-present!
- binding-element-map join-bindings
- (fn do-rem-bel
- [_ bel]
- (let [binding-element-list (->linked-list bel)
- removed-elements (first (remove-first-of-each! elements binding-element-list))]
- (hf/add-all! removed-elements-result removed-elements)
- (not-empty binding-element-list))))
- (not-empty binding-element-map))))
+ (hm/compute-if-present! alpha-memory (:id node)
+ (fn do-rem-bem
+ [_ bem]
+ (let [binding-element-map (->mutable-map bem)]
+ (hm/compute-if-present! binding-element-map join-bindings
+ (fn do-rem-bel
+ [_ bel]
+ (let [binding-element-list (->linked-list bel)
+ removed-elements (first (remove-first-of-each! elements binding-element-list))]
+ (hf/add-all! removed-elements-result removed-elements)
+ (not-empty binding-element-list))))
+ (not-empty binding-element-map))))
(hf/persistent! removed-elements-result))))
(add-tokens! [memory node join-bindings tokens]
- (hm/compute!
- beta-memory (:id node)
- (fn do-add-btm
- [_ btm]
- (let [binding-token-map (->mutable-map btm)]
- (hm/compute!
- binding-token-map join-bindings
- (fn do-add-btl
- [_ btl]
- (let [binding-token-list (->linked-list btl)]
- (add-all! binding-token-list tokens)
- binding-token-list)))
- binding-token-map))))
+ (hm/compute! beta-memory (:id node)
+ (fn do-add-btm
+ [_ btm]
+ (let [binding-token-map (->mutable-map btm)]
+ (hm/compute! binding-token-map join-bindings
+ (fn do-add-btl
+ [_ btl]
+ (let [binding-token-list (->linked-list btl)]
+ (add-all! binding-token-list tokens)
+ binding-token-list)))
+ binding-token-map))))
(remove-tokens! [memory node join-bindings tokens]
;; The reasoning here is the same as remove-elements!
(when-not (coll-empty? tokens)
(let [removed-tokens-result (hf/mut-list)]
(hm/compute-if-present!
- beta-memory (:id node)
- (fn do-rem-btm
- [_ btm]
- (let [binding-token-map (->mutable-map btm)]
- (hm/compute-if-present!
- binding-token-map join-bindings
- (fn do-rem-btl
- [_ btl]
- (let [binding-token-list (->linked-list btl)]
- ;; Attempt to remove tokens using the faster identity-based equality first since
- ;; most of the time this is all we need and it can be much faster. Any token that
- ;; wasn't removed via identity, has to be "retried" with normal value-based
- ;; equality though since those semantics are supported within the engine. This
- ;; slower path should be rare for any heavy retraction flows - such as those that come
- ;; via truth maintenance.
- (let [[removed-tokens not-removed-tokens]
- (remove-first-of-each! tokens
- binding-token-list
- (partial fast-token-compare identical?))]
- (hf/add-all! removed-tokens-result removed-tokens)
- (when (seq not-removed-tokens)
- (let [[other-removed-tokens]
- (remove-first-of-each! not-removed-tokens binding-token-list
- (partial fast-token-compare =))]
- (hf/add-all! removed-tokens-result other-removed-tokens))))
- (not-empty binding-token-list))))
- (not-empty binding-token-map))))
+ beta-memory (:id node)
+ (fn do-rem-btm
+ [_ btm]
+ (let [binding-token-map (->mutable-map btm)]
+ (hm/compute-if-present!
+ binding-token-map join-bindings
+ (fn do-rem-btl
+ [_ btl]
+ (let [binding-token-list (->linked-list btl)
+ ;; Attempt to remove tokens using the faster identity-based equality first since
+ ;; most of the time this is all we need and it can be much faster. Any token that
+ ;; wasn't removed via identity, has to be "retried" with normal value-based
+ ;; equality though since those semantics are supported within the engine. This
+ ;; slower path should be rare for any heavy retraction flows - such as those that come
+ ;; via truth maintenance.
+ [removed-tokens not-removed-tokens]
+ (remove-first-of-each! tokens
+ binding-token-list
+ (partial fast-token-compare identical?))]
+ (hf/add-all! removed-tokens-result removed-tokens)
+ (when (seq not-removed-tokens)
+ (let [[other-removed-tokens]
+ (remove-first-of-each! not-removed-tokens binding-token-list
+ (partial fast-token-compare =))]
+ (hf/add-all! removed-tokens-result other-removed-tokens)))
+ (not-empty binding-token-list))))
+ (not-empty binding-token-map))))
(hf/persistent! removed-tokens-result))))
(add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
From 83fa452a0c4e70296161ebbb84bec84bc9922f05 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 23:27:29 -0600
Subject: [PATCH 19/87] feat: map persistent accum memory using compute
---
src/main/clojure/clara/rules/memory.clj | 68 +++++++++++++++----------
1 file changed, 40 insertions(+), 28 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 154e9787..b746e028 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -185,8 +185,17 @@
(defn- ->persistent-coll
"Creates a persistent collection from the input collection, but only if necessary"
[coll]
- (if (coll? coll)
+ (cond
+ (coll? coll)
coll
+
+ (mutable-map? coll)
+ (persistent! coll)
+
+ (map? coll)
+ (persistent! coll)
+
+ :else
(seq coll)))
(defn- remove-first-of-each!
@@ -587,25 +596,28 @@
(hf/persistent! removed-tokens-result))))
(add-accum-reduced! [memory node join-bindings accum-result fact-bindings]
- (assoc! accum-memory
- (:id node)
- (assoc-in (get accum-memory (:id node) {})
- [join-bindings fact-bindings]
- accum-result)))
+ (hm/compute! accum-memory (:id node)
+ (fn add-jbam
+ [_ jbam]
+ (let [join-binding-accum-map (->mutable-map jbam)]
+ (hm/compute! join-binding-accum-map join-bindings
+ (fn add-fbam
+ [_ fbam]
+ (let [fact-binding-accum-map (->mutable-map fbam)]
+ (assoc! fact-binding-accum-map fact-bindings accum-result))))
+ (not-empty join-binding-accum-map)))))
(remove-accum-reduced! [memory node join-bindings fact-bindings]
- (let [node-id (:id node)
- node-id-mem (get accum-memory node-id {})
- join-mem (dissoc (get node-id-mem join-bindings) fact-bindings)
- node-id-mem (if (empty? join-mem)
- (dissoc node-id-mem join-bindings)
- (assoc node-id-mem join-bindings join-mem))]
- (if (empty? node-id-mem)
- (dissoc! accum-memory
- node-id)
- (assoc! accum-memory
- node-id
- node-id-mem))))
+ (hm/compute! accum-memory (:id node)
+ (fn add-jbam
+ [_ jbam]
+ (let [join-binding-accum-map (->mutable-map jbam)]
+ (hm/compute! join-binding-accum-map join-bindings
+ (fn add-fbam
+ [_ fbam]
+ (let [fact-binding-accum-map (->mutable-map fbam)]
+ (not-empty (dissoc! fact-binding-accum-map fact-bindings)))))
+ (not-empty join-binding-accum-map)))))
;; The value under each token in the map should be a sequence
;; of sequences of facts, with each inner sequence coming from a single
@@ -739,20 +751,20 @@
(.clear activation-map))
(to-persistent! [memory]
- (let [update-vals (fn do-update-vals [update-fn m]
- (->> m
- (reduce-kv (fn [m k v]
- (assoc! m k (update-fn v)))
- (hf/mut-map))
- persistent!))
- persistent-vals (partial update-vals ->persistent-coll)]
+ (let [persistent-maps (fn do-update-vals [update-fn m]
+ (->> m
+ (reduce-kv (fn [m k v]
+ (assoc! m k (update-fn v)))
+ (hf/mut-map))
+ persistent!))
+ persistent-vals (partial persistent-maps ->persistent-coll)]
(->PersistentLocalMemory rulebase
activation-group-sort-fn
activation-group-fn
alphas-fn
- (update-vals persistent-vals (persistent! alpha-memory))
- (update-vals persistent-vals (persistent! beta-memory))
- (persistent! accum-memory)
+ (persistent-maps persistent-vals alpha-memory)
+ (persistent-maps persistent-vals beta-memory)
+ (persistent-maps persistent-vals accum-memory)
(persistent! production-memory)
(into {}
(map (juxt key (comp ->persistent-coll val)))
From 281eef7a54a2404c2e36210e22d5c1cdd4ba4dea Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 27 Dec 2023 23:32:03 -0600
Subject: [PATCH 20/87] fix: lint deps
---
deps.edn | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/deps.edn b/deps.edn
index 884e7142..eebd1ac5 100644
--- a/deps.edn
+++ b/deps.edn
@@ -17,7 +17,7 @@
org.clojure/math.combinatorics {:mvn/version "0.1.3"}
criterium/criterium {:mvn/version "0.4.6"}}}
- :clj-kondo {:deps {clj-kondo/clj-kondo {:mvn/version "2023.04.14"}}
+ :clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2023.04.14"}}
:main-opts ["-m" "clj-kondo.main"]}
:test {:extra-paths ["src/test/clojure" "target/test/classes"]
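Context for the one-line fix above: inside a deps.edn alias, :deps replaces the project's dependency set while :extra-deps merges on top of it, so :extra-deps keeps clara's own dependencies on the lint classpath. An illustrative shape, not the repository's full alias map:

{:aliases
 {:clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2023.04.14"}}
              :main-opts  ["-m" "clj-kondo.main"]}}}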
From fed0df750c9ee4aa85e3938ae5dc6ebba0d0afd9 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 28 Dec 2023 00:23:15 -0600
Subject: [PATCH 21/87] fix: compute if present for remove accum
---
src/main/clojure/clara/rules/memory.clj | 21 ++++++++++-----------
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index b746e028..429d6b92 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -608,16 +608,16 @@
(not-empty join-binding-accum-map)))))
(remove-accum-reduced! [memory node join-bindings fact-bindings]
- (hm/compute! accum-memory (:id node)
- (fn add-jbam
- [_ jbam]
- (let [join-binding-accum-map (->mutable-map jbam)]
- (hm/compute! join-binding-accum-map join-bindings
- (fn add-fbam
- [_ fbam]
- (let [fact-binding-accum-map (->mutable-map fbam)]
- (not-empty (dissoc! fact-binding-accum-map fact-bindings)))))
- (not-empty join-binding-accum-map)))))
+ (hm/compute-if-present! accum-memory (:id node)
+ (fn add-jbam
+ [_ jbam]
+ (let [join-binding-accum-map (->mutable-map jbam)]
+ (hm/compute-if-present! join-binding-accum-map join-bindings
+ (fn add-fbam
+ [_ fbam]
+ (let [fact-binding-accum-map (->mutable-map fbam)]
+ (not-empty (dissoc! fact-binding-accum-map fact-bindings)))))
+ (not-empty join-binding-accum-map)))))
;; The value under each token in the map should be a sequence
;; of sequences of facts, with each inner sequence coming from a single
@@ -629,7 +629,6 @@
(update token-facts-map token conj facts))))
(remove-insertions! [memory node tokens]
-
;; Remove the facts inserted from the given token.
(let [token-facts-map (get production-memory (:id node) {})
;; Get removed tokens for the caller.
From 9d0f22b04c3fd4c8a66216a098de287151308b90 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 28 Dec 2023 10:19:04 -0600
Subject: [PATCH 22/87] feat: update transient production memory ops using
mutable ops
---
Makefile | 4 +-
dev/user.clj | 8 ++-
src/main/clojure/clara/rules/memory.clj | 74 ++++++++++++-------------
3 files changed, 44 insertions(+), 42 deletions(-)
diff --git a/Makefile b/Makefile
index 58d1b083..9ef030b0 100644
--- a/Makefile
+++ b/Makefile
@@ -13,10 +13,10 @@ repl: compile-test-java
clojure -M:dev:test:repl
test: compile-test-java
- clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/tap --no-capture-output
+ clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/documentation --no-capture-output
test-generative: compile-test-java
- clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/tap --no-capture-output
+ clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/documentation --no-capture-output
test-config:
clojure -M:dev:test:runner --print-config
diff --git a/dev/user.clj b/dev/user.clj
index 6e867c30..a4ec5ac1 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -2,5 +2,9 @@
(:require [criterium.core :refer [report-result
quick-benchmark] :as crit]
[ham-fisted.api :as hf]
- [ham-fisted.mut-map :as hm]
- [clara.rules.platform :as platform]))
+ [ham-fisted.mut-map :as hm]))
+
+(comment
+ (add-tap #'println)
+ (remove-tap #'println)
+ (tap> "foobar"))
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 429d6b92..b57b0d3d 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -5,6 +5,7 @@
[ham-fisted.mut-map :as hm])
(:import [java.util
Map
+ List
Collections
LinkedList
NavigableMap
@@ -623,46 +624,43 @@
;; of sequences of facts, with each inner sequence coming from a single
;; rule activation.
(add-insertions! [memory node token facts]
- (let [token-facts-map (get production-memory (:id node) {})]
- (assoc! production-memory
- (:id node)
- (update token-facts-map token conj facts))))
+ (hm/compute! production-memory (:id node)
+ (fn add-tfm
+ [_ tfm]
+ (let [token-facts-map (->mutable-map tfm)]
+ (hm/compute! token-facts-map token
+ (fn add-tfl
+ [_ tfl]
+ (let [^List token-facts-list (->linked-list tfl)]
+ (.add token-facts-list facts)
+ token-facts-list)))
+ token-facts-map))))
(remove-insertions! [memory node tokens]
;; Remove the facts inserted from the given token.
- (let [token-facts-map (get production-memory (:id node) {})
- ;; Get removed tokens for the caller.
- [results
- new-token-facts-map]
-
- (loop [results (transient {})
- token-map (transient token-facts-map)
- to-remove tokens]
- (if-let [head-token (first to-remove)]
- ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
- (if-let [token-insertions (get token-map head-token)]
- (let [;; There is no particular significance in removing the
- ;; first group; we just need to remove exactly one.
- [removed-facts & remaining-facts] token-insertions
- removed-insertion-map (if (not-empty remaining-facts)
- (assoc! token-map head-token remaining-facts)
- (dissoc! token-map head-token))
- prev-token-result (get results head-token [])]
- (recur (assoc! results head-token (into prev-token-result removed-facts))
- removed-insertion-map
- (rest to-remove)))
- ;; If the token isn't present in the insertions just try the next one.
- (recur results token-map (rest to-remove)))
- [(persistent! results)
- (persistent! token-map)]))]
-
- ;; Clear the tokens and update the memory.
- (if (not-empty new-token-facts-map)
- (assoc! production-memory
- (:id node)
- new-token-facts-map)
- (dissoc! production-memory (:id node)))
- results))
+ (let [results (hf/mut-map)]
+ (hm/compute-if-present! production-memory (:id node)
+ (fn rem-tfm
+ [_ tfm]
+ (let [token-facts-map (->mutable-map tfm)]
+ (doseq [token tokens]
+ (hm/compute-if-present! token-facts-map token
+ (fn [_ tfl]
+ ;; Don't use contains? due to http://dev.clojure.org/jira/browse/CLJ-700
+ (let [^List token-facts-list (->linked-list tfl)
+ ;; There is no particular significance in removing the
+ ;; first group; we just need to remove exactly one.
+ removed-facts (.remove token-facts-list 0)]
+ (hm/compute! results token
+ (fn add-tfr
+ [_ tfr]
+ (let [token-facts-removed (->linked-list tfr)]
+ (add-all! token-facts-removed removed-facts)
+ token-facts-removed)))
+ (not-empty token-facts-list)))))
+ (not-empty token-facts-map))))
+ (persistent! (hf/update-values results (fn to-persistent [_ coll]
+ (->persistent-coll coll))))))
(add-activations!
[memory production new-activations]
@@ -764,7 +762,7 @@
(persistent-maps persistent-vals alpha-memory)
(persistent-maps persistent-vals beta-memory)
(persistent-maps persistent-vals accum-memory)
- (persistent! production-memory)
+ (persistent-maps persistent-vals production-memory)
(into {}
(map (juxt key (comp ->persistent-coll val)))
activation-map)))))
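
The hunks above replace the transient assoc!/dissoc! bookkeeping in production memory with ham-fisted's compute!, which hands the update function the current value for a key (nil when the key is absent) and stores whatever it returns, so insert and update share a single path. A minimal standalone sketch of that idiom, using the same hf/hm aliases as memory.clj (illustrative keys and facts, not real node data):

    (require '[ham-fisted.api :as hf]
             '[ham-fisted.mut-map :as hm])

    (let [production-memory (hf/mut-map)
          add! (fn [node-id facts]
                 (hm/compute! production-memory node-id
                              ;; the fn receives [key current-value]; nil means no entry yet
                              (fn [_ existing] (conj (or existing []) facts))))]
      (add! 1 [:fact-a])
      (add! 1 [:fact-b])
      (persistent! production-memory))
    ;; => {1 [[:fact-a] [:fact-b]]}
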
From 67e0e5708168e9b17ea42f8c373326fe47998c8f Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 28 Dec 2023 11:43:23 -0600
Subject: [PATCH 23/87] feat: simplify accum using persistent structs for
 results
---
src/main/clojure/clara/rules/memory.clj | 12 +++++-------
1 file changed, 5 insertions(+), 7 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index b57b0d3d..01c158f0 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -603,10 +603,9 @@
(let [join-binding-accum-map (->mutable-map jbam)]
(hm/compute! join-binding-accum-map join-bindings
(fn add-fbam
- [_ fbam]
- (let [fact-binding-accum-map (->mutable-map fbam)]
- (assoc! fact-binding-accum-map fact-bindings accum-result))))
- (not-empty join-binding-accum-map)))))
+ [_ fact-binding-accum-map]
+ (assoc fact-binding-accum-map fact-bindings accum-result)))
+ join-binding-accum-map))))
(remove-accum-reduced! [memory node join-bindings fact-bindings]
(hm/compute-if-present! accum-memory (:id node)
@@ -615,9 +614,8 @@
(let [join-binding-accum-map (->mutable-map jbam)]
(hm/compute-if-present! join-binding-accum-map join-bindings
(fn add-fbam
- [_ fbam]
- (let [fact-binding-accum-map (->mutable-map fbam)]
- (not-empty (dissoc! fact-binding-accum-map fact-bindings)))))
+ [_ fact-binding-accum-map]
+ (not-empty (dissoc fact-binding-accum-map fact-bindings))))
(not-empty join-binding-accum-map)))))
;; The value under each token in the map should be a sequence
From 37eaa07a1acd979cb52fee4102e73291338a5801 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 28 Dec 2023 13:34:21 -0600
Subject: [PATCH 24/87] feat: memory tweaks to initialize nodes as persistent
colls
---
src/main/clojure/clara/rules/memory.clj | 72 ++++++++++++++-----------
1 file changed, 42 insertions(+), 30 deletions(-)
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 01c158f0..1ca37d34 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -522,14 +522,18 @@
(hm/compute! alpha-memory (:id node)
(fn do-add-bem
[_ bem]
- (let [binding-element-map (->mutable-map bem)]
- (hm/compute! binding-element-map join-bindings
- (fn do-add-bel
- [_ bel]
- (let [binding-element-list (->linked-list bel)]
- (add-all! binding-element-list elements)
- binding-element-list)))
- binding-element-map))))
+ (if bem
+ (let [binding-element-map (->mutable-map bem)]
+ (hm/compute! binding-element-map join-bindings
+ (fn do-add-bel
+ [_ bel]
+ (if bel
+ (let [binding-element-list (->linked-list bel)]
+ (add-all! binding-element-list elements)
+ binding-element-list)
+ elements)))
+ binding-element-map)
+ {join-bindings elements}))))
(remove-elements! [memory node join-bindings elements]
;; Do nothing when no elements to remove.
@@ -553,14 +557,18 @@
(hm/compute! beta-memory (:id node)
(fn do-add-btm
[_ btm]
- (let [binding-token-map (->mutable-map btm)]
- (hm/compute! binding-token-map join-bindings
- (fn do-add-btl
- [_ btl]
- (let [binding-token-list (->linked-list btl)]
- (add-all! binding-token-list tokens)
- binding-token-list)))
- binding-token-map))))
+ (if btm
+ (let [binding-token-map (->mutable-map btm)]
+ (hm/compute! binding-token-map join-bindings
+ (fn do-add-btl
+ [_ btl]
+ (if btl
+ (let [binding-token-list (->linked-list btl)]
+ (add-all! binding-token-list tokens)
+ binding-token-list)
+ tokens)))
+ binding-token-map)
+ {join-bindings tokens}))))
(remove-tokens! [memory node join-bindings tokens]
;; The reasoning here is the same as remove-elements!
@@ -600,12 +608,14 @@
(hm/compute! accum-memory (:id node)
(fn add-jbam
[_ jbam]
- (let [join-binding-accum-map (->mutable-map jbam)]
- (hm/compute! join-binding-accum-map join-bindings
- (fn add-fbam
- [_ fact-binding-accum-map]
- (assoc fact-binding-accum-map fact-bindings accum-result)))
- join-binding-accum-map))))
+ (if jbam
+ (let [join-binding-accum-map (->mutable-map jbam)]
+ (hm/compute! join-binding-accum-map join-bindings
+ (fn add-fbam
+ [_ fact-binding-accum-map]
+ (assoc fact-binding-accum-map fact-bindings accum-result)))
+ join-binding-accum-map)
+ {join-bindings {fact-bindings accum-result}}))))
(remove-accum-reduced! [memory node join-bindings fact-bindings]
(hm/compute-if-present! accum-memory (:id node)
@@ -625,14 +635,16 @@
(hm/compute! production-memory (:id node)
(fn add-tfm
[_ tfm]
- (let [token-facts-map (->mutable-map tfm)]
- (hm/compute! token-facts-map token
- (fn add-tfl
- [_ tfl]
- (let [^List token-facts-list (->linked-list tfl)]
- (.add token-facts-list facts)
- token-facts-list)))
- token-facts-map))))
+ (if tfm
+ (let [token-facts-map (->mutable-map tfm)]
+ (hm/compute! token-facts-map token
+ (fn add-tfl
+ [_ tfl]
+ (let [^List token-facts-list (->linked-list tfl)]
+ (.add token-facts-list facts)
+ token-facts-list)))
+ token-facts-map)
+ {token [facts]}))))
(remove-insertions! [memory node tokens]
;; Remove the facts inserted from the given token.
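
Each hunk above adds the same guard: when compute! sees nil for a node id (first touch), the function returns a small persistent literal such as {join-bindings elements} rather than allocating a mutable map and linked list that may never grow. A sketch of just that branch, with the aliases from the previous note; the inner value is kept persistent here for brevity, whereas the real add-elements! promotes it to a mutable map:

    (let [alpha-memory (hf/mut-map)
          add-elements! (fn [node-id join-bindings elements]
                          (hm/compute! alpha-memory node-id
                                       (fn [_ bem]
                                         (if bem
                                           ;; later touches: update the existing entry
                                           (update bem join-bindings (fnil into []) elements)
                                           ;; first touch: cheap persistent literal
                                           {join-bindings elements}))))]
      (add-elements! 3 {:?t 10} [:e1])
      (add-elements! 3 {:?t 10} [:e2])
      (persistent! alpha-memory))
    ;; => {3 {{:?t 10} [:e1 :e2]}}
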
From ac035123b13d9370109b034514776c21a8bf5269 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Fri, 29 Dec 2023 08:01:19 -0600
Subject: [PATCH 25/87] feat: enhance compiler with hamf collections and
 update serialization
---
src/main/clojure/clara/rules/compiler.clj | 61 +++++++++++--------
.../clara/rules/durability/fressian.clj | 23 ++++++-
2 files changed, 57 insertions(+), 27 deletions(-)
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 0393b489..a5ce3407 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -5,6 +5,9 @@
(:require [clara.rules.engine :as eng]
[clara.rules.schema :as schema]
[clara.rules.platform :refer [jeq-wrap] :as platform]
+ [ham-fisted.api :as hf]
+ [ham-fisted.set :as hs]
+ [ham-fisted.mut-map :as hm]
[clojure.set :as set]
[clojure.string :as string]
[clojure.walk :as walk]
@@ -117,12 +120,16 @@
[(symbol (string/replace (.getName property) #"_" "-")) ; Replace underscore with idiomatic dash.
(symbol (str "." (.getName read-method)))])))
-(defn effective-type [type]
+(defn- effective-type*
+ [type]
(if (symbol? type)
(.loadClass (clojure.lang.RT/makeClassLoader) (name type))
type))
-(defn get-fields
+(def effective-type
+ (memoize effective-type*))
+
+(defn- get-fields*
"Returns a map of field name to a symbol representing the function used to access it."
[type]
(let [type (effective-type type)]
@@ -131,6 +138,9 @@
(class? type) (get-bean-accessors type) ; Treat unrecognized classes as beans.
:else [])))
+(def get-fields
+ (memoize get-fields*))
+
(defn- equality-expression? [expression]
(let [qualify-when-sym #(when-let [resolved (and (symbol? %)
(resolve %))]
@@ -1095,7 +1105,7 @@
(assoc! m node [id]))))
node->ids (-> (reduce update-node->ids
- (transient {})
+ (transient (hf/hash-map))
forward-edges)
persistent!)
@@ -1285,7 +1295,7 @@
:compile-ctx {:condition condition
:env env
:msg "compiling alpha node"}})))
- {}
+ (hf/hash-map)
alpha-graph)
id->expr (reduce-kv (fn [prev id production-node]
(let [production (-> production-node :production)]
@@ -1427,7 +1437,7 @@
"that the method size exceeded the maximum set by the jvm, see the cause for the actual error.")
{:compilation-ctxs compilation-ctxs}
e)))))]
- (into {}
+ (into (hf/hash-map)
cat
;; Grouping by ns, most expressions will not have a defined ns, only expressions from production nodes.
;; These expressions must be evaluated in the ns where they were defined, as they will likely contain code
@@ -1655,7 +1665,7 @@
(if (get node-map [condition env])
(update-in node-map [[condition env]] conj node-id)
(assoc node-map [condition env] [node-id])))
- {}
+ (hf/hash-map)
condition-to-node-ids)
;; We sort the alpha nodes by the ordered sequence of the node ids they correspond to
@@ -1729,7 +1739,7 @@
;; Exclude system types from having ancestors for now
;; since none of our use-cases require them. If this changes
;; we may need to define a custom hierarchy for them.
- #{}
+ (hs/set)
(ancestors-fn fact-type)))
fact-type->roots (memoize
@@ -1751,19 +1761,20 @@
(conj (wrapped-ancestors-fn fact-type) fact-type))))
update-roots->facts! (fn [^java.util.Map roots->facts roots-group fact]
- (if-let [v (.get roots->facts roots-group)]
- (.add ^java.util.List v fact)
- (.put roots->facts roots-group (doto (java.util.LinkedList.)
- (.add fact)))))]
+ (hm/compute! roots->facts roots-group
+ (fn update-roots
+ [_ facts]
+ (let [^java.util.List fact-list (or facts (hf/mut-list))]
+ (.add fact-list fact)
+ fact-list))))]
(fn [facts]
(let [roots->facts (java.util.LinkedHashMap.)]
-
(doseq [fact facts
roots-group (fact-type->roots (wrapped-fact-type-fn fact))]
(update-roots->facts! roots->facts roots-group fact))
- (let [return-list (java.util.LinkedList.)
+ (let [return-list (hf/mut-list)
entries (.entrySet roots->facts)
entries-it (.iterator entries)]
;; We iterate over the LinkedHashMap manually to avoid potential issues described at http://dev.clojure.org/jira/browse/CLJ-1738
@@ -1776,10 +1787,9 @@
(when (.hasNext entries-it)
(let [^java.util.Map$Entry e (.next entries-it)]
(.add return-list [(-> e ^AlphaRootsWrapper (.getKey) (.wrapped))
- (java.util.Collections/unmodifiableList (.getValue e))])
+ (hf/persistent! (.getValue e))])
(recur))))
-
- (java.util.Collections/unmodifiableList return-list))))))
+ (hf/persistent! return-list))))))
(sc/defn build-network
"Constructs the network from compiled beta tree and condition functions."
@@ -1803,12 +1813,13 @@
:when (= QueryNode (type node))]
node)
- query-map (into {} (for [query-node query-nodes
+ query-map (->> (for [query-node query-nodes
- ;; Queries can be looked up by reference or by name;
- entry [[(:query query-node) query-node]
- [(:name (:query query-node)) query-node]]]
- entry))
+ ;; Queries can be looked up by reference or by name;
+ entry [[(:query query-node) query-node]
+ [(:name (:query query-node)) query-node]]]
+ entry)
+ (into (hf/hash-map)))
;; type, alpha node tuples.
alpha-nodes (for [{:keys [id type alpha-fn children env] :as alpha-map} alpha-fns
@@ -1819,7 +1830,7 @@
alpha-map (reduce
(fn [alpha-map [type alpha-node]]
(update-in alpha-map [type] conj alpha-node))
- {}
+ (hf/hash-map)
alpha-nodes)
;; Merge the alpha nodes into the id-to-node map
@@ -1843,7 +1854,7 @@
:node-expr-fn-lookup expr-fn-lookup})))
;; Cache of sessions for fast reloading.
-(def ^:private session-cache (atom {}))
+(def ^:private session-cache (atom (hf/hash-map)))
(defn clear-session-cache!
"Clears the cache of reusable Clara sessions, so any subsequent sessions
@@ -1851,7 +1862,7 @@
by tooling or specialized needs; most users can simply specify the :cache false
option when creating sessions."
[]
- (reset! session-cache {}))
+ (reset! session-cache (hf/hash-map)))
(defn production-load-order-comp [a b]
(< (-> a meta ::rule-load-order)
@@ -1949,7 +1960,7 @@
;; in diagnosing compilation errors in specific rules.
omit-compile-ctx (:omit-compile-ctx options omit-compile-ctx-default)
exprs (if omit-compile-ctx
- (into {}
+ (into (hf/hash-map)
(map
(fn [[k [expr ctx]]]
[k [expr (dissoc ctx :compile-ctx)]]))
diff --git a/src/main/clojure/clara/rules/durability/fressian.clj b/src/main/clojure/clara/rules/durability/fressian.clj
index c60c9b00..8d6cea62 100644
--- a/src/main/clojure/clara/rules/durability/fressian.clj
+++ b/src/main/clojure/clara/rules/durability/fressian.clj
@@ -11,7 +11,9 @@
[schema.core :as s]
[clojure.data.fressian :as fres]
[clojure.java.io :as jio]
- [clojure.main :as cm])
+ [clojure.main :as cm]
+ [ham-fisted.api :as hf]
+ [ham-fisted.set :as hs])
(:import [clara.rules.durability
MemIdx
InternalMemIdx]
@@ -47,7 +49,10 @@
WeakHashMap]
[java.io
InputStream
- OutputStream]))
+ OutputStream]
+ [ham_fisted
+ IAPersistentSet
+ IAPersistentMap]))
;; Use this map to cache the symbol for the map->RecordNameHere
;; factory function created for every Clojure record to improve
@@ -242,6 +247,20 @@
(read [_ rdr tag component-count]
(resolve (.readObject rdr))))}}
+ "hamf/set"
+ (create-identity-based-handler
+ IAPersistentSet
+ "hamf/set"
+ write-with-meta
+ (fn hamf-set-reader [rdr] (read-with-meta rdr hs/set)))
+
+ "hamf/map"
+ (create-identity-based-handler
+ IAPersistentMap
+ "hamf/map"
+ (fn clj-map-writer [wtr tag m] (write-with-meta wtr tag m write-map))
+ (fn clj-map-reader [rdr] (read-with-meta rdr (partial into (hf/hash-map)))))
+
"clj/set"
(create-identity-based-handler
clojure.lang.APersistentSet
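
Besides moving internal collections to hamf and registering Fressian handlers for them, this patch memoizes effective-type and get-fields so repeated compilation over the same fact type does not redo classloading and bean introspection. A minimal sketch of that private-fn-plus-memoized-def pattern (illustrative lookup, not the compiler code):

    (defn- expensive-type-lookup*
      "Stand-in for a reflective lookup such as classloading or bean introspection."
      [type-sym]
      (Class/forName (name type-sym)))

    (def expensive-type-lookup
      "Memoized wrapper: repeated calls with the same symbol hit the cache."
      (memoize expensive-type-lookup*))

    ;; (expensive-type-lookup 'java.lang.String) => java.lang.String, cached afterwards
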
From 1b3db0d7a6994a7cb7c23662def5d468d14f746b Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Fri, 29 Dec 2023 20:48:34 -0600
Subject: [PATCH 26/87] feat: add parallel processing support to various node
types
---
.../clj-kondo.exports/clara/rules/config.edn | 3 +-
src/main/clojure/clara/rules/engine.clj | 521 +++++++++---------
src/main/clojure/clara/rules/platform.clj | 60 +-
.../clojure/clara/tools/testing_utils.clj | 10 +-
4 files changed, 338 insertions(+), 256 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index 6b946f6a..a7b18abe 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -1,5 +1,6 @@
{:lint-as {clara.rules/defsession clojure.core/def
- clara.rules.platform/eager-for clojure.core/for}
+ clara.rules.platform/eager-for clojure.core/for
+ clara.rules.platform/match-for clojure.core/for}
:hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 0cfde6dd..06ab307a 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -332,8 +332,9 @@
(when (or (not (get-in production [:props :no-loop]))
(not (= production (get-in *rule-context* [:node :production]))))
- (let [activations (platform/eager-for [token tokens]
- (->Activation node token))]
+ (let [activations (platform/eager-for
+ [token tokens]
+ (->Activation node token))]
(l/add-activations! listener node activations)
@@ -349,8 +350,9 @@
(l/left-retract! listener node tokens)
;; Remove pending activations triggered by the retracted tokens.
- (let [activations (platform/eager-for [token tokens]
- (->Activation node token))
+ (let [activations (platform/eager-for
+ [token tokens]
+ (->Activation node token))
;; We attempt to remove a pending activation for all tokens retracted, but our expectation
;; is that each token may remove a pending activation
@@ -493,18 +495,16 @@
:conditions-and-rules conditions-and-rules}
cause))))
-(defn- alpha-node-matches
- [facts env activation node]
- (platform/eager-for [fact facts
- :let [bindings (try
- (activation fact env)
- (catch Exception e
- (throw-condition-exception {:cause e
- :node node
- :fact fact
- :env env})))]
- :when bindings] ; FIXME: add env.
- [fact bindings]))
+(defn- alpha-node-match->Element
+ [fact env activation node]
+ (try
+ (when-let [bindings (activation fact env)]
+ (->Element fact bindings))
+ (catch Exception e
+ (throw-condition-exception {:cause e
+ :node node
+ :fact fact
+ :env env}))))
;; Record representing alpha nodes in the Rete network,
;; each of which evaluates a single condition and
@@ -513,26 +513,28 @@
IAlphaActivate
(alpha-activate [node facts memory transport listener]
- (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
- (l/alpha-activate! listener node (map first fact-binding-pairs))
+ (let [match-elements (platform/match-for
+ [fact facts]
+ (alpha-node-match->Element fact env activation node))]
+ (l/alpha-activate! listener node (map :fact match-elements))
(send-elements
transport
memory
listener
children
- (platform/eager-for [[fact bindings] fact-binding-pairs]
- (->Element fact bindings)))))
+ match-elements)))
(alpha-retract [node facts memory transport listener]
- (let [fact-binding-pairs (alpha-node-matches facts env activation node)]
- (l/alpha-retract! listener node (map first fact-binding-pairs))
+ (let [match-elements (platform/match-for
+ [fact facts]
+ (alpha-node-match->Element fact env activation node))]
+ (l/alpha-retract! listener node (map :fact match-elements))
(retract-elements
transport
memory
listener
children
- (platform/eager-for [[fact bindings] fact-binding-pairs]
- (->Element fact bindings))))))
+ match-elements))))
(defrecord RootJoinNode [id condition children binding-keys]
ILeftActivate
@@ -553,7 +555,7 @@
(l/right-activate! listener node elements)
-;; Add elements to the working memory to support analysis tools.
+ ;; Add elements to the working memory to support analysis tools.
(mem/add-elements! memory node join-bindings elements)
;; Simply create tokens and send it downstream.
(send-tokens
@@ -561,8 +563,9 @@
memory
listener
children
- (platform/eager-for [{:keys [fact bindings] :as element} elements]
- (->Token [[fact (:id node)]] bindings))))
+ (platform/eager-for
+ [{:keys [fact bindings] :as element} elements]
+ (->Token [[fact (:id node)]] bindings))))
(right-retract [node join-bindings elements memory transport listener]
@@ -574,8 +577,9 @@
memory
listener
children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)]
- (->Token [[fact (:id node)]] bindings))))
+ (platform/eager-for
+ [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)]
+ (->Token [[fact (:id node)]] bindings))))
IConditionNode
(get-condition-description [this]
@@ -596,11 +600,12 @@
memory
listener
children
- (platform/eager-for [element (mem/get-elements memory node join-bindings)
- token tokens
- :let [fact (:fact element)
- fact-binding (:bindings element)]]
- (->Token (conj (:matches token) [fact id]) (conj fact-binding (:bindings token))))))
+ (platform/eager-for
+ [element (mem/get-elements memory node join-bindings)
+ token tokens
+ :let [fact (:fact element)
+ fact-binding (:bindings element)]]
+ (->Token (conj (:matches token) [fact id]) (conj fact-binding (:bindings token))))))
(left-retract [node join-bindings tokens memory transport listener]
(l/left-retract! listener node tokens)
@@ -609,11 +614,12 @@
memory
listener
children
- (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
- element (mem/get-elements memory node join-bindings)
- :let [fact (:fact element)
- fact-bindings (:bindings element)]]
- (->Token (conj (:matches token) [fact id]) (conj fact-bindings (:bindings token))))))
+ (platform/eager-for
+ [token (mem/remove-tokens! memory node join-bindings tokens)
+ element (mem/get-elements memory node join-bindings)
+ :let [fact (:fact element)
+ fact-bindings (:bindings element)]]
+ (->Token (conj (:matches token) [fact id]) (conj fact-bindings (:bindings token))))))
(get-join-keys [node] binding-keys)
@@ -628,9 +634,10 @@
memory
listener
children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
- {:keys [fact bindings] :as element} elements]
- (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
+ (platform/eager-for
+ [token (mem/get-tokens memory node join-bindings)
+ {:keys [fact bindings] :as element} elements]
+ (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
(right-retract [node join-bindings elements memory transport listener]
(l/right-retract! listener node elements)
@@ -639,9 +646,10 @@
memory
listener
children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
- token (mem/get-tokens memory node join-bindings)]
- (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
+ (platform/eager-for
+ [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
+ token (mem/get-tokens memory node join-bindings)]
+ (->Token (conj (:matches token) [fact id]) (conj (:bindings token) bindings)))))
IConditionNode
(get-condition-description [this]
@@ -660,41 +668,47 @@
fact-bindings)})))]
beta-bindings))
+(defn expression-join-node-match->Token
+ [element token node id join-filter-fn env]
+ (let [fact (:fact element)
+ fact-binding (:bindings element)
+ beta-bindings (join-node-matches node join-filter-fn token fact fact-binding env)]
+ (when beta-bindings
+ (->Token (conj (:matches token) [fact id])
+ (conj fact-binding (:bindings token) beta-bindings)))))
+
(defrecord ExpressionJoinNode [id condition join-filter-fn children binding-keys]
ILeftActivate
(left-activate [node join-bindings tokens memory transport listener]
;; Add token to the node's working memory for future right activations.
(mem/add-tokens! memory node join-bindings tokens)
(l/left-activate! listener node tokens)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [element (mem/get-elements memory node join-bindings)
- token tokens
- :let [fact (:fact element)
- fact-binding (:bindings element)
- beta-bindings (join-node-matches node join-filter-fn token fact fact-binding {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj fact-binding (:bindings token) beta-bindings)))))
+ (let [elements (mem/get-elements memory node join-bindings)
+ matched-tokens (platform/match-for
+ [element elements
+ token tokens]
+ (expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ matched-tokens)))
(left-retract [node join-bindings tokens memory transport listener]
(l/left-retract! listener node tokens)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/remove-tokens! memory node join-bindings tokens)
- element (mem/get-elements memory node join-bindings)
- :let [fact (:fact element)
- fact-bindings (:bindings element)
- beta-bindings (join-node-matches node join-filter-fn token fact fact-bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj fact-bindings (:bindings token) beta-bindings)))))
+ (let [tokens (mem/remove-tokens! memory node join-bindings tokens)
+ elements (mem/get-elements memory node join-bindings)
+ matched-tokens (platform/match-for
+ [element elements
+ token tokens]
+ (expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ matched-tokens)))
(get-join-keys [node] binding-keys)
@@ -702,33 +716,34 @@
IRightActivate
(right-activate [node join-bindings elements memory transport listener]
- (mem/add-elements! memory node join-bindings elements)
- (l/right-activate! listener node elements)
- (send-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
- {:keys [fact bindings] :as element} elements
- :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj (:bindings token) bindings beta-bindings)))))
+ (let [tokens (mem/get-tokens memory node join-bindings)
+ matched-tokens (platform/match-for
+ [element elements
+ token tokens]
+ (expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
+ (mem/add-elements! memory node join-bindings elements)
+ (l/right-activate! listener node elements)
+ (send-tokens
+ transport
+ memory
+ listener
+ children
+ matched-tokens)))
(right-retract [node join-bindings elements memory transport listener]
(l/right-retract! listener node elements)
- (retract-tokens
- transport
- memory
- listener
- children
- (platform/eager-for [{:keys [fact bindings] :as element} (mem/remove-elements! memory node join-bindings elements)
- token (mem/get-tokens memory node join-bindings)
- :let [beta-bindings (join-node-matches node join-filter-fn token fact bindings {})]
- :when beta-bindings]
- (->Token (conj (:matches token) [fact id])
- (conj (:bindings token) bindings beta-bindings)))))
+ (let [elements (mem/remove-elements! memory node join-bindings elements)
+ tokens (mem/get-tokens memory node join-bindings)
+ matched-tokens (platform/match-for
+ [element elements
+ token tokens]
+ (expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
+ (retract-tokens
+ transport
+ memory
+ listener
+ children
+ matched-tokens)))
IConditionNode
(get-condition-description [this]
@@ -791,6 +806,14 @@
(join-node-matches node join-filter-fn token fact bindings (:env condition)))
elements))
+(defn negation-join-node-not-match->Token
+ [node token elements join-filter-fn condition]
+ (when-not (some (fn negation-join-match
+ [{:keys [fact bindings]}]
+ (join-node-matches node join-filter-fn token fact bindings (:env condition)))
+ elements)
+ token))
+
;; A specialization of the NegationNode that supports additional tests
;; that have to occur on the beta side of the network. The key difference between this and the simple
;; negation node is the join-filter-fn, which allows negation tests to
@@ -808,13 +831,13 @@
listener
children
(let [elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token tokens
- :when (not (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition))]
- token))))
+ (platform/match-for
+ [token tokens]
+ (negation-join-node-not-match->Token node
+ token
+ elements
+ join-filter-fn
+ condition)))))
(left-retract [node join-bindings tokens memory transport listener]
(l/left-retract! listener node tokens)
@@ -823,17 +846,16 @@
memory
listener
children
-
;; Retract only if it previously had no matches in the negation node,
;; and therefore had an activation.
(let [elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token tokens
- :when (not (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition))]
- token))))
+ (platform/match-for
+ [token tokens]
+ (negation-join-node-not-match->Token node
+ token
+ elements
+ join-filter-fn
+ condition)))))
(get-join-keys [node] binding-keys)
@@ -848,24 +870,23 @@
memory
listener
children
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
-
- ;; Retract downstream if the token now has matching elements and didn't before.
- ;; We check the new elements first in the expectation that the new elements will be
- ;; smaller than the previous elements most of the time
- ;; and that the time to check the elements will be proportional
- ;; to the number of elements.
- :when (and (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition)
- (not (matches-some-facts? node
- token
- previous-elements
- join-filter-fn
- condition)))]
- token))
+ (platform/match-for
+ [token (mem/get-tokens memory node join-bindings)]
+ ;; Retract downstream if the token now has matching elements and didn't before.
+ ;; We check the new elements first in the expectation that the new elements will be
+ ;; smaller than the previous elements most of the time
+ ;; and that the time to check the elements will be proportional
+ ;; to the number of elements.
+ (when-not (negation-join-node-not-match->Token node
+ token
+ elements
+ join-filter-fn
+ condition)
+ (negation-join-node-not-match->Token node
+ token
+ previous-elements
+ join-filter-fn
+ condition))))
;; Adding the elements will mutate the previous-elements since, on the JVM, the LocalMemory
;; currently returns a mutable List from get-elements after changes in issue 184. We need to use the
;; new and old elements in the logic above as separate collections. Therefore we need to delay updating the
@@ -882,21 +903,20 @@
listener
children
(let [remaining-elements (mem/get-elements memory node join-bindings)]
- (platform/eager-for [token (mem/get-tokens memory node join-bindings)
-
- ;; Propagate tokens when some of the retracted facts joined
- ;; but none of the remaining facts do.
- :when (and (matches-some-facts? node
- token
- elements
- join-filter-fn
- condition)
- (not (matches-some-facts? node
- token
- remaining-elements
- join-filter-fn
- condition)))]
- token))))
+ (platform/match-for
+ [token (mem/get-tokens memory node join-bindings)]
+ ;; Propagate tokens when some of the retracted facts joined
+ ;; but none of the remaining facts do.
+ (when-not (negation-join-node-not-match->Token node
+ token
+ elements
+ join-filter-fn
+ condition)
+ (negation-join-node-not-match->Token node
+ token
+ remaining-elements
+ join-filter-fn
+ condition))))))
IConditionNode
(get-condition-description [this]
@@ -906,7 +926,7 @@
constraints)]
[:not (into [type] full-constraints)])))
-(defn- test-node-matches
+(defn- test-node-match->Token
[node test-handler env token]
(let [test-result (try
(test-handler token env)
@@ -915,7 +935,8 @@
:node node
:env env
:bindings (:bindings token)})))]
- test-result))
+ (when test-result
+ token)))
;; The test node represents a Rete extension in which an arbitrary test condition is run
;; against bindings from ancestor nodes. Since this node
@@ -929,10 +950,9 @@
memory
listener
children
- (platform/eager-for
- [token tokens
- :when (test-node-matches node (:handler test) env token)]
- token)))
+ (platform/match-for
+ [token tokens]
+ (test-node-match->Token node (:handler test) env token))))
(left-retract [node join-bindings tokens memory transport listener]
(l/left-retract! listener node tokens)
@@ -1112,8 +1132,9 @@
IAccumRightActivate
(pre-reduce [node elements]
;; Return a seq tuples with the form [binding-group facts-from-group-elements].
- (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
- [bindings (mapv :fact element-group)]))
+ (platform/eager-for
+ [[bindings element-group] (platform/group-by-seq :bindings elements)]
+ [bindings (mapv :fact element-group)]))
(right-activate-reduced [node join-bindings fact-seq memory transport listener]
@@ -1424,7 +1445,7 @@
converted-result join-bindings transport memory listener))))
;; Propagate nothing if the above conditions don't apply.
- :default nil)))
+ :else nil)))
(left-retract [node join-bindings tokens memory transport listener]
@@ -1483,8 +1504,9 @@
;; Return a map of bindings to the candidate facts that match them. This accumulator
;; depends on the values from parent facts, so we defer actually running the accumulator
;; until we have a token.
- (platform/eager-for [[bindings element-group] (platform/group-by-seq :bindings elements)]
- [bindings (map :fact element-group)]))
+ (platform/eager-for
+ [[bindings element-group] (platform/group-by-seq :bindings elements)]
+ [bindings (map :fact element-group)]))
(right-activate-reduced [node join-bindings binding-candidates-seq memory transport listener]
@@ -1715,10 +1737,11 @@
(defn variables-as-keywords
"Returns symbols in the given s-expression that start with '?' as keywords"
[expression]
- (into #{} (platform/eager-for [item (tree-seq coll? seq expression)
- :when (and (symbol? item)
- (= \? (first (name item))))]
- (keyword item))))
+ (into #{} (platform/eager-for
+ [item (tree-seq coll? seq expression)
+ :when (and (symbol? item)
+ (= \? (first (name item))))]
+ (keyword item))))
(defn conj-rulebases
"DEPRECATED. Simply concat sequences of rules and queries.
@@ -1877,90 +1900,46 @@
;; but we kept a one-argument version of the fire-rules in case anyone is calling the fire-rules protocol function or method on the session directly.
(fire-rules [session] (fire-rules session {}))
(fire-rules [session opts]
+ (binding [platform/*parallel-match* (or (:parallel-match opts) platform/*parallel-match*)]
+ (let [transient-memory (mem/to-transient memory)
+ transient-listener (l/to-transient listener)]
+
+ (if-not (:cancelling opts)
+ ;; We originally performed insertions and retractions immediately after the insert and retract calls,
+ ;; but this had the downside of making a pattern like "Retract facts, insert other facts, and fire the rules"
+ ;; perform at least three transitions between a persistent and transient memory. Delaying the actual execution
+ ;; of the insertions and retractions until firing the rules allows us to cut this down to a single transition
+ ;; between persistent and transient memory. There is some cost to the runtime dispatch on operation types here,
+ ;; but this is presumably less significant than the cost of memory transitions.
+ ;;
+ ;; We perform the insertions and retractions in the same order as they were applied to the session since
+ ;; if a fact is not in the session, retracted, and then subsequently inserted it should be in the session at
+ ;; the end.
+ (do
+ (doseq [{op-type :type facts :facts} pending-operations]
- (let [transient-memory (mem/to-transient memory)
- transient-listener (l/to-transient listener)]
-
- (if-not (:cancelling opts)
- ;; We originally performed insertions and retractions immediately after the insert and retract calls,
- ;; but this had the downside of making a pattern like "Retract facts, insert other facts, and fire the rules"
- ;; perform at least three transitions between a persistent and transient memory. Delaying the actual execution
- ;; of the insertions and retractions until firing the rules allows us to cut this down to a single transition
- ;; between persistent and transient memory. There is some cost to the runtime dispatch on operation types here,
- ;; but this is presumably less significant than the cost of memory transitions.
- ;;
- ;; We perform the insertions and retractions in the same order as they were applied to the session since
- ;; if a fact is not in the session, retracted, and then subsequently inserted it should be in the session at
- ;; the end.
- (do
- (doseq [{op-type :type facts :facts} pending-operations]
-
- (case op-type
-
- :insertion
- (do
- (l/insert-facts! transient-listener nil nil facts)
+ (case op-type
- (binding [*pending-external-retractions* (atom [])]
- ;; Bind the external retractions cache so that any logical retractions as a result
- ;; of these insertions can be cached and executed as a batch instead of eagerly realizing
- ;; them. An external insertion of a fact that matches
- ;; a negation or accumulator condition can cause logical retractions.
- (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))
-
- :retraction
- (do
- (l/retract-facts! transient-listener nil nil facts)
+ :insertion
+ (do
+ (l/insert-facts! transient-listener nil nil facts)
+
+ (binding [*pending-external-retractions* (atom [])]
+ ;; Bind the external retractions cache so that any logical retractions as a result
+ ;; of these insertions can be cached and executed as a batch instead of eagerly realizing
+ ;; them. An external insertion of a fact that matches
+ ;; a negation or accumulator condition can cause logical retractions.
+ (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
+ root alpha-roots]
+ (alpha-activate root fact-group transient-memory transport transient-listener))
+ (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))
+
+ :retraction
+ (do
+ (l/retract-facts! transient-listener nil nil facts)
- (binding [*pending-external-retractions* (atom facts)]
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))))
-
- (fire-rules* rulebase
- (:production-nodes rulebase)
- transient-memory
- transport
- transient-listener
- get-alphas-fn
- (uc/get-ordered-update-cache)))
-
- (let [insertions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :insertion)))
- (mapcat :facts))
- pending-operations)
-
- retractions (sequence
- (comp (filter (fn [pending-op]
- (= (:type pending-op)
- :retraction)))
- (mapcat :facts))
- pending-operations)
-
- update-cache (ca/get-cancelling-update-cache)]
-
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener transient-listener}]
-
- ;; Insertions should come before retractions so that if we insert and then retract the same
- ;; fact that is not already in the session the end result will be that the session won't have that fact.
- ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
- ;; and then later we would insert the fact.
- (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
-
- (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
- root alpha-roots]
- (alpha-retract root fact-group transient-memory transport transient-listener))
+ (binding [*pending-external-retractions* (atom facts)]
+ (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))))
(fire-rules* rulebase
(:production-nodes rulebase)
@@ -1968,16 +1947,60 @@
transport
transient-listener
get-alphas-fn
- ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
- ;; If this is suboptimal for some workflows we can revisit this.
- update-cache))))
-
- (LocalSession. rulebase
- (mem/to-persistent! transient-memory)
- transport
- (l/to-persistent! transient-listener)
- get-alphas-fn
- [])))
+ (uc/get-ordered-update-cache)))
+
+ (let [insertions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :insertion)))
+ (mapcat :facts))
+ pending-operations)
+
+ retractions (sequence
+ (comp (filter (fn [pending-op]
+ (= (:type pending-op)
+ :retraction)))
+ (mapcat :facts))
+ pending-operations)
+
+ update-cache (ca/get-cancelling-update-cache)]
+
+ (binding [*current-session* {:rulebase rulebase
+ :transient-memory transient-memory
+ :transport transport
+ :insertions (atom 0)
+ :get-alphas-fn get-alphas-fn
+ :pending-updates update-cache
+ :listener transient-listener}]
+
+ ;; Insertions should come before retractions so that if we insert and then retract the same
+ ;; fact that is not already in the session the end result will be that the session won't have that fact.
+ ;; If retractions came first then we'd first retract a fact that isn't in the session, which doesn't do anything,
+ ;; and then later we would insert the fact.
+ (doseq [[alpha-roots fact-group] (get-alphas-fn insertions)
+ root alpha-roots]
+ (alpha-activate root fact-group transient-memory transport transient-listener))
+
+ (doseq [[alpha-roots fact-group] (get-alphas-fn retractions)
+ root alpha-roots]
+ (alpha-retract root fact-group transient-memory transport transient-listener))
+
+ (fire-rules* rulebase
+ (:production-nodes rulebase)
+ transient-memory
+ transport
+ transient-listener
+ get-alphas-fn
+ ;; This continues to use the cancelling cache after the first batch of insertions and retractions.
+ ;; If this is suboptimal for some workflows we can revisit this.
+ update-cache))))
+
+ (LocalSession. rulebase
+ (mem/to-persistent! transient-memory)
+ transport
+ (l/to-persistent! transient-listener)
+ get-alphas-fn
+ []))))
(query [session query params]
(let [query-node (get-in rulebase [:query-nodes query])]
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index d02a050f..51709bf7 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -1,8 +1,19 @@
(ns clara.rules.platform
"This namespace is for internal use and may move in the future.
Platform unified code Clojure/ClojureScript."
- (:import [java.lang IllegalArgumentException]
- [java.util LinkedHashMap]))
+ (:require
+ [ham-fisted.function :as f])
+ (:import
+ [clojure.lang Var]
+ [java.lang IllegalArgumentException]
+ [java.util LinkedHashMap]
+ [java.util.concurrent
+ ExecutionException
+ CompletableFuture
+ ExecutorService
+ ForkJoinPool
+ Future]
+ [java.util.function Function]))
(defn throw-error
"Throw an error with the given description string."
@@ -101,7 +112,52 @@
~@(for [[tl] binding-pairs]
`(.remove ~tl))))))
+(def ^:dynamic *thread-pool* (ForkJoinPool/commonPool))
+
+(defmacro completable-future
+  "Asynchronously invokes the body inside a CompletableFuture, preserving the current thread binding frame.
+   Uses `ForkJoinPool/commonPool` by default; the pool can be overridden via the `*thread-pool*` binding."
+ ^CompletableFuture [& body]
+ `(let [binding-frame# (Var/cloneThreadBindingFrame) ;;; capture the thread local binding frame before start
+ ^CompletableFuture result# (CompletableFuture.) ;;; this is the CompletableFuture being returned
+ ^ExecutorService pool# (or *thread-pool* (ForkJoinPool/commonPool))
+ ^Runnable fbody# (fn do-complete#
+ []
+ (try
+ (Var/resetThreadBindingFrame binding-frame#) ;;; set the Clojure binding frame captured above
+ (.complete result# (do ~@body)) ;;; send the result of evaluating the body to the CompletableFuture
+ (catch Throwable ~'e
+ (.completeExceptionally result# ~'e)))) ;;; if we catch an exception we send it to the CompletableFuture
+ ^Future fut# (.submit pool# fbody#)
+ ^Function cancel# (f/function
+ [~'_]
+ (future-cancel fut#))] ;;; submit the work to the pool and get the FutureTask doing the work
+ ;;; if the CompletableFuture returns exceptionally
+ ;;; then cancel the Future which is currently doing the work
+ (.exceptionally result# cancel#)
+ result#))
+
(defmacro eager-for
"A for wrapped with a doall to force realisation. Usage is the same as regular for."
[& body]
`(doall (for ~@body)))
+
+(def ^:dynamic *parallel-match* false)
+
+(defmacro match-for
+ [bindings & body]
+ `(if *parallel-match*
+ (let [fut-seq# (eager-for [~@bindings]
+ (platform/completable-future
+ ~@body))]
+ (try
+ (eager-for [fut# fut-seq#
+ :let [match# @fut#]
+ :when match#]
+ match#)
+ (catch ExecutionException e#
+ (throw (ex-cause e#)))))
+ (eager-for [~@bindings
+ :let [match# (do ~@body)]
+ :when match#]
+ match#)))
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
index c2e8d6a3..eb5483a4 100644
--- a/src/main/clojure/clara/tools/testing_utils.clj
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -7,7 +7,8 @@
[clara.rules.update-cache.cancelling :as ca]
[clara.rules.compiler :as com]
[clara.rules.dsl :as dsl]
- [clojure.test :refer [is]]))
+ [clojure.test :refer [is]]
+ [clara.rules.platform :as platform]))
(defmacro def-rules-test
"This macro allows creation of rules, queries, and sessions from arbitrary combinations of rules
@@ -66,12 +67,13 @@
(defn opts-fixture
;; For operations other than replace-facts uc/get-ordered-update-cache is currently
- ;; always used. This fixture ensures that CancellingUpdateCache is tested for a wide
+  ;; always used. This fixture ensures that CancellingUpdateCache and parallel matching are tested for a wide
;; variety of different cases rather than a few cases cases specific to it.
[f]
(f)
- (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
- (f)))
+ (binding [platform/*parallel-match* true]
+ (with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
+ (f))))
(defn join-filter-equals
"Intended to be a test function that is the same as equals, but is not visible to Clara as such
From 77f3174d77cde69cc11564ad035250a5b9cd1b62 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sat, 30 Dec 2023 11:56:46 -0600
Subject: [PATCH 27/87] feat: initial pass at popping multiple activations for
 parallelization
---
src/main/clojure/clara/rules/engine.clj | 141 ++++++++++++------------
src/main/clojure/clara/rules/memory.clj | 65 +++++++----
src/test/clojure/clara/test_engine.clj | 50 +++++++++
src/test/clojure/clara/test_rules.clj | 2 +-
4 files changed, 168 insertions(+), 90 deletions(-)
create mode 100644 src/test/clojure/clara/test_engine.clj
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 06ab307a..e4df45f6 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -1779,76 +1779,77 @@
(do
;; If there are activations, fire them.
- (when-let [{:keys [node token] :as activation} (mem/pop-activation! transient-memory)]
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- (let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])]
- (binding [*rule-context* {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]
-
- ;; Fire the rule itself.
- (try
- ((:rhs node) token (:env (:production node)))
- ;; Don't do anything if a given insertion type has no corresponding
- ;; facts to avoid complicating traces. Note that since each no RHS's of
- ;; downstream rules are fired here everything is governed by truth maintenance.
- ;; Therefore, the reordering of retractions and insertions should have no impact
- ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
- ;; of the rules engine.
- ;;
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [retrieved-unconditional-insertions @batched-unconditional-insertions
- retrieved-logical-insertions @batched-logical-insertions
- retrieved-rhs-retractions @batched-rhs-retractions]
- (l/fire-activation! listener
- activation
- {:unconditional-insertions retrieved-unconditional-insertions
- :logical-insertions retrieved-logical-insertions
- :rhs-retractions retrieved-rhs-retractions})
- (when-let [batched (seq retrieved-unconditional-insertions)]
- (flush-insertions! batched true))
- (when-let [batched (seq retrieved-logical-insertions)]
- (flush-insertions! batched false))
- (when-let [batched (seq retrieved-rhs-retractions)]
- (flush-rhs-retractions! batched)))
- (catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e)))))
-
- ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
- ;; will be in context for child rules.
- (when (some-> node :production :props :no-loop)
- (flush-updates *current-session*)))))
+ (let [activations (mem/pop-activations! transient-memory 10)]
+ (doseq [{:keys [node token] :as activation} activations]
+ ;; Use vectors for the insertion caches so that within an insertion type
+ ;; (unconditional or logical) all insertions are done in order after the into
+ ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
+ ;; in the sequences.
+ (let [batched-logical-insertions (atom [])
+ batched-unconditional-insertions (atom [])
+ batched-rhs-retractions (atom [])]
+ (binding [*rule-context* {:token token
+ :node node
+ :batched-logical-insertions batched-logical-insertions
+ :batched-unconditional-insertions batched-unconditional-insertions
+ :batched-rhs-retractions batched-rhs-retractions}]
+
+ ;; Fire the rule itself.
+ (try
+ ((:rhs node) token (:env (:production node)))
+ ;; Don't do anything if a given insertion type has no corresponding
+ ;; facts to avoid complicating traces. Note that since each no RHS's of
+ ;; downstream rules are fired here everything is governed by truth maintenance.
+ ;; Therefore, the reordering of retractions and insertions should have no impact
+ ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
+ ;; of the rules engine.
+ ;;
+ ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
+ ;; below. They will be dereferenced again if an exception is caught, but in the error
+ ;; case we aren't worried about performance.
+ (let [retrieved-unconditional-insertions @batched-unconditional-insertions
+ retrieved-logical-insertions @batched-logical-insertions
+ retrieved-rhs-retractions @batched-rhs-retractions]
+ (l/fire-activation! listener
+ activation
+ {:unconditional-insertions retrieved-unconditional-insertions
+ :logical-insertions retrieved-logical-insertions
+ :rhs-retractions retrieved-rhs-retractions})
+ (when-let [batched (seq retrieved-unconditional-insertions)]
+ (flush-insertions! batched true))
+ (when-let [batched (seq retrieved-logical-insertions)]
+ (flush-insertions! batched false))
+ (when-let [batched (seq retrieved-rhs-retractions)]
+ (flush-rhs-retractions! batched)))
+ (catch Exception e
+ ;; If the rule fired an exception, help debugging by attaching
+ ;; details about the rule itself, cached insertions, and any listeners
+ ;; while propagating the cause.
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e)))))
+
+ ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
+ ;; will be in context for child rules.
+ (when (some-> node :production :props :no-loop)
+ (flush-updates *current-session*))))))
(recur (mem/next-activation-group transient-memory) next-group)))
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 1ca37d34..48e35533 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -97,7 +97,7 @@
;; Pop an activation from the working memory. Returns nil if no
;; activations are pending.
- (pop-activation! [memory])
+ (pop-activations! [memory count])
;; Returns the group of the next activation, or nil if none are pending.
(next-activation-group [memory])
@@ -686,24 +686,51 @@
activation-group
(->activation-priority-queue new-activations)))))
- (pop-activation!
- [memory]
- (when (not (.isEmpty activation-map))
- (let [entry (.firstEntry activation-map)
- key (.getKey entry)
- ^java.util.Queue value (.getValue entry)
-
- ;; An empty value is illegal and should be removed by an action
- ;; that creates one (e.g. a remove-activations! call).
- ^RuleOrderedActivation activation (.remove value)]
-
- ;; This activation group is empty now, so remove it from
- ;; the map entirely.
- (when (.isEmpty value)
- (.remove activation-map key))
-
- ;; Return the selected activation.
- (.activation activation))))
+ (pop-activations!
+ [memory count]
+ (loop [^List activations (hf/mut-list)
+ remaining count]
+ (cond
+ (.isEmpty activation-map)
+ (persistent! activations)
+
+ (not (and (number? remaining)
+ (pos? remaining)))
+ (persistent! activations)
+
+ :else
+ (let [entry (.firstEntry activation-map)
+ key (.getKey entry)
+ ^java.util.Queue activation-queue (.getValue entry)
+ curr-popped-activations (not (.isEmpty activations))
+ next-no-loop-activation (some-> ^RuleOrderedActivation (.peek activation-queue)
+ (.activation)
+ :node :production :props :no-loop)]
+ (if (and curr-popped-activations next-no-loop-activation)
+ (persistent! activations)
+ (let [;; An empty value is illegal and should be removed by an action
+ ;; that creates one (e.g. a remove-activations! call).
+ ^RuleOrderedActivation activation-wrapper (.remove activation-queue)
+ ;; Extract the selected activation.
+ activation (.activation activation-wrapper)
+ empty-activation-group (.isEmpty activation-queue)]
+ (.add activations activation)
+ ;; This activation group is empty now, so remove it from
+ ;; the map entirely.
+ (when empty-activation-group
+ (.remove activation-map key))
+ (cond
+ ;; This activation group is empty so return and do not move
+ ;; to the next activation group
+ empty-activation-group
+ (persistent! activations)
+ ;; When we encounter a no-loop activation we need to stop returning activations since it would
+ ;; flush updates after the RHS is activated
+ (some-> activation :node :production :props :no-loop)
+ (persistent! activations)
+
+ :else
+ (recur activations (dec remaining)))))))))
(next-activation-group
[memory]
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
new file mode 100644
index 00000000..fb3df027
--- /dev/null
+++ b/src/test/clojure/clara/test_engine.clj
@@ -0,0 +1,50 @@
+(ns clara.test-engine
+ (:require [clara.rules :refer [clear-ns-productions!
+ mk-session
+ fire-rules
+ query
+ defrule defquery
+ insert-all insert-all!
+ insert insert!
+ retract retract!]]
+ [clara.rules.compiler :refer [clear-session-cache!]]
+ [clara.rules.platform :as platform]
+ [clara.rules.engine :as eng]
+ [criterium.core :refer [report-result
+ quick-benchmark]]))
+(defrule test-slow-rule-1
+ [:number [{:keys [value]}]
+ (= value ?value)
+ (do (Thread/sleep 50) (pos? ?value))]
+ =>
+ (insert! {:type :result
+ :value (+ ?value 100)}))
+
+(defrule test-slow-rule-2
+ [:result [{:keys [value]}]
+ (= value ?value)
+ (do (Thread/sleep 50) (pos? ?value))]
+ =>
+ (println "result:" ?value)
+ (insert! {:type :output
+ :value (inc ?value)}))
+
+(defquery test-slow-query
+ []
+ [:output [{:keys [value]}] (= value ?value)])
+
+(def session
+ (let [fact-seq (repeat 50 {:type :number
+ :value 199})
+ session (-> (mk-session 'clara.test-engine :fact-type-fn :type)
+ (insert-all fact-seq))]
+ session))
+
+(comment
+ (time
+ (-> (fire-rules session)
+ (query test-slow-query)
+ (count)))
+ (do
+ (clear-ns-productions!)
+ (clear-session-cache!)))
diff --git a/src/test/clojure/clara/test_rules.clj b/src/test/clojure/clara/test_rules.clj
index 3e99af4d..f7afb8de 100644
--- a/src/test/clojure/clara/test_rules.clj
+++ b/src/test/clojure/clara/test_rules.clj
@@ -1626,7 +1626,7 @@
[Hot (= ?t temperature)]]]]
(insert-unconditional! (->First)))
- q (dsl/parse-query [] [[First]])
+ q (dsl/parse-query [] [[?result <- First]])
session->results (fn [session] (-> session
(insert (->Cold 10) (->Hot 10))
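
The batching in pop-activations! deliberately stops when it reaches a no-loop activation, since updates from a no-loop rule are flushed explicitly after its RHS fires and must not be interleaved with further activations from the same batch. For reference, the flag it inspects (:node :production :props :no-loop) comes from a rule's property map; an illustrative rule, not taken from this repository:

    (ns example.no-loop
      (:require [clara.rules :refer [defrule insert!]]))

    (defrule clamp-negative-reading
      ;; {:no-loop true} is what pop-activations! sees under :props :no-loop
      {:no-loop true}
      [:reading [{:keys [value]}] (= value ?value) (neg? ?value)]
      =>
      (insert! {:type :reading :value 0}))
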
From 8abaa78be6b9e7d349b1a6cbcc8b1d49649842f4 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sat, 30 Dec 2023 14:25:23 -0600
Subject: [PATCH 28/87] feat: first cut of parallel computation of rhs activations
---
.../clj-kondo.exports/clara/rules/config.edn | 2 +-
src/main/clojure/clara/rules/engine.clj | 171 ++++++------
src/main/clojure/clara/rules/platform.clj | 6 +-
.../clojure/clara/tools/testing_utils.clj | 2 +-
src/test/clojure/clara/test_engine.clj | 4 +-
src/test/clojure/clara/test_rules.clj | 253 +++++++++---------
6 files changed, 223 insertions(+), 215 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index a7b18abe..f402225d 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -1,6 +1,6 @@
{:lint-as {clara.rules/defsession clojure.core/def
clara.rules.platform/eager-for clojure.core/for
- clara.rules.platform/match-for clojure.core/for}
+ clara.rules.platform/compute-for clojure.core/for}
:hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index e4df45f6..98803fcb 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -513,7 +513,7 @@
IAlphaActivate
(alpha-activate [node facts memory transport listener]
- (let [match-elements (platform/match-for
+ (let [match-elements (platform/compute-for
[fact facts]
(alpha-node-match->Element fact env activation node))]
(l/alpha-activate! listener node (map :fact match-elements))
@@ -525,7 +525,7 @@
match-elements)))
(alpha-retract [node facts memory transport listener]
- (let [match-elements (platform/match-for
+ (let [match-elements (platform/compute-for
[fact facts]
(alpha-node-match->Element fact env activation node))]
(l/alpha-retract! listener node (map :fact match-elements))
@@ -684,7 +684,7 @@
(mem/add-tokens! memory node join-bindings tokens)
(l/left-activate! listener node tokens)
(let [elements (mem/get-elements memory node join-bindings)
- matched-tokens (platform/match-for
+ matched-tokens (platform/compute-for
[element elements
token tokens]
(expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
@@ -699,7 +699,7 @@
(l/left-retract! listener node tokens)
(let [tokens (mem/remove-tokens! memory node join-bindings tokens)
elements (mem/get-elements memory node join-bindings)
- matched-tokens (platform/match-for
+ matched-tokens (platform/compute-for
[element elements
token tokens]
(expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
@@ -717,7 +717,7 @@
IRightActivate
(right-activate [node join-bindings elements memory transport listener]
(let [tokens (mem/get-tokens memory node join-bindings)
- matched-tokens (platform/match-for
+ matched-tokens (platform/compute-for
[element elements
token tokens]
(expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
@@ -734,7 +734,7 @@
(l/right-retract! listener node elements)
(let [elements (mem/remove-elements! memory node join-bindings elements)
tokens (mem/get-tokens memory node join-bindings)
- matched-tokens (platform/match-for
+ matched-tokens (platform/compute-for
[element elements
token tokens]
(expression-join-node-match->Token element token node id join-filter-fn (:env condition)))]
@@ -831,7 +831,7 @@
listener
children
(let [elements (mem/get-elements memory node join-bindings)]
- (platform/match-for
+ (platform/compute-for
[token tokens]
(negation-join-node-not-match->Token node
token
@@ -849,7 +849,7 @@
;; Retract only if it previously had no matches in the negation node,
;; and therefore had an activation.
(let [elements (mem/get-elements memory node join-bindings)]
- (platform/match-for
+ (platform/compute-for
[token tokens]
(negation-join-node-not-match->Token node
token
@@ -870,7 +870,7 @@
memory
listener
children
- (platform/match-for
+ (platform/compute-for
[token (mem/get-tokens memory node join-bindings)]
;; Retract downstream if the token now has matching elements and didn't before.
;; We check the new elements first in the expectation that the new elements will be
@@ -903,7 +903,7 @@
listener
children
(let [remaining-elements (mem/get-elements memory node join-bindings)]
- (platform/match-for
+ (platform/compute-for
[token (mem/get-tokens memory node join-bindings)]
;; Propagate tokens when some of the retracted facts joined
;; but none of the remaining facts do.
@@ -950,7 +950,7 @@
memory
listener
children
- (platform/match-for
+ (platform/compute-for
[token tokens]
(test-node-match->Token node (:handler test) env token))))
@@ -1779,78 +1779,81 @@
(do
;; If there are activations, fire them.
- (let [activations (mem/pop-activations! transient-memory 10)]
- (doseq [{:keys [node token] :as activation} activations]
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- (let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])]
- (binding [*rule-context* {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]
-
- ;; Fire the rule itself.
- (try
- ((:rhs node) token (:env (:production node)))
- ;; Don't do anything if a given insertion type has no corresponding
- ;; facts to avoid complicating traces. Note that since each no RHS's of
- ;; downstream rules are fired here everything is governed by truth maintenance.
- ;; Therefore, the reordering of retractions and insertions should have no impact
- ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
- ;; of the rules engine.
- ;;
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [retrieved-unconditional-insertions @batched-unconditional-insertions
- retrieved-logical-insertions @batched-logical-insertions
- retrieved-rhs-retractions @batched-rhs-retractions]
- (l/fire-activation! listener
- activation
- {:unconditional-insertions retrieved-unconditional-insertions
- :logical-insertions retrieved-logical-insertions
- :rhs-retractions retrieved-rhs-retractions})
- (when-let [batched (seq retrieved-unconditional-insertions)]
- (flush-insertions! batched true))
- (when-let [batched (seq retrieved-logical-insertions)]
- (flush-insertions! batched false))
- (when-let [batched (seq retrieved-rhs-retractions)]
- (flush-rhs-retractions! batched)))
- (catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e)))))
-
- ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
- ;; will be in context for child rules.
- (when (some-> node :production :props :no-loop)
- (flush-updates *current-session*))))))
-
+ (let [activations (mem/pop-activations! transient-memory Long/MAX_VALUE)
+ rhs-activation-results
+ (platform/compute-for
+ [{:keys [node token] :as activation} activations]
+ ;; Use vectors for the insertion caches so that within an insertion type
+ ;; (unconditional or logical) all insertions are done in order after the into
+ ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
+ ;; in the sequences.
+ (let [batched-logical-insertions (atom [])
+ batched-unconditional-insertions (atom [])
+ batched-rhs-retractions (atom [])]
+ (binding [*rule-context* {:token token
+ :node node
+ :batched-logical-insertions batched-logical-insertions
+ :batched-unconditional-insertions batched-unconditional-insertions
+ :batched-rhs-retractions batched-rhs-retractions}]
+
+ ;; Fire the rule itself.
+ (try
+ ((:rhs node) token (:env (:production node)))
+ ;; Don't do anything if a given insertion type has no corresponding
+ ;; facts to avoid complicating traces. Note that since each no RHS's of
+ ;; downstream rules are fired here everything is governed by truth maintenance.
+ ;; Therefore, the reordering of retractions and insertions should have no impact
+ ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
+ ;; of the rules engine.
+ ;;
+ ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
+ ;; below. They will be dereferenced again if an exception is caught, but in the error
+ ;; case we aren't worried about performance.
+ (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
+ :logical-insertions @batched-logical-insertions
+ :rhs-retractions @batched-rhs-retractions}]
+ (l/fire-activation! listener activation resulting-ops)
+ {:token token :node node :ops resulting-ops})
+ (catch Exception e
+ ;; If the rule fired an exception, help debugging by attaching
+ ;; details about the rule itself, cached insertions, and any listeners
+ ;; while propagating the cause.
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e))))))))]
+ (doseq [{:keys [token node ops]} rhs-activation-results
+ :let [{:keys [unconditional-insertions
+ logical-insertions
+ rhs-retractions]} ops]]
+ (binding [*rule-context* {:token token
+ :node node}]
+ (when-let [batched (seq unconditional-insertions)]
+ (flush-insertions! batched true))
+ (when-let [batched (seq logical-insertions)]
+ (flush-insertions! batched false))
+ (when-let [batched (seq rhs-retractions)]
+ (flush-rhs-retractions! batched))
+ ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
+ ;; will be in context for child rules.
+ (when (some-> node :production :props :no-loop)
+ (flush-updates *current-session*)))))
(recur (mem/next-activation-group transient-memory) next-group)))
;; There were no items to be activated, so flush any pending
@@ -1901,7 +1904,7 @@
;; but we kept a one-argument version of the fire-rules in case anyone is calling the fire-rules protocol function or method on the session directly.
(fire-rules [session] (fire-rules session {}))
(fire-rules [session opts]
- (binding [platform/*parallel-match* (or (:parallel-match opts) platform/*parallel-match*)]
+ (binding [platform/*parallel-compute* (or (:parallel-compute opts) platform/*parallel-compute*)]
(let [transient-memory (mem/to-transient memory)
transient-listener (l/to-transient listener)]
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index 51709bf7..4526d000 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -142,11 +142,11 @@
[& body]
`(doall (for ~@body)))
-(def ^:dynamic *parallel-match* false)
+(def ^:dynamic *parallel-compute* false)
-(defmacro match-for
+(defmacro compute-for
[bindings & body]
- `(if *parallel-match*
+ `(if *parallel-compute*
(let [fut-seq# (eager-for [~@bindings]
(platform/completable-future
~@body))]
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
index eb5483a4..bfbd7949 100644
--- a/src/main/clojure/clara/tools/testing_utils.clj
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -71,7 +71,7 @@
;; variety of different cases rather than a few cases specific to it.
[f]
(f)
- (binding [platform/*parallel-match* true]
+ (binding [platform/*parallel-compute* true]
(with-redefs [uc/get-ordered-update-cache ca/get-cancelling-update-cache]
(f))))
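
A rough usage sketch of the renamed macro (illustrative only, assuming the patched clara.rules.platform namespace is on the classpath): compute-for keeps for-style bindings, drops nil results, and when *parallel-compute* is true evaluates each body on the ForkJoin common pool via completable futures.

    (require '[clara.rules.platform :as platform])

    ;; Sequential (default): behaves like an eager, nil-filtering for.
    (platform/compute-for [x (range 5)]
      (when (odd? x) (* x x)))
    ;; => (1 9)

    ;; Parallel: same values, each body evaluated on its own future.
    (binding [platform/*parallel-compute* true]
      (platform/compute-for [x (range 5)]
        (when (odd? x) (* x x))))
    ;; => (1 9)
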
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index fb3df027..a7adc753 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -17,6 +17,7 @@
(= value ?value)
(do (Thread/sleep 50) (pos? ?value))]
=>
+ (println "number:" ?value)
(insert! {:type :result
:value (+ ?value 100)}))
@@ -26,6 +27,7 @@
(do (Thread/sleep 50) (pos? ?value))]
=>
(println "result:" ?value)
+ (Thread/sleep 50)
(insert! {:type :output
:value (inc ?value)}))
@@ -42,7 +44,7 @@
(comment
(time
- (-> (fire-rules session)
+ (-> (fire-rules session {:parallel-compute true})
(query test-slow-query)
(count)))
(do
diff --git a/src/test/clojure/clara/test_rules.clj b/src/test/clojure/clara/test_rules.clj
index f7afb8de..480c1f60 100644
--- a/src/test/clojure/clara/test_rules.clj
+++ b/src/test/clojure/clara/test_rules.clj
@@ -17,7 +17,8 @@
[clara.order-ruleset :as order-rules]
[schema.test]
[schema.core :as sc]
- [clara.tools.testing-utils :as tu :refer [assert-ex-data]])
+ [clara.tools.testing-utils :as tu :refer [assert-ex-data]]
+ [clara.rules.platform :as platform])
(:import [clara.rules.testfacts Temperature WindSpeed Cold Hot TemperatureHistory
ColdAndWindy LousyWeather First Second Third Fourth FlexibleFields]
[clara.rules.engine
@@ -1767,159 +1768,161 @@
(frequencies (query retract2 q))))))
(deftest test-rule-order-respected
- (let [fire-order (atom [])
- rule-A (dsl/parse-rule [[Cold]]
- (swap! fire-order conj :A))
- rule-B (dsl/parse-rule [[Cold]]
- (swap! fire-order conj :B))]
+ (when-not platform/*parallel-compute*
+ (let [fire-order (atom [])
+ rule-A (dsl/parse-rule [[Cold]]
+ (swap! fire-order conj :A))
+ rule-B (dsl/parse-rule [[Cold]]
+ (swap! fire-order conj :B))]
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-A rule-B] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Rule order in a seq of rule structures should be respected")
-
- (reset! fire-order [])
-
- (-> (mk-session [rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when we reverse the seq of rule structures the firing order is reversed.")
-
- (reset! fire-order [])
-
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
+ (-> (mk-session [rule-A rule-B] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:A :C :D])
- "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
- in the rule sources should fire first."))
+ (is (= @fire-order [:A :B])
+ "Rule order in a seq of rule structures should be respected")
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
+ (-> (mk-session [rule-B rule-A] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:C :D :A])
- "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
- in the rule sources should fire later."))
+ (is (= @fire-order [:B :A])
+ "Validate that when we reverse the seq of rule structures the firing order is reversed.")
- (reset! fire-order [])
+ (reset! fire-order [])
+
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :C :D])
+ "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
+ in the rule sources should fire first."))
- (let [rule-C-line (-> #'order-rules/rule-C meta :line)
- rule-D-line (-> #'order-rules/rule-D meta :line)]
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
+ (reset! fire-order [])
(binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :A])
+ "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
+ in the rule sources should fire later."))
+
+ (reset! fire-order [])
+
+ (let [rule-C-line (-> #'order-rules/rule-C meta :line)
+ rule-D-line (-> #'order-rules/rule-D meta :line)]
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
+
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules))
+
+ (is (= @fire-order [:D :C])
+ "When we alter the metadata of the rules to reverse their line order their
+ firing order should also be reversed.")
+
+ ;; Reset the line metadata on the rules to what it was previously.
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
+
+ (reset! fire-order [])
+
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-prior* [rule-A rule-B]]
(-> (mk-session 'clara.order-ruleset :cache false)
(insert (->Cold 10))
- fire-rules))
+ fire-rules)
+ (is (= @fire-order [:A :B :C :D])
+ "When a :production-seq occurs before defrules, the rules in the :production-seq
+ should fire before those rules and in the order they are in in the :production-seq."))
- (is (= @fire-order [:D :C])
- "When we alter the metadata of the rules to reverse their line order their
- firing order should also be reversed.")
+ (reset! fire-order [])
- ;; Reset the line metadata on the rules to what it was previously.
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-A rule-B]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :A :B])
+ "When a :production-seq occurs after defrules, the rules in the :production-seq
+ should fire after those rules and in the order they are in in the :production-seq."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-prior* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B :C :D])
- "When a :production-seq occurs before defrules, the rules in the :production-seq
- should fire before those rules and in the order they are in in the :production-seq."))
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-B rule-A]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :B :A])
+ "Validate that when the order of rules in the :production-seq is reversed those rules
+ fire in the reversed order"))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
+ (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:C :D :A :B])
- "When a :production-seq occurs after defrules, the rules in the :production-seq
- should fire after those rules and in the order they are in in the :production-seq."))
+ (is (= @fire-order [:B :A])
+ "Validate that when explicit salience is present it overrides rule order.")
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-B rule-A]]
- (-> (mk-session 'clara.order-ruleset :cache false)
+ (-> (mk-session [rule-A rule-B rule-A] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:C :D :B :A])
- "Validate that when the order of rules in the :production-seq is reversed those rules
- fire in the reversed order"))
-
- (reset! fire-order [])
-
- (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when explicit salience is present it overrides rule order.")
-
- (reset! fire-order [])
-
- (-> (mk-session [rule-A rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
+ (is (= @fire-order [:A :B])
+ "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
- (reset! fire-order [])))
+ (reset! fire-order []))))
(deftest test-rule-order-respected-by-batched-inserts
- (let [qholder (atom [])
-
- r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Cold ?t)))
- r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Hot ?t)))
-
- ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
- q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
- q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
-
- order1 (mk-session [r1 r2 q1 q2] :cache false)
- order2 (mk-session [r2 r1 q1 q2] :cache false)
-
- run-session (fn [s]
- (let [s (-> s
- (insert (->Temperature 10 "MCI"))
- fire-rules)]
- [(-> s (query q1) frequencies)
- (-> s (query q2) frequencies)]))
-
- [res11 res12] (run-session order1)
- holder1 @qholder
- _ (reset! qholder [])
-
- [res21 res22] (run-session order2)
- holder2 @qholder
- _ (reset! qholder [])]
-
- ;; Sanity check that the query matches what is expected.
- (is (= (frequencies [{:?c (->Cold 10)}])
- res11
- res21))
- (is (= (frequencies [{:?h (->Hot 10)}])
- res12
- res22))
-
- (is (= [:cold :hot] holder1))
- (is (= [:hot :cold] holder2))))
+ (when-not platform/*parallel-compute*
+ (let [qholder (atom [])
+
+ r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Cold ?t)))
+ r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Hot ?t)))
+
+ ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
+ q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
+ q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
+
+ order1 (mk-session [r1 r2 q1 q2] :cache false)
+ order2 (mk-session [r2 r1 q1 q2] :cache false)
+
+ run-session (fn [s]
+ (let [s (-> s
+ (insert (->Temperature 10 "MCI"))
+ fire-rules)]
+ [(-> s (query q1) frequencies)
+ (-> s (query q2) frequencies)]))
+
+ [res11 res12] (run-session order1)
+ holder1 @qholder
+ _ (reset! qholder [])
+
+ [res21 res22] (run-session order2)
+ holder2 @qholder
+ _ (reset! qholder [])]
+
+ ;; Sanity check that the query matches what is expected.
+ (is (= (frequencies [{:?c (->Cold 10)}])
+ res11
+ res21))
+ (is (= (frequencies [{:?h (->Hot 10)}])
+ res12
+ res22))
+
+ (is (= [:cold :hot] holder1))
+ (is (= [:hot :cold] holder2)))))
;; TODO: Move this to test-dsl once a strategy for replicating assert-ex-data is determined and implemented.
#_{:clj-kondo/ignore [:unresolved-symbol]}
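
For reference, an illustrative sketch (not part of the diff) of the new entry point this patch introduces: parallel RHS evaluation is opt-in per fire-rules call, which is also why the ordering-sensitive tests above are skipped when *parallel-compute* is bound. The my.app.rules namespace below is hypothetical.

    (require '[clara.rules :refer [mk-session insert fire-rules]])

    (-> (mk-session 'my.app.rules :fact-type-fn :type) ; hypothetical rule namespace
        (insert {:type :number :value 1})
        (fire-rules {:parallel-compute true}))
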
From c03f6f0ec845cf942272762958a0d5c73ad0b771 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 31 Dec 2023 14:30:47 -0600
Subject: [PATCH 29/87] feat: add support for async rhs productions
---
.../clj-kondo.exports/clara/rules/config.edn | 3 +-
deps.edn | 1 +
dev/user.clj | 33 ++++-
src/main/clojure/clara/rules/engine.clj | 110 +++++++--------
src/main/clojure/clara/rules/platform.clj | 133 +++++++++++-------
.../clojure/clara/rules/platform/async.clj | 111 +++++++++++++++
src/test/clojure/clara/test_engine.clj | 20 +--
7 files changed, 292 insertions(+), 119 deletions(-)
create mode 100644 src/main/clojure/clara/rules/platform/async.clj
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index f402225d..a66e2854 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -1,6 +1,7 @@
{:lint-as {clara.rules/defsession clojure.core/def
clara.rules.platform/eager-for clojure.core/for
- clara.rules.platform/compute-for clojure.core/for}
+ clara.rules.platform/compute-for clojure.core/for
+ clara.rules.platform/produce-for clojure.core/for}
:hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
diff --git a/deps.edn b/deps.edn
index eebd1ac5..403eff79 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,6 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
+ org.clojure/core.async {:mvn/version "1.6.681"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/dev/user.clj b/dev/user.clj
index a4ec5ac1..5961233c 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -1,10 +1,41 @@
(ns user
(:require [criterium.core :refer [report-result
quick-benchmark] :as crit]
+ [clara.rules.platform :refer [compute-for
+ produce-for] :as platform]
+             [clojure.core.async :refer [go timeout <!]]))
 
 (comment
+  (report-result
+   (quick-benchmark
+    (binding [platform/*parallel-compute* true]
+      (->> (produce-for
+ [v (range 100)]
+ (apply + v (range v)))
+ (apply +)))
+ {:verbose true}))
+
+ (report-result
+ (quick-benchmark
+ (binding [platform/*parallel-compute* true]
+ (->> (compute-for
+ [v (range 100)]
+ (apply + v (range v)))
+ (apply +)))
+ {:verbose true}))
+
+ (report-result
+ (quick-benchmark
+ (binding [platform/*parallel-compute* true]
+ (->> (for
+ [v (range 100)
+ (apply + v (range v))])
+ (apply +)))
+ {:verbose true}))
+
(add-tap #'println)
(remove-tap #'println)
(tap> "foobar"))
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 98803fcb..fe02ecd9 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -1781,63 +1781,59 @@
;; If there are activations, fire them.
(let [activations (mem/pop-activations! transient-memory Long/MAX_VALUE)
rhs-activation-results
- (platform/compute-for
- [{:keys [node token] :as activation} activations]
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- (let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])]
- (binding [*rule-context* {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]
-
- ;; Fire the rule itself.
- (try
- ((:rhs node) token (:env (:production node)))
- ;; Don't do anything if a given insertion type has no corresponding
- ;; facts to avoid complicating traces. Note that since each no RHS's of
- ;; downstream rules are fired here everything is governed by truth maintenance.
- ;; Therefore, the reordering of retractions and insertions should have no impact
- ;; assuming that the evaluation of rule conditions is pure, which is a general expectation
- ;; of the rules engine.
- ;;
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
- :logical-insertions @batched-logical-insertions
- :rhs-retractions @batched-rhs-retractions}]
- (l/fire-activation! listener activation resulting-ops)
- {:token token :node node :ops resulting-ops})
- (catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e))))))))]
+ (platform/produce-for
+ [{:keys [node token] :as activation} activations
+ ;; Use vectors for the insertion caches so that within an insertion type
+ ;; (unconditional or logical) all insertions are done in order after the into
+ ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
+ ;; in the sequences.
+ :let [batched-logical-insertions (atom [])
+ batched-unconditional-insertions (atom [])
+ batched-rhs-retractions (atom [])
+ rule-context {:token token
+ :node node
+ :batched-logical-insertions batched-logical-insertions
+ :batched-unconditional-insertions batched-unconditional-insertions
+ :batched-rhs-retractions batched-rhs-retractions}]]
+                         ;;; this is the production expression, which may return an async result when computing in parallel
+ (binding [*rule-context* rule-context]
+ ;; Fire the rule itself.
+ (platform/produce-try
+ ((:rhs node) token (:env (:production node)))
+ (catch Exception e
+ ;; If the rule fired an exception, help debugging by attaching
+ ;; details about the rule itself, cached insertions, and any listeners
+ ;; while propagating the cause.
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e))))))
+                         ;;; this is the post-production expression, which runs after the production has been activated
+ (binding [*rule-context* rule-context]
+ ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
+ ;; below. They will be dereferenced again if an exception is caught, but in the error
+ ;; case we aren't worried about performance.
+ (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
+ :logical-insertions @batched-logical-insertions
+ :rhs-retractions @batched-rhs-retractions}]
+ (l/fire-activation! listener activation resulting-ops)
+ {:token token :node node :ops resulting-ops})))]
(doseq [{:keys [token node ops]} rhs-activation-results
:let [{:keys [unconditional-insertions
logical-insertions
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index 4526d000..587d5dd9 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -1,19 +1,13 @@
(ns clara.rules.platform
"This namespace is for internal use and may move in the future.
Platform unified code Clojure/ClojureScript."
- (:require
- [ham-fisted.function :as f])
- (:import
- [clojure.lang Var]
- [java.lang IllegalArgumentException]
- [java.util LinkedHashMap]
- [java.util.concurrent
- ExecutionException
- CompletableFuture
- ExecutorService
- ForkJoinPool
- Future]
- [java.util.function Function]))
+ (:require [clara.rules.platform.async :as async])
+ (:import [java.lang IllegalArgumentException]
+ [java.util LinkedHashMap]
+ [java.util.concurrent
+ CompletableFuture
+ ExecutionException]
+ [java.util.function Function BiFunction]))
(defn throw-error
"Throw an error with the given description string."
@@ -112,31 +106,6 @@
~@(for [[tl] binding-pairs]
`(.remove ~tl))))))
-(def ^:dynamic *thread-pool* (ForkJoinPool/commonPool))
-
-(defmacro completable-future
- "Asynchronously invokes the body inside a completable future, preserves the current thread binding frame,
- using by default the `ForkJoinPool/commonPool`, the pool used can be specified via `*thread-pool*` binding."
- ^CompletableFuture [& body]
- `(let [binding-frame# (Var/cloneThreadBindingFrame) ;;; capture the thread local binding frame before start
- ^CompletableFuture result# (CompletableFuture.) ;;; this is the CompletableFuture being returned
- ^ExecutorService pool# (or *thread-pool* (ForkJoinPool/commonPool))
- ^Runnable fbody# (fn do-complete#
- []
- (try
- (Var/resetThreadBindingFrame binding-frame#) ;;; set the Clojure binding frame captured above
- (.complete result# (do ~@body)) ;;; send the result of evaluating the body to the CompletableFuture
- (catch Throwable ~'e
- (.completeExceptionally result# ~'e)))) ;;; if we catch an exception we send it to the CompletableFuture
- ^Future fut# (.submit pool# fbody#)
- ^Function cancel# (f/function
- [~'_]
- (future-cancel fut#))] ;;; submit the work to the pool and get the FutureTask doing the work
- ;;; if the CompletableFuture returns exceptionally
- ;;; then cancel the Future which is currently doing the work
- (.exceptionally result# cancel#)
- result#))
-
(defmacro eager-for
"A for wrapped with a doall to force realisation. Usage is the same as regular for."
[& body]
@@ -145,19 +114,81 @@
(def ^:dynamic *parallel-compute* false)
(defmacro compute-for
- [bindings & body]
+ [bindings body]
`(if *parallel-compute*
- (let [fut-seq# (eager-for [~@bindings]
- (platform/completable-future
- ~@body))]
+ (let [fut-seq# (eager-for
+ [~@bindings]
+ (async/completable-future
+ ~body))]
(try
- (eager-for [fut# fut-seq#
- :let [match# @fut#]
- :when match#]
- match#)
+ (eager-for
+ [fut# fut-seq#
+ :let [result# @fut#]
+ :when result#]
+ result#)
(catch ExecutionException e#
- (throw (ex-cause e#)))))
- (eager-for [~@bindings
- :let [match# (do ~@body)]
- :when match#]
- match#)))
+ (if-let [cause# (ex-cause e#)]
+ (throw cause#)
+ (throw e#)))))
+ (eager-for
+ [~@bindings
+ :let [result# ~body]
+ :when result#]
+ result#)))
+
+(def ^:dynamic *production* nil)
+
+(defmacro produce-try
+ [body & catch-finally]
+ `(if *parallel-compute*
+ (let [result# (try
+ ~body
+ ~@catch-finally)]
+ (if (instance? CompletableFuture result#)
+ (.exceptionally ^CompletableFuture result#
+ (reify Function
+ (apply [_# e#]
+ (try
+ (throw e#)
+ ~@catch-finally))))
+ result#))
+ (try
+ ~body
+ ~@catch-finally)))
+
+(defmacro produce-for
+ [bindings prod-body & post-body]
+ `(if *parallel-compute*
+ (let [fut-seq# (doall
+ (for
+ [~@bindings]
+ (let [^CompletableFuture f# (async/flatten-completable-future
+ (async/completable-future
+ ~prod-body))]
+ (if ~(some? post-body)
+ (.thenApply f#
+ (reify Function
+ (apply [~'_ result#]
+ (binding [*production* result#]
+ ~@post-body))))
+ f#))))
+ ^BiFunction conj# (reify BiFunction
+                          (apply [~'_ ~'results ~'result]
+ (conj ~'results ~'result)))]
+ (loop [[^CompletableFuture fut# & more#] fut-seq#
+ ^CompletableFuture res-fut# (CompletableFuture/completedFuture [])]
+ (if-not fut#
+ (try
+ @res-fut#
+ (catch ExecutionException e#
+ (if-let [cause# (ex-cause e#)]
+ (throw cause#)
+ (throw e#))))
+ (recur more# (.thenCombine res-fut# fut# conj#)))))
+ (eager-for
+ [~@bindings
+ :let [result# ~prod-body]]
+ (if ~(some? post-body)
+ (binding [*production* result#]
+ ~@post-body)
+ result#))))
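
An illustrative sketch of how the two bodies of produce-for relate (not part of the patch, assuming the patched clara.rules.platform is loaded): the production body runs first, then the post-production body runs with *production* bound to its result, which in parallel mode may have been produced asynchronously and flattened.

    (require '[clara.rules.platform :as platform])

    (platform/produce-for [x (range 3)]
      (* x 10)                      ; production step
      (inc platform/*production*))  ; post-production step, sees the result
    ;; => (1 11 21) sequentially; a vector of the same values when
    ;;    *parallel-compute* is true
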
diff --git a/src/main/clojure/clara/rules/platform/async.clj b/src/main/clojure/clara/rules/platform/async.clj
new file mode 100644
index 00000000..b05bb8c4
--- /dev/null
+++ b/src/main/clojure/clara/rules/platform/async.clj
@@ -0,0 +1,111 @@
+(ns clara.rules.platform.async
+ (:require [clojure.core.async :refer [take!]])
+ (:import [clojure.lang Var IDeref]
+ [java.util.concurrent
+ ExecutionException
+ CompletableFuture
+ ExecutorService
+ ForkJoinPool
+ Future]
+ [clojure.core.async.impl.channels ManyToManyChannel]
+ [java.util.function Function]))
+
+(defprotocol CompletableFutureAdapter
+ (as-completable-future [this]))
+
+(defn completable-future-adapter?
+ "is v a `CompletableFutureAdapter`?"
+ [v]
+ (satisfies? CompletableFutureAdapter v))
+
+(def ^:private ^Function flatten-handler
+ (reify Function
+ (apply [_ v]
+ (if-not (completable-future-adapter? v)
+ (CompletableFuture/completedFuture v)
+ (.thenCompose ^CompletableFuture (as-completable-future v) ^Function flatten-handler)))))
+
+(defn flatten-completable-future
+ "recursively takes from an async object until only one non-async value remains,
+ returning it over a future"
+ [^CompletableFuture f]
+ (.thenCompose ^CompletableFuture f ^Function flatten-handler))
+
+(defn- handle-async-channel-result
+ "puts a result on an async channel if not nil, always closes the channel when done"
+ [channel]
+ (let [^CompletableFuture f (CompletableFuture.)]
+ (take! channel
+ (fn do-read
+ [v]
+ (cond
+ (instance? ExecutionException v)
+ (if-let [^Exception cause (ex-cause v)]
+ (.completeExceptionally f ^Exception cause)
+ (.completeExceptionally f ^Exception v))
+
+ (instance? Exception v)
+ (.completeExceptionally f ^Exception v)
+
+ :else
+ (.complete f v))))
+ f))
+
+(defn- handle-blocking-deref-result
+ "puts a blocking ref result on an async channel if not nil, always closes the channel when done"
+ [vref]
+ (let [^CompletableFuture f (CompletableFuture.)]
+ (try
+ (.complete f @vref)
+ (catch ExecutionException e
+ (if-let [^Exception cause (ex-cause e)]
+ (.completeExceptionally f ^Exception cause)
+ (.completeExceptionally f ^Exception e)))
+ (catch Exception e
+ (.completeExceptionally f e)))
+ f))
+
+(extend-type CompletableFuture
+ CompletableFutureAdapter
+ (as-completable-future [f]
+ f))
+
+(extend-type ManyToManyChannel
+ CompletableFutureAdapter
+ (as-completable-future [c]
+ (handle-async-channel-result c)))
+
+(extend-type IDeref
+ CompletableFutureAdapter
+ (as-completable-future [r]
+ (handle-blocking-deref-result r)))
+
+(extend-type Future
+ CompletableFutureAdapter
+ (as-completable-future [f]
+ (handle-blocking-deref-result f)))
+
+(def ^:dynamic *thread-pool* (ForkJoinPool/commonPool))
+
+(defmacro completable-future
+ "Asynchronously invokes the body inside a completable future, preserves the current thread binding frame,
+ using by default the `ForkJoinPool/commonPool`, the pool used can be specified via `*thread-pool*` binding."
+ ^CompletableFuture [& body]
+ `(let [binding-frame# (Var/cloneThreadBindingFrame) ;;; capture the thread local binding frame before start
+ ^CompletableFuture res-fut# (CompletableFuture.) ;;; this is the CompletableFuture being returned
+ ^ExecutorService pool# (or *thread-pool* (ForkJoinPool/commonPool))
+ ^Runnable fbody# (fn do-complete#
+ []
+ (try
+ (Var/resetThreadBindingFrame binding-frame#) ;;; set the Clojure binding frame captured above
+ (.complete res-fut# (do ~@body)) ;;; send the result of evaluating the body to the CompletableFuture
+ (catch Exception ~'e
+ (.completeExceptionally res-fut# ~'e)))) ;;; if we catch an exception we send it to the CompletableFuture
+ ^Future fut# (.submit pool# fbody#)
+ ^Function cancel# (reify Function
+ (apply [~'_ ~'_]
+ (future-cancel fut#)))] ;;; submit the work to the pool and get the FutureTask doing the work
+ ;;; if the CompletableFuture returns exceptionally
+ ;;; then cancel the Future which is currently doing the work
+ (.exceptionally res-fut# cancel#)
+ res-fut#))
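
An illustrative REPL sketch (not part of the patch) of the adapter protocol above: completable futures, core.async channels, futures and other derefables are normalized to a CompletableFuture, and nested async results are flattened to the eventual value.

    (require '[clara.rules.platform.async :as async]
             '[clojure.core.async :refer [go]])

    ;; A completable future whose body itself returns a channel is
    ;; flattened down to the channel's eventual value.
    @(async/flatten-completable-future
      (async/completable-future
       (go 42)))
    ;; => 42
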
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index a7adc753..c904e5bf 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -7,6 +7,7 @@
insert-all insert-all!
insert insert!
retract retract!]]
+ [clojure.core.async :refer [go timeout
- (println "number:" ?value)
- (insert! {:type :result
- :value (+ ?value 100)}))
+ (go
+ (
- (println "result:" ?value)
- (Thread/sleep 50)
- (insert! {:type :output
- :value (inc ?value)}))
+ (go
+ (
Date: Sun, 31 Dec 2023 15:37:51 -0600
Subject: [PATCH 30/87] chore: lint produce-try macro
---
.../clj-kondo.exports/clara/rules/config.edn | 1 +
.../clara/rules/hooks/clara_rules.clj_kondo | 8 ++++++
deps.edn | 2 +-
src/main/clojure/clara/rules/platform.clj | 25 +++++++++----------
4 files changed, 22 insertions(+), 14 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index a66e2854..fa5ccec0 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -6,4 +6,5 @@
clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
clara.rules.dsl/parse-rule hooks.clara-rules/analyze-parse-rule-macro
+ clara.rules.platform/produce-try hooks.clara-rules/analyze-produce-try-macro
clara.tools.testing-utils/def-rules-test hooks.clara-rules/analyze-def-rules-test-macro}}}
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
index 0d5abbcc..d5280a07 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -398,6 +398,14 @@
merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
{:node new-node}))
+(defn analyze-produce-try-macro
+ [{:keys [:node]}]
+ (let [[& body] (rest (:children node))
+ new-node (api/list-node
+ (list*
+ (api/token-node 'try)
+ body))]
+ {:node new-node}))
(defn analyze-def-rules-test-macro
[{:keys [:node]}]
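
With the hook above, clj-kondo analyzes produce-try as an ordinary try, so its catch clauses lint cleanly. A small illustrative form (not part of the patch):

    (require '[clara.rules.platform :as platform])

    (platform/produce-try
     (/ 1 0)
     (catch ArithmeticException _e
       :divide-by-zero))
    ;; => :divide-by-zero when *parallel-compute* is false; for a parallel
    ;;    production that returns a CompletableFuture, the same handler is
    ;;    attached via .exceptionally instead.
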
diff --git a/deps.edn b/deps.edn
index 403eff79..65dababd 100644
--- a/deps.edn
+++ b/deps.edn
@@ -18,7 +18,7 @@
org.clojure/math.combinatorics {:mvn/version "0.1.3"}
criterium/criterium {:mvn/version "0.4.6"}}}
- :clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2023.04.14"}}
+ :clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2023.12.15"}}
:main-opts ["-m" "clj-kondo.main"]}
:test {:extra-paths ["src/test/clojure" "target/test/classes"]
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index 587d5dd9..afb56f5f 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -159,19 +159,18 @@
(defmacro produce-for
[bindings prod-body & post-body]
`(if *parallel-compute*
- (let [fut-seq# (doall
- (for
- [~@bindings]
- (let [^CompletableFuture f# (async/flatten-completable-future
- (async/completable-future
- ~prod-body))]
- (if ~(some? post-body)
- (.thenApply f#
- (reify Function
- (apply [~'_ result#]
- (binding [*production* result#]
- ~@post-body))))
- f#))))
+ (let [fut-seq# (eager-for
+ [~@bindings]
+ (let [^CompletableFuture f# (async/flatten-completable-future
+ (async/completable-future
+ ~prod-body))]
+ (if ~(some? post-body)
+ (.thenApply f#
+ (reify Function
+ (apply [~'_ result#]
+ (binding [*production* result#]
+ ~@post-body))))
+ f#)))
^BiFunction conj# (reify BiFunction
                          (apply [~'_ ~'results ~'result]
(conj ~'results ~'result)))]
From 34f643d7609dceddb817fbb915ce084592a8c542 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 31 Dec 2023 17:04:35 -0600
Subject: [PATCH 31/87] feat: update pom
---
pom.xml | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/pom.xml b/pom.xml
index d42ad99e..6d211514 100644
--- a/pom.xml
+++ b/pom.xml
@@ -18,6 +18,11 @@
       <artifactId>clojure</artifactId>
       <version>1.10.3</version>
     </dependency>
+    <dependency>
+      <groupId>org.clojure</groupId>
+      <artifactId>core.async</artifactId>
+      <version>1.6.681</version>
+    </dependency>
     <dependency>
       <groupId>com.cnuernber</groupId>
       <artifactId>ham-fisted</artifactId>
From cf3f8648282622dced224d132779724990bfb9a0 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 31 Dec 2023 17:32:21 -0600
Subject: [PATCH 32/87] feat: update docs
---
CHANGELOG.md | 7 ++++---
README.md | 17 ++++++++++-------
RELEASE.md | 10 +++++-----
src/main/java/clara/rules/package-info.java | 2 +-
4 files changed, 20 insertions(+), 16 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 971fcc87..29335238 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,9 @@
This is a history of changes to k13labs/clara-rules.
-# 0.9.0
-* strip cljs support, general cleanup.
-* integrate with collections from ham-fisted for performance gains.
+# 0.9.0-SNAPSHOT
+* Add parallel support to Node and RHS activation.
+* Remove ClojureScript support, general cleanup.
+* Replace mutable/transient collections with [ham-fisted](https://github.com/cnuernber/ham-fisted) data structures.
This is a history of changes to clara-rules prior to forking to k13labs/clara-rules.
diff --git a/README.md b/README.md
index 61ab8563..8733314c 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,11 @@
-[![Build Status](https://github.com/cerner/clara-rules/actions/workflows/clojure.yml/badge.svg)](https://github.com/cerner/clara-rules/actions/workflows/clojure.yml)
+[![Build Status](https://github.com/k13labs/clara-rules/actions/workflows/clojure.yml/badge.svg)](https://github.com/cerner/clara-rules/actions/workflows/clojure.yml)
# _About_
Clara is a forward-chaining rules engine written in Clojure(Script) with Java interoperability. It aims to simplify code with a developer-centric approach to expert systems. See [clara-rules.org](http://www.clara-rules.org) for more.
+NOTE: this fork only supports the JVM/Clojure, and not ClojureScript.
+
# _Usage_
Here's a simple example. Complete documentation is at [clara-rules.org](http://www.clara-rules.org/docs/firststeps/).
@@ -43,17 +45,20 @@ Here's a simple example. Complete documentation is at [clara-rules.org](http://w
# _Building_
-Clara is built, tested, and deployed using [Leiningen](http://leiningen.org).
+Clara is built, tested, and deployed using [Clojure Tools Deps](https://clojure.org/guides/deps_and_cli).
+
+GNU Make is used to simplify invocation of some commands.
# _Availability_
-Clara releases are on [Clojars](https://clojars.org/). Simply add the following to your project:
+Clara releases for this project are on [Clojars](https://clojars.org/). Simply add the following to your project:
-[![Clojars Project](http://clojars.org/com.cerner/clara-rules/latest-version.svg)](http://clojars.org/com.cerner/clara-rules)
+[![Clojars Project](http://clojars.org/com.github.k13labs/clara-rules/latest-version.svg)](http://clojars.org/com.github.k13labs/clara-rules)
# _Communication_
-Questions can be posted to the [Clara Rules Google Group](https://groups.google.com/forum/?hl=en#!forum/clara-rules) or the [Slack channel](https://clojurians.slack.com/messages/clara/).
+- Questions about Clara rules can be posted to the [Clara Rules Google Group](https://groups.google.com/forum/?hl=en#!forum/clara-rules) or the [Slack channel](https://clojurians.slack.com/messages/clara/).
+- For any other questions or issues specific to this fork of Clara rules, feel free to browse or open a [GitHub Issue](https://github.com/k13labs/clara-rules/issues).
# Contributing
@@ -68,5 +73,3 @@ Licensed under the Apache License, Version 2.0 (the "License"); you may not use
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
-
-
diff --git a/RELEASE.md b/RELEASE.md
index cf676811..c0ea6497 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -4,14 +4,14 @@ This project is hosted on [Clojars][clojars]. You can see it [here][release-sit
Releasing the project requires these steps:
-0. Set the version number in the project.clj file.
-1. Run ```lein do clean, test``` to ensure everything is working as expected.
+0. Set the version number in the `Makefile` file.
+1. Run ```make test``` to ensure everything is working as expected.
2. Use a GitHub [project release][github-release-url] to release the project and tag (be sure it follows [semver][semantic-versioning])
-3. Run ```lein deploy clojars``` to deploy the project to the Clojars repository.
+3. Run ```make deploy``` to deploy the project to the Clojars repository.
4. Update `main` to a new minor version
[clojars]: https://clojars.org
-[release-site]: https://clojars.org/com.cerner/clara-rules
-[project-url]: https://github.com/cerner/clara-rules/
+[release-site]: https://clojars.org/com.github.k13labs/clara-rules
+[project-url]: https://github.com/k13labs/clara-rules/
[semantic-versioning]: http://semver.org/
[github-release-url]: https://help.github.com/articles/creating-releases/
diff --git a/src/main/java/clara/rules/package-info.java b/src/main/java/clara/rules/package-info.java
index 024f91b3..352a2b40 100644
--- a/src/main/java/clara/rules/package-info.java
+++ b/src/main/java/clara/rules/package-info.java
@@ -14,7 +14,7 @@
* since the WorkingMemory is immutable, creating a new instance that shares internal state when changes occur.
*
*
- * See the <a href="https://github.com/cerner/clara-examples">Clara Examples</a>
+ * See the <a href="https://github.com/k13labs/clara-examples">Clara Examples</a>
* project for an example of this in action.
*/
package clara.rules;
From f1b6ab60ccd291df6ebe5fe792dab0d72437e373 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 31 Dec 2023 20:09:45 -0600
Subject: [PATCH 33/87] fix: use correct build status url
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 8733314c..f4b23af0 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-[![Build Status](https://github.com/k13labs/clara-rules/actions/workflows/clojure.yml/badge.svg)](https://github.com/cerner/clara-rules/actions/workflows/clojure.yml)
+[![Build Status](https://github.com/k13labs/clara-rules/actions/workflows/clojure.yml/badge.svg)](https://github.com/k13labs/clara-rules/actions/workflows/clojure.yml)
# _About_
From 209087490123df752b3fcf50d8f0b8beecc7256b Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 2 Jan 2024 11:16:50 -0600
Subject: [PATCH 34/87] feat: update platform async patterns using futurama,
prepare for release
---
Makefile | 8 +-
RELEASE.md | 9 +-
deps.edn | 2 +-
pom.xml | 6 +
src/main/clojure/clara/rules/platform.clj | 84 ++++++-------
.../clojure/clara/rules/platform/async.clj | 111 ------------------
6 files changed, 49 insertions(+), 171 deletions(-)
delete mode 100644 src/main/clojure/clara/rules/platform/async.clj
diff --git a/Makefile b/Makefile
index 9ef030b0..0acebeff 100644
--- a/Makefile
+++ b/Makefile
@@ -1,7 +1,6 @@
.PHONY: repl test clean compile-main-java compile-test-java deploy install format-check format-fix
SHELL := /bin/bash
-VERSION := 0.9.0-SNAPSHOT
compile-main-java:
clojure -T:build compile-main-java
@@ -29,12 +28,7 @@ lint: compile-test-java
clojure -M:dev:test:clj-kondo --lint "src/main:src/test" --fail-level "error"
build: compile-main-java
- clojure -Spom
- clojure -X:jar \
- :sync-pom true \
- :group-id "com.github.k13labs" \
- :artifact-id "clara-rules" \
- :version '"$(VERSION)"'
+ clojure -X:jar :sync-pom true
deploy: clean build
clojure -X:deploy-maven
diff --git a/RELEASE.md b/RELEASE.md
index c0ea6497..e6fdf787 100644
--- a/RELEASE.md
+++ b/RELEASE.md
@@ -4,11 +4,12 @@ This project is hosted on [Clojars][clojars]. You can see it [here][release-sit
Releasing the project requires these steps:
-0. Set the version number in the `Makefile` file.
1. Run ```make test``` to ensure everything is working as expected.
-2. Use a GitHub [project release][github-release-url] to release the project and tag (be sure it follows [semver][semantic-versioning])
-3. Run ```make deploy``` to deploy the project to the Clojars repository.
-4. Update `main` to a new minor version
+2. Set the version number and tag in the `pom.xml` file, commit the changes.
+3. Use a GitHub [project release][github-release-url] to release the project and tag (be sure it follows [semver][semantic-versioning])
+4. Run ```make clean build``` to test building the project, commit any changes to the `pom.xml` file.
+5. Run ```make deploy``` to deploy the project to the Clojars repository.
+6. Update the `pom.xml` in `main` to a new minor version, commit the changes.
[clojars]: https://clojars.org
[release-site]: https://clojars.org/com.github.k13labs/clara-rules
diff --git a/deps.edn b/deps.edn
index 65dababd..de5cfe77 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- org.clojure/core.async {:mvn/version "1.6.681"}
+ com.github.k13labs/futurama {:mvn/version "0.3.1"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 6d211514..bf59d8e1 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,6 +6,12 @@
    <artifactId>clara-rules</artifactId>
    <version>0.9.0-SNAPSHOT</version>
    <name>clara-rules</name>
+   <scm>
+     <url>https://github.com/k13labs/clara-rules</url>
+     <connection>scm:git:git://github.com/k13labs/clara-rules.git</connection>
+     <developerConnection>scm:git:ssh://git@github.com/k13labs/clara-rules.git</developerConnection>
+     <tag>0.9.0-SNAPSHOT</tag>
+   </scm>
    <licenses>
      <license>
        <name>Apache-2.0</name>
index afb56f5f..c75a2dbe 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -1,13 +1,13 @@
(ns clara.rules.platform
"This namespace is for internal use and may move in the future.
Platform unified code Clojure/ClojureScript."
- (:require [clara.rules.platform.async :as async])
+ (:require [futurama.core :refer [!
Date: Tue, 2 Jan 2024 11:18:44 -0600
Subject: [PATCH 35/87] chore: update pom deps
---
pom.xml | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/pom.xml b/pom.xml
index bf59d8e1..e182aa54 100644
--- a/pom.xml
+++ b/pom.xml
@@ -25,9 +25,9 @@
         <version>1.10.3</version>
       </dependency>
       <dependency>
-        <groupId>org.clojure</groupId>
-        <artifactId>core.async</artifactId>
-        <version>1.6.681</version>
+        <groupId>com.github.k13labs</groupId>
+        <artifactId>futurama</artifactId>
+        <version>0.3.1</version>
       </dependency>
       <dependency>
         <groupId>com.cnuernber</groupId>
From 3ab4626e37b03500c4bb208441d4e9783ba967e7 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 2 Jan 2024 15:05:17 -0600
Subject: [PATCH 36/87] feat: async enhancements, add async tests
---
pom.xml | 4 +--
src/main/clojure/clara/rules/engine.clj | 18 ++++++++---
src/test/clojure/clara/test_engine.clj | 42 +++++++++++++++----------
3 files changed, 41 insertions(+), 23 deletions(-)
diff --git a/pom.xml b/pom.xml
index e182aa54..1b53edc6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,13 +4,13 @@
   <packaging>jar</packaging>
   <groupId>com.github.k13labs</groupId>
   <artifactId>clara-rules</artifactId>
-  <version>0.9.0-SNAPSHOT</version>
+  <version>0.9.0</version>
   <name>clara-rules</name>
   <scm>
     <url>https://github.com/k13labs/clara-rules</url>
     <connection>scm:git:git://github.com/k13labs/clara-rules.git</connection>
     <developerConnection>scm:git:ssh://git@github.com/k13labs/clara-rules.git</developerConnection>
-    <tag>0.9.0-SNAPSHOT</tag>
+    <tag>0.9.0</tag>
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index fe02ecd9..bd32842c 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -1752,7 +1752,9 @@
(defn fire-rules*
"Fire rules for the given nodes."
- [rulebase nodes transient-memory transport listener get-alphas-fn update-cache]
+ [rulebase nodes transient-memory transport
+ listener get-alphas-fn update-cache
+ pop-activations-batch-size]
(binding [*current-session* {:rulebase rulebase
:transient-memory transient-memory
:transport transport
@@ -1779,7 +1781,7 @@
(do
;; If there are activations, fire them.
- (let [activations (mem/pop-activations! transient-memory Long/MAX_VALUE)
+ (let [activations (mem/pop-activations! transient-memory pop-activations-batch-size)
rhs-activation-results
(platform/produce-for
[{:keys [node token] :as activation} activations
@@ -1902,7 +1904,11 @@
(fire-rules [session opts]
(binding [platform/*parallel-compute* (or (:parallel-compute opts) platform/*parallel-compute*)]
(let [transient-memory (mem/to-transient memory)
- transient-listener (l/to-transient listener)]
+ transient-listener (l/to-transient listener)
+ parallel-batch-size (if platform/*parallel-compute*
+ (or (:parallel-batch-size opts)
+ (.. Runtime getRuntime availableProcessors))
+ 1)]
(if-not (:cancelling opts)
;; We originally performed insertions and retractions immediately after the insert and retract calls,
@@ -1947,7 +1953,8 @@
transport
transient-listener
get-alphas-fn
- (uc/get-ordered-update-cache)))
+ (uc/get-ordered-update-cache)
+ parallel-batch-size))
(let [insertions (sequence
(comp (filter (fn [pending-op]
@@ -1993,7 +2000,8 @@
get-alphas-fn
;; This continues to use the cancelling cache after the first batch of insertions and retractions.
;; If this is suboptimal for some workflows we can revisit this.
- update-cache))))
+ update-cache
+ parallel-batch-size))))
(LocalSession. rulebase
(mem/to-persistent! transient-memory)
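A sketch of how a caller opts into the new behavior; the rule namespace, fact and query below are illustrative, while the option names and the availableProcessors default come from the change above:

(-> (mk-session 'my.app.rules :cache false)   ;; illustrative rule source
    (insert (->SomeFact 42))                  ;; illustrative fact
    (fire-rules {:parallel-compute true
                 ;; when omitted this defaults to
                 ;; (.. Runtime getRuntime availableProcessors) under
                 ;; :parallel-compute, and to 1 otherwise
                 :parallel-batch-size 100})
    (query some-query))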
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index c904e5bf..d670e4e2 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -1,19 +1,18 @@
(ns clara.test-engine
- (:require [clara.rules :refer [clear-ns-productions!
- mk-session
+ (:require [clara.rules :refer [mk-session
fire-rules
query
defrule defquery
- insert-all insert-all!
- insert insert!
- retract retract!]]
+ insert-all
+ insert!]]
[clojure.core.async :refer [go timeout
- (go
- ( (fire-rules session {:parallel-compute true
+ :parallel-batch-size 100})
+ (query test-slow-query)
+ (count))
+ {:verbose true}))
+ [mean [lower upper]] (:mean result)]
+ (is (< 0.1 lower mean 0.15)) ;;; our lower and mean values should be between 100ms and 150ms
+ (is (< 0.1 mean upper 0.2)) ;;; our mean and upper values should be lower than 200ms
+ (report-result result))))
+
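The destructuring above relies on criterium's result shape, where :mean holds a point estimate followed by its confidence interval; roughly:

(let [result (quick-benchmark (Thread/sleep 100) {})
      [mean [lower upper]] (:mean result)]
  ;; criterium reports seconds, so a 100ms body should land near 0.1
  [mean lower upper])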
(comment
- (time
- (-> (fire-rules session {:parallel-compute true})
- (query test-slow-query)
- (count)))
(do
(clear-ns-productions!)
(clear-session-cache!)))
From 3df55fe7d0928a80aa7449d6be9c35b2b2d1fe3d Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 2 Jan 2024 17:00:18 -0600
Subject: [PATCH 37/87] chore: cleanup comments
---
src/test/clojure/clara/test_engine.clj | 5 -----
1 file changed, 5 deletions(-)
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index d670e4e2..f2d236ff 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -57,8 +57,3 @@
(is (< 0.1 lower mean 0.15)) ;;; our lower and mean values should be between 100ms and 150ms
(is (< 0.1 mean upper 0.2)) ;;; our mean and upper values should be lower than 200ms
(report-result result))))
-
-(comment
- (do
- (clear-ns-productions!)
- (clear-session-cache!)))
From 8463bd481e3b80a540d9f0df3d407d9c0536a244 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sat, 6 Jan 2024 15:35:10 -0600
Subject: [PATCH 38/87] feat: add explicit async methods
---
.../clj-kondo.exports/clara/rules/config.edn | 4 +-
.../clara/rules/hooks/clara_rules.clj_kondo | 9 -
deps.edn | 2 +-
dev/user.clj | 30 +-
src/main/clojure/clara/rules.clj | 18 +
src/main/clojure/clara/rules/engine.clj | 448 ++++++++++--------
src/main/clojure/clara/rules/platform.clj | 77 +--
.../clojure/clara/tools/testing_utils.clj | 5 +-
src/test/clojure/clara/test_engine.clj | 6 +-
src/test/clojure/clara/test_rules.clj | 244 +++++-----
10 files changed, 400 insertions(+), 443 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index fa5ccec0..f402225d 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -1,10 +1,8 @@
{:lint-as {clara.rules/defsession clojure.core/def
clara.rules.platform/eager-for clojure.core/for
- clara.rules.platform/compute-for clojure.core/for
- clara.rules.platform/produce-for clojure.core/for}
+ clara.rules.platform/compute-for clojure.core/for}
:hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
clara.rules/defrule hooks.clara-rules/analyze-defrule-macro
clara.rules.dsl/parse-query hooks.clara-rules/analyze-parse-query-macro
clara.rules.dsl/parse-rule hooks.clara-rules/analyze-parse-rule-macro
- clara.rules.platform/produce-try hooks.clara-rules/analyze-produce-try-macro
clara.tools.testing-utils/def-rules-test hooks.clara-rules/analyze-def-rules-test-macro}}}
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
index d5280a07..fbe18443 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -398,15 +398,6 @@
merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
{:node new-node}))
-(defn analyze-produce-try-macro
- [{:keys [:node]}]
- (let [[& body] (rest (:children node))
- new-node (api/list-node
- (list*
- (api/token-node 'try)
- body))]
- {:node new-node}))
-
(defn analyze-def-rules-test-macro
[{:keys [:node]}]
(let [[test-name test-params & test-body] (rest (:children node))
diff --git a/deps.edn b/deps.edn
index de5cfe77..59bf6bc6 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.3.1"}
+ com.github.k13labs/futurama {:mvn/version "0.3.7"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/dev/user.clj b/dev/user.clj
index 5961233c..9c8e1233 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -1,41 +1,13 @@
(ns user
(:require [criterium.core :refer [report-result
quick-benchmark] :as crit]
- [clara.rules.platform :refer [compute-for
- produce-for] :as platform]
+ [clara.rules.platform :refer [compute-for]]
[clojure.core.async :refer [go timeout > (produce-for
- [v (range 100)]
- (apply + v (range v)))
- (apply +)))
- {:verbose true}))
-
- (report-result
- (quick-benchmark
- (binding [platform/*parallel-compute* true]
- (->> (compute-for
- [v (range 100)]
- (apply + v (range v)))
- (apply +)))
- {:verbose true}))
-
- (report-result
- (quick-benchmark
- (binding [platform/*parallel-compute* true]
- (->> (for
- [v (range 100)
- (apply + v (range v))])
- (apply +)))
- {:verbose true}))
-
(add-tap #'println)
(remove-tap #'println)
(tap> "foobar"))
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index c26815b1..97fb10e9 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -41,6 +41,24 @@
([session] (eng/fire-rules session {}))
([session opts] (eng/fire-rules session opts)))
+(defn fire-rules-async
+ "Fires are rules in the given session. Once a rule is fired, it is labeled in a fired
+ state and will not be re-fired unless facts affecting the rule are added or retracted.
+
+ This function does not modify the given session to mark rules as fired. Instead, it returns
+ a new session in which the rules are marked as fired.
+
+ This take an additional map of options as a second argument. Current options:
+
+ :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
+ Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equals facts to cancel each
+ other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
+ is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
+ Note that this is purely a performance optimization and no guarantees are made at this time on whether a given rule's RHS will be called.
+ When this option is used rule RHS code that is executed shouldn't do anything that impacts state other than perform logical insertions."
+ ([session] (eng/fire-rules-async session {}))
+ ([session opts] (eng/fire-rules-async session opts)))
+
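A sketch of consuming the async variant; per the engine changes later in this patch it returns a CompletableFuture-style async result, so a blocking caller can simply deref it (the query name is illustrative):

(let [fired (fire-rules-async session {:parallel-batch-size 8})]
  ;; deref blocks until the async result completes and yields the fired session
  (-> @fired
      (query my-query)))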
(defn query
"Runs the given query with the optional given parameters against the session.
The optional parameters should be in map form. For example, a query call might be:
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index bd32842c..ffa869fd 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -6,7 +6,12 @@
[clara.rules.listener :as l]
[clara.rules.platform :as platform]
[clara.rules.update-cache.core :as uc]
- [clara.rules.update-cache.cancelling :as ca]))
+ [clara.rules.update-cache.cancelling :as ca]
+ [futurama.core :refer [async
+ async?
+ !activation-output
+ (fn activation-output
+ ;; We don't actually care what was returned from the activation
+ [_]
+ ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
+ ;; below. They will be dereferenced again if an exception is caught, but in the error
+ ;; case we aren't worried about performance.
+ (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
+ :logical-insertions @batched-logical-insertions
+ :rhs-retractions @batched-rhs-retractions}]
+ (l/fire-activation! listener activation resulting-ops)
+ {:token token :node node :ops resulting-ops}))
+ ->activation-ex-args
+ (fn activation-ex-args
+ [e]
+ (let [production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ [(str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ e]))]
+ (try
+ ;; Actually fire the rule RHS
+ (let [result ((:rhs node) token (:env (:production node)))]
+ (if (async? result)
+ (async
+ (try
+ (->activation-output (!activation-ex-args e)]
+ (throw (ex-info msg info cause))))))
+ (->activation-output result)))
+ (catch Exception e
+ ;; If the rule fired an exception, help debugging by attaching
+ ;; details about the rule itself, cached insertions, and any listeners
+ ;; while propagating the cause.
+ (let [[msg info cause] (->activation-ex-args e)]
+ (throw (ex-info msg info cause)))))))
+
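The branching idea above, reduced to a sketch; rhs-fn, token, env, handle-result and handle-error are placeholders for the RHS invocation, ->activation-output and the ex-info wrapping:

(let [result (rhs-fn token env)]
  (if (async? result)
    ;; async RHS: await it inside an async block so no thread is blocked
    (async
      (try
        (handle-result (!<! result))
        (catch Exception e
          (handle-error e))))
    ;; plain value: stay on the synchronous path
    (handle-result result)))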
+(defn fire-rules-async*
"Fire rules for the given nodes."
- [rulebase nodes transient-memory transport
+ [rulebase transient-memory transport
listener get-alphas-fn update-cache
pop-activations-batch-size]
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener listener}]
-
- (loop [next-group (mem/next-activation-group transient-memory)
- last-group nil]
-
- (if next-group
-
- (if (and last-group (not= last-group next-group))
-
- ;; We have changed groups, so flush the updates from the previous
- ;; group before continuing.
- (do
- (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group)))
-
- (do
-
- ;; If there are activations, fire them.
- (let [activations (mem/pop-activations! transient-memory pop-activations-batch-size)
- rhs-activation-results
- (platform/produce-for
- [{:keys [node token] :as activation} activations
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- :let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])
- rule-context {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]]
- ;;; this the production expression, which could return an async result if parallel computing
- (binding [*rule-context* rule-context]
- ;; Fire the rule itself.
- (platform/produce-try
- ((:rhs node) token (:env (:production node)))
- (catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- (throw (ex-info (str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e))))))
- ;;; this the post-production expression, which runs after the production is activated
- (binding [*rule-context* rule-context]
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
- :logical-insertions @batched-logical-insertions
- :rhs-retractions @batched-rhs-retractions}]
- (l/fire-activation! listener activation resulting-ops)
- {:token token :node node :ops resulting-ops})))]
- (doseq [{:keys [token node ops]} rhs-activation-results
- :let [{:keys [unconditional-insertions
- logical-insertions
- rhs-retractions]} ops]]
- (binding [*rule-context* {:token token
- :node node}]
- (when-let [batched (seq unconditional-insertions)]
- (flush-insertions! batched true))
- (when-let [batched (seq logical-insertions)]
- (flush-insertions! batched false))
- (when-let [batched (seq rhs-retractions)]
- (flush-rhs-retractions! batched))
- ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
- ;; will be in context for child rules.
- (when (some-> node :production :props :no-loop)
- (flush-updates *current-session*)))))
- (recur (mem/next-activation-group transient-memory) next-group)))
-
- ;; There were no items to be activated, so flush any pending
- ;; updates and recur with a potential new activation group
- ;; since a flushed item may have triggered one.
- (when (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group)))))))
+ (async
+ (binding [*current-session* {:rulebase rulebase
+ :transient-memory transient-memory
+ :transport transport
+ :insertions (atom 0)
+ :get-alphas-fn get-alphas-fn
+ :pending-updates update-cache
+ :listener listener}]
+
+ (loop [next-group (mem/next-activation-group transient-memory)
+ last-group nil]
+
+ (if next-group
+
+ (if (and last-group (not= last-group next-group))
+
+ ;; We have changed groups, so flush the updates from the previous
+ ;; group before continuing.
+ (do
+ (flush-updates *current-session*)
+ (let [upcoming-group (mem/next-activation-group transient-memory)]
+ (l/activation-group-transition! listener next-group upcoming-group)
+ (recur upcoming-group next-group)))
+
+ (do
+
+ ;; If there are activations, fire them.
+ (let [activations (mem/pop-activations! transient-memory pop-activations-batch-size)
+ rhs-activations
+ (platform/eager-for
+ [{:keys [node token] :as activation} activations
+ ;; Use vectors for the insertion caches so that within an insertion type
+ ;; (unconditional or logical) all insertions are done in order after the into
+ ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
+ ;; in the sequences.
+ :let [batched-logical-insertions (atom [])
+ batched-unconditional-insertions (atom [])
+ batched-rhs-retractions (atom [])
+ rule-context {:token token
+ :node node
+ :batched-logical-insertions batched-logical-insertions
+ :batched-unconditional-insertions batched-unconditional-insertions
+ :batched-rhs-retractions batched-rhs-retractions}]]
+ ;;; this the production expression, which could return an async result if parallel computing
+ (binding [*rule-context* rule-context]
+ (fire-activation* activation)))]
+ (doseq [{:keys [token node ops]} (! node :production :props :no-loop)
+ (flush-updates *current-session*)))))
+ (recur (mem/next-activation-group transient-memory) next-group)))
+
+ ;; There were no items to be activated, so flush any pending
+ ;; updates and recur with a potential new activation group
+ ;; since a flushed item may have triggered one.
+ (when (flush-updates *current-session*)
+ (let [upcoming-group (mem/next-activation-group transient-memory)]
+ (l/activation-group-transition! listener next-group upcoming-group)
+ (recur upcoming-group next-group))))))))
+
+(declare ->LocalSession)
(deftype LocalSession [rulebase memory transport listener get-alphas-fn pending-operations]
ISession
@@ -1876,12 +1911,12 @@
facts
(into [] facts))))]
- (LocalSession. rulebase
- memory
- transport
- listener
- get-alphas-fn
- new-pending-operations)))
+ (->LocalSession rulebase
+ memory
+ transport
+ listener
+ get-alphas-fn
+ new-pending-operations)))
(retract [session facts]
@@ -1891,26 +1926,29 @@
facts
(into [] facts))))]
- (LocalSession. rulebase
- memory
- transport
- listener
- get-alphas-fn
- new-pending-operations)))
+ (->LocalSession rulebase
+ memory
+ transport
+ listener
+ get-alphas-fn
+ new-pending-operations)))
;; Prior to issue 249 we only had a one-argument fire-rules method. clara.rules/fire-rules will always call the two-argument method now
;; but we kept a one-argument version of the fire-rules in case anyone is calling the fire-rules protocol function or method on the session directly.
- (fire-rules [session] (fire-rules session {}))
+ (fire-rules [session]
+ (fire-rules session {}))
(fire-rules [session opts]
- (binding [platform/*parallel-compute* (or (:parallel-compute opts) platform/*parallel-compute*)]
- (let [transient-memory (mem/to-transient memory)
- transient-listener (l/to-transient listener)
- parallel-batch-size (if platform/*parallel-compute*
- (or (:parallel-batch-size opts)
- (.. Runtime getRuntime availableProcessors))
- 1)]
-
- (if-not (:cancelling opts)
+ (!LocalSession rulebase
(mem/to-persistent! transient-memory)
transport
(l/to-persistent! transient-listener)
@@ -2038,6 +2074,10 @@
:listeners (l/flatten-listener listener)
:get-alphas-fn get-alphas-fn}))
+(defn ->LocalSession
+ [rulebase memory transport listener get-alphas-fn pending-operations]
+ (LocalSession. rulebase memory transport listener get-alphas-fn pending-operations))
+
(defn assemble
"Assembles a session from the given components, which must be a map
containing the following:
@@ -2049,14 +2089,14 @@
:get-alphas-fn The function used to return the alpha nodes for a fact of the given type."
[{:keys [rulebase memory transport listeners get-alphas-fn]}]
- (LocalSession. rulebase
- memory
- transport
- (if (> (count listeners) 0)
- (l/delegating-listener listeners)
- l/default-listener)
- get-alphas-fn
- []))
+ (->LocalSession rulebase
+ memory
+ transport
+ (if (> (count listeners) 0)
+ (l/delegating-listener listeners)
+ l/default-listener)
+ get-alphas-fn
+ []))
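A sketch of calling assemble with the components named in its docstring; the component values would normally be taken from the corresponding fields of an existing session:

(assemble {:rulebase rulebase
           :memory memory
           :transport transport
           :listeners []            ;; zero or more listeners
           :get-alphas-fn get-alphas-fn})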
(defn with-listener
"Return a new session with the listener added to the provided session,
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index c75a2dbe..5a5d7137 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -5,8 +5,10 @@
! (fire-rules session {:parallel-compute true
- :parallel-batch-size 100})
+ (-> (! (mk-session [rule-A rule-B] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Rule order in a seq of rule structures should be respected")
+ (-> (mk-session [rule-A rule-B] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :B])
+ "Rule order in a seq of rule structures should be respected")
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when we reverse the seq of rule structures the firing order is reversed.")
+ (-> (mk-session [rule-B rule-A] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:B :A])
+ "Validate that when we reverse the seq of rule structures the firing order is reversed.")
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :C :D])
- "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :C :D])
+ "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
in the rule sources should fire first."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:C :D :A])
- "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :A])
+ "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
in the rule sources should fire later."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (let [rule-C-line (-> #'order-rules/rule-C meta :line)
- rule-D-line (-> #'order-rules/rule-D meta :line)]
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
+ (let [rule-C-line (-> #'order-rules/rule-C meta :line)
+ rule-D-line (-> #'order-rules/rule-D meta :line)]
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules))
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules))
- (is (= @fire-order [:D :C])
- "When we alter the metadata of the rules to reverse their line order their
+ (is (= @fire-order [:D :C])
+ "When we alter the metadata of the rules to reverse their line order their
firing order should also be reversed.")
- ;; Reset the line metadata on the rules to what it was previously.
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
+ ;; Reset the line metadata on the rules to what it was previously.
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-prior* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B :C :D])
- "When a :production-seq occurs before defrules, the rules in the :production-seq
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-prior* [rule-A rule-B]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :B :C :D])
+ "When a :production-seq occurs before defrules, the rules in the :production-seq
should fire before those rules and in the order they are in in the :production-seq."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:C :D :A :B])
- "When a :production-seq occurs after defrules, the rules in the :production-seq
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-A rule-B]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :A :B])
+ "When a :production-seq occurs after defrules, the rules in the :production-seq
should fire after those rules and in the order they are in in the :production-seq."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-B rule-A]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:C :D :B :A])
- "Validate that when the order of rules in the :production-seq is reversed those rules
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-B rule-A]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :B :A])
+ "Validate that when the order of rules in the :production-seq is reversed those rules
fire in the reversed order"))
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when explicit salience is present it overrides rule order.")
+ (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:B :A])
+ "Validate that when explicit salience is present it overrides rule order.")
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-A rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
+ (-> (mk-session [rule-A rule-B rule-A] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :B])
+ "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
- (reset! fire-order []))))
+ (reset! fire-order [])))
(deftest test-rule-order-respected-by-batched-inserts
- (when-not platform/*parallel-compute*
- (let [qholder (atom [])
-
- r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Cold ?t)))
- r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Hot ?t)))
-
- ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
- q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
- q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
-
- order1 (mk-session [r1 r2 q1 q2] :cache false)
- order2 (mk-session [r2 r1 q1 q2] :cache false)
-
- run-session (fn [s]
- (let [s (-> s
- (insert (->Temperature 10 "MCI"))
- fire-rules)]
- [(-> s (query q1) frequencies)
- (-> s (query q2) frequencies)]))
-
- [res11 res12] (run-session order1)
- holder1 @qholder
- _ (reset! qholder [])
-
- [res21 res22] (run-session order2)
- holder2 @qholder
- _ (reset! qholder [])]
-
- ;; Sanity check that the query matches what is expected.
- (is (= (frequencies [{:?c (->Cold 10)}])
- res11
- res21))
- (is (= (frequencies [{:?h (->Hot 10)}])
- res12
- res22))
-
- (is (= [:cold :hot] holder1))
- (is (= [:hot :cold] holder2)))))
+ (let [qholder (atom [])
+
+ r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Cold ?t)))
+ r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Hot ?t)))
+
+ ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
+ q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
+ q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
+
+ order1 (mk-session [r1 r2 q1 q2] :cache false)
+ order2 (mk-session [r2 r1 q1 q2] :cache false)
+
+ run-session (fn [s]
+ (let [s (-> s
+ (insert (->Temperature 10 "MCI"))
+ fire-rules)]
+ [(-> s (query q1) frequencies)
+ (-> s (query q2) frequencies)]))
+
+ [res11 res12] (run-session order1)
+ holder1 @qholder
+ _ (reset! qholder [])
+
+ [res21 res22] (run-session order2)
+ holder2 @qholder
+ _ (reset! qholder [])]
+
+ ;; Sanity check that the query matches what is expected.
+ (is (= (frequencies [{:?c (->Cold 10)}])
+ res11
+ res21))
+ (is (= (frequencies [{:?h (->Hot 10)}])
+ res12
+ res22))
+
+ (is (= [:cold :hot] holder1))
+ (is (= [:hot :cold] holder2))))
;; TODO: Move this to test-dsl once a strategy for replicating assert-ex-data is determined and implemented.
#_{:clj-kondo/ignore [:unresolved-symbol]}
From 7f9e53be68cf6d92294a132854ff63483019ed39 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sat, 6 Jan 2024 23:09:19 -0600
Subject: [PATCH 39/87] feat: refactor fire rules into two separate async and
sync handlers
---
src/main/clojure/clara/rules/engine.clj | 519 +++++++++++++-----------
1 file changed, 278 insertions(+), 241 deletions(-)
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index ffa869fd..6c365d07 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -675,7 +675,7 @@
fact-bindings)})))]
beta-bindings))
-(defn expression-join-node-match->Token
+(defn- expression-join-node-match->Token
[element token node id join-filter-fn env]
(let [fact (:fact element)
fact-binding (:bindings element)
@@ -806,14 +806,7 @@
(let [{:keys [type constraints]} condition]
[:not (into [type] constraints)])))
-(defn- matches-some-facts?
- "Returns true if the given token matches one or more of the given elements."
- [node token elements join-filter-fn condition]
- (some (fn [{:keys [fact bindings]}]
- (join-node-matches node join-filter-fn token fact bindings (:env condition)))
- elements))
-
-(defn negation-join-node-not-match->Token
+(defn- negation-join-node-not-match->Token
[node token elements join-filter-fn condition]
(when-not (some (fn negation-join-match
[{:keys [fact bindings]}]
@@ -1741,159 +1734,279 @@
"NegationWithJoinFilterNode" "NJFN"
"ExpressionJoinNode" "EJN"})
-(defn variables-as-keywords
- "Returns symbols in the given s-expression that start with '?' as keywords"
- [expression]
- (into #{} (platform/eager-for
- [item (tree-seq coll? seq expression)
- :when (and (symbol? item)
- (= \? (first (name item))))]
- (keyword item))))
-
-(defn conj-rulebases
- "DEPRECATED. Simply concat sequences of rules and queries.
-
- Conjoin two rulebases, returning a new one with the same rules."
- [base1 base2]
- (concat base1 base2))
-
-(defn- fire-activation*
- [activation]
+(defn- ->activation-output
+ "Bind the contents of the cache atoms after the RHS is fired since they are used to send to the listener
+ and then again to flush updates. They will be dereferenced again if an exception is caught, but in the error
+ case we aren't worried about performance."
+ [activation _result]
+ (let [{:keys [node
+ token]} activation
+ {:keys [listener]} *current-session*
+ {:keys [batched-logical-insertions
+ batched-rhs-retractions
+ batched-unconditional-insertions]} *rule-context*
+ resulting-ops {:unconditional-insertions @batched-unconditional-insertions
+ :logical-insertions @batched-logical-insertions
+ :rhs-retractions @batched-rhs-retractions}]
+ (l/fire-activation! listener activation resulting-ops)
+ {:token token :node node :ops resulting-ops}))
+
+(defn- throw-activation-exception
+ "If the rule fired an exception, help debugging by attaching
+ details about the rule itself, cached insertions, and any listeners
+ while propagating the cause."
+ [activation exception]
(let [{:keys [node
token]} activation
{:keys [listener]} *current-session*
{:keys [batched-logical-insertions
batched-rhs-retractions
batched-unconditional-insertions]} *rule-context*
- ->activation-output
- (fn activation-output
- ;; We don't actually care what was returned from the activation
- [_]
- ;; Bind the contents of the cache atoms after the RHS is fired since they are used twice
- ;; below. They will be dereferenced again if an exception is caught, but in the error
- ;; case we aren't worried about performance.
- (let [resulting-ops {:unconditional-insertions @batched-unconditional-insertions
- :logical-insertions @batched-logical-insertions
- :rhs-retractions @batched-rhs-retractions}]
- (l/fire-activation! listener activation resulting-ops)
- {:token token :node node :ops resulting-ops}))
- ->activation-ex-args
- (fn activation-ex-args
- [e]
- (let [production (:production node)
- rule-name (:name production)
- rhs (:rhs production)]
- [(str "Exception in " (if rule-name rule-name (pr-str rhs))
- " with bindings " (pr-str (:bindings token)))
- {:bindings (:bindings token)
- :name rule-name
- :rhs rhs
- :batched-logical-insertions @batched-logical-insertions
- :batched-unconditional-insertions @batched-unconditional-insertions
- :batched-rhs-retractions @batched-rhs-retractions
- :listeners (try
- (let [p-listener (l/to-persistent! listener)]
- (if (l/null-listener? p-listener)
- []
- (l/get-children p-listener)))
- (catch Exception listener-exception
- listener-exception))}
- e]))]
+ production (:production node)
+ rule-name (:name production)
+ rhs (:rhs production)]
+ (throw (ex-info
+ (str "Exception in " (if rule-name rule-name (pr-str rhs))
+ " with bindings " (pr-str (:bindings token)))
+ {:bindings (:bindings token)
+ :name rule-name
+ :rhs rhs
+ :batched-logical-insertions @batched-logical-insertions
+ :batched-unconditional-insertions @batched-unconditional-insertions
+ :batched-rhs-retractions @batched-rhs-retractions
+ :listeners (try
+ (let [p-listener (l/to-persistent! listener)]
+ (if (l/null-listener? p-listener)
+ []
+ (l/get-children p-listener)))
+ (catch Exception listener-exception
+ listener-exception))}
+ exception))))
+
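On the consuming side the enriched exception can be unpacked with ex-data; the keys below are the ones attached above:

(try
  (fire-rules session)
  (catch clojure.lang.ExceptionInfo e
    (let [{:keys [name bindings rhs]} (ex-data e)]
      ;; name is the failing rule's name (nil for anonymous rules), bindings are
      ;; the token bindings at the time the RHS threw
      (println "rule" name "failed with bindings" bindings)
      (throw e))))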
+(defn- fire-activation!
+ "Fire the rule's RHS represented by the activation node"
+ [activation]
+ (let [{:keys [node
+ token]} activation]
+ (try
+ ;; Actually fire the rule RHS
+ (let [result ((:rhs node) token (:env (:production node)))]
+ (->activation-output activation (!activation-output (!activation-output activation (!activation-ex-args e)]
- (throw (ex-info msg info cause))))))
- (->activation-output result)))
+ (throw-activation-exception activation e))))
+ (->activation-output activation result)))
(catch Exception e
- ;; If the rule fired an exception, help debugging by attaching
- ;; details about the rule itself, cached insertions, and any listeners
- ;; while propagating the cause.
- (let [[msg info cause] (->activation-ex-args e)]
- (throw (ex-info msg info cause)))))))
-
-(defn fire-rules-async*
- "Fire rules for the given nodes."
- [rulebase transient-memory transport
- listener get-alphas-fn update-cache
- pop-activations-batch-size]
- (async
- (binding [*current-session* {:rulebase rulebase
- :transient-memory transient-memory
- :transport transport
- :insertions (atom 0)
- :get-alphas-fn get-alphas-fn
- :pending-updates update-cache
- :listener listener}]
-
- (loop [next-group (mem/next-activation-group transient-memory)
- last-group nil]
-
- (if next-group
-
- (if (and last-group (not= last-group next-group))
-
- ;; We have changed groups, so flush the updates from the previous
- ;; group before continuing.
+ (throw-activation-exception activation e)))))
+
+(defn- ->activation-rule-context
+ "Use vectors for the insertion caches so that within an insertion type
+ (unconditional or logical) all insertions are done in order after the into
+ calls in insert-facts!. This shouldn't have a functional impact, since any ordering
+ should be valid, but makes traces less confusing to end users.
+ It also prevents any laziness in the sequences."
+ [{:keys [node token]} to-fire-rules]
+ (cond-> {:token token
+ :node node}
+ to-fire-rules
+ (assoc :batched-logical-insertions (atom [])
+ :batched-unconditional-insertions (atom [])
+ :batched-rhs-retractions (atom []))))
+
+(defn- fire-activations!
+ [activations fire-activations-handler]
+ (platform/eager-for
+ [activation activations]
+   ;;; this is the production expression, which could return an async result when computing in parallel
+ (binding [*rule-context* (->activation-rule-context activation true)]
+ (fire-activations-handler activation))))
+
+(defn- process-activations!
+ "Flush the changes and updates made during activation of the rules"
+ [rhs-activations]
+ (doseq [{:keys [token node ops]} rhs-activations
+ :let [{:keys [unconditional-insertions
+ logical-insertions
+ rhs-retractions]} ops]]
+ (binding [*rule-context* {:token token
+ :node node}]
+ (when-let [batched (seq unconditional-insertions)]
+ (flush-insertions! batched true))
+ (when-let [batched (seq logical-insertions)]
+ (flush-insertions! batched false))
+ (when-let [batched (seq rhs-retractions)]
+ (flush-rhs-retractions! batched))
+ ;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
+ ;; will be in context for child rules.
+ (when (some-> node :production :props :no-loop)
+ (flush-updates *current-session*)))))
+
+(defmacro ^:private do-fire-rules
+ [rulebase memory transport
+ listener get-alphas-fn
+ update-cache
+ & fire-activations-body]
+ `(binding [~'clara.rules.engine/*current-session* {:rulebase ~rulebase
+ :transient-memory ~memory
+ :transport ~transport
+ :insertions (atom 0)
+ :get-alphas-fn ~get-alphas-fn
+ :pending-updates ~update-cache
+ :listener ~listener}]
+ (loop [next-group# (mem/next-activation-group ~memory)
+ last-group# nil]
+ (if next-group#
+ (if (and last-group# (not= last-group# next-group#))
+ ;; We have changed groups, so flush the updates from the previous
+ ;; group before continuing.
(do
- (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group)))
-
+ (flush-updates ~'clara.rules.engine/*current-session*)
+ (let [upcoming-group# (mem/next-activation-group ~memory)]
+ (l/activation-group-transition! ~listener next-group# upcoming-group#)
+ (recur upcoming-group# next-group#)))
(do
-
- ;; If there are activations, fire them.
- (let [activations (mem/pop-activations! transient-memory pop-activations-batch-size)
- rhs-activations
- (platform/eager-for
- [{:keys [node token] :as activation} activations
- ;; Use vectors for the insertion caches so that within an insertion type
- ;; (unconditional or logical) all insertions are done in order after the into
- ;; calls in insert-facts!. This shouldn't have a functional impact, since any ordering
- ;; should be valid, but makes traces less confusing to end users. It also prevents any laziness
- ;; in the sequences.
- :let [batched-logical-insertions (atom [])
- batched-unconditional-insertions (atom [])
- batched-rhs-retractions (atom [])
- rule-context {:token token
- :node node
- :batched-logical-insertions batched-logical-insertions
- :batched-unconditional-insertions batched-unconditional-insertions
- :batched-rhs-retractions batched-rhs-retractions}]]
- ;;; this the production expression, which could return an async result if parallel computing
- (binding [*rule-context* rule-context]
- (fire-activation* activation)))]
- (doseq [{:keys [token node ops]} (! node :production :props :no-loop)
- (flush-updates *current-session*)))))
- (recur (mem/next-activation-group transient-memory) next-group)))
-
- ;; There were no items to be activated, so flush any pending
- ;; updates and recur with a potential new activation group
- ;; since a flushed item may have triggered one.
- (when (flush-updates *current-session*)
- (let [upcoming-group (mem/next-activation-group transient-memory)]
- (l/activation-group-transition! listener next-group upcoming-group)
- (recur upcoming-group next-group))))))))
+ ;; If there are activations, fire them.
+ ~@fire-activations-body
+ (recur (mem/next-activation-group ~memory) next-group#)))
+ ;; There were no items to be activated, so flush any pending
+ ;; updates and recur with a potential new activation group
+ ;; since a flushed item may have triggered one.
+ (when (flush-updates ~'clara.rules.engine/*current-session*)
+ (let [upcoming-group# (mem/next-activation-group ~memory)]
+ (l/activation-group-transition! ~listener next-group# upcoming-group#)
+ (recur upcoming-group# next-group#)))))))
+
+(defn- fire-rules!
+ "Fire rules for the given nodes supporting async behavior."
+ [{:keys [rulebase memory transport
+ listener get-alphas-fn
+ update-cache]}]
+ (do-fire-rules
+ rulebase memory transport
+ listener get-alphas-fn
+ update-cache
+ (let [activations (mem/pop-activations! memory 1)
+ rhs-activations (fire-activations! activations fire-activation!)]
+ (process-activations! rhs-activations))))
+
+(defn- fire-rules-async!
+ "Fire rules for the given nodes supporting async behavior."
+ [{:keys [rulebase memory transport
+ listener get-alphas-fn
+ update-cache options]}]
+ (async
+ (do-fire-rules
+ rulebase memory transport
+ listener get-alphas-fn
+ update-cache
+ (let [pop-activations-batch-size (or (:parallel-batch-size options) 1)
+ activations (mem/pop-activations! memory pop-activations-batch-size)
+ rhs-activations (fire-activations! activations fire-activation-async!)]
+ (process-activations! (!LocalSession)
@@ -1938,107 +2051,31 @@
(fire-rules [session]
(fire-rules session {}))
(fire-rules [session opts]
- (!LocalSession rulebase
+ (mem/to-persistent! transient-memory)
+ transport
+ (l/to-persistent! transient-listener)
+ get-alphas-fn
+ [])))
;; These return CompletableFuture async results
(fire-rules-async [session]
(fire-rules-async session {}))
(fire-rules-async [session opts]
(async
(let [transient-memory (mem/to-transient memory)
- transient-listener (l/to-transient listener)
- parallel-batch-size (or (:parallel-batch-size opts) 1)]
-
- (if-not (:cancelling opts)
- ;; We originally performed insertions and retractions immediately after the insert and retract calls,
- ;; but this had the downside of making a pattern like "Retract facts, insert other facts, and fire the rules"
- ;; perform at least three transitions between a persistent and transient memory. Delaying the actual execution
- ;; of the insertions and retractions until firing the rules allows us to cut this down to a single transition
- ;; between persistent and transient memory. There is some cost to the runtime dispatch on operation types here,
- ;; but this is presumably less significant than the cost of memory transitions.
- ;;
- ;; We perform the insertions and retractions in the same order as they were applied to the session since
- ;; if a fact is not in the session, retracted, and then subsequently inserted it should be in the session at
- ;; the end.
- (do
- (doseq [{op-type :type facts :facts} pending-operations]
-
- (case op-type
-
- :insertion
- (do
- (l/insert-facts! transient-listener nil nil facts)
-
- (binding [*pending-external-retractions* (atom [])]
- ;; Bind the external retractions cache so that any logical retractions as a result
- ;; of these insertions can be cached and executed as a batch instead of eagerly realizing
- ;; them. An external insertion of a fact that matches
- ;; a negation or accumulator condition can cause logical retractions.
- (doseq [[alpha-roots fact-group] (get-alphas-fn facts)
- root alpha-roots]
- (alpha-activate root fact-group transient-memory transport transient-listener))
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))
-
- :retraction
- (do
- (l/retract-facts! transient-listener nil nil facts)
-
- (binding [*pending-external-retractions* (atom facts)]
- (external-retract-loop get-alphas-fn transient-memory transport transient-listener)))))
-
- (!LocalSession rulebase
(mem/to-persistent! transient-memory)
transport
From d74b0b92ebb19f83b1aaeda75721c0b24bc9e298 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 00:00:27 -0600
Subject: [PATCH 40/87] WIP
---
src/main/clojure/clara/rules/compiler.clj | 2 +-
src/main/clojure/clara/rules/engine.clj | 37 +++++++++++-------
.../clojure/clara/tools/testing_utils.clj | 39 ++++++++++++++++++-
3 files changed, 62 insertions(+), 16 deletions(-)
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index a5ce3407..186ad819 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -291,7 +291,7 @@
(vary-meta fact-symbol assoc :tag (symbol (.getName ^Class fact-type)))
fact-symbol)))
-(defn- mk-node-fn-name
+(defn mk-node-fn-name
"A simple helper function to maintain a consistent pattern for naming anonymous functions in the rulebase.
node-type - expected to align with one of the types of nodes defined in clara.rules.engine, and node-type->abbreviated-type.
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 6c365d07..955fc1fa 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -243,7 +243,10 @@
(defn- flush-updates
"Flush all pending updates in the current session. Returns true if there were
some items to flush, false otherwise"
- [current-session]
+ [label current-session]
+ (when-not (-> current-session :pending-updates)
+ (throw (ex-info "session pending updates missing:" {:session current-session
+ :label label})))
(letfn [(flush-all [current-session flushed-items?]
(let [{:keys [rulebase transient-memory transport insertions get-alphas-fn listener]} current-session
pending-updates (-> current-session :pending-updates uc/get-updates-and-reset!)]
@@ -1784,7 +1787,9 @@
exception))))
(defn- fire-activation!
- "Fire the rule's RHS represented by the activation node"
+ "Fire the rule's RHS represented by the activation node,
+ if an activation returns an async result then it is handled
+ by blocking until it completes."
[activation]
(let [{:keys [node
token]} activation]
@@ -1796,7 +1801,9 @@
(throw-activation-exception activation e)))))
(defn- fire-activation-async!
- "Fire the rule's RHS represented by the activation node"
+ "Fire the rule's RHS represented by the activation node,
+ if an activation returns an async result then it is handled
+ without blocking and the call returns an async result as well."
[activation]
(let [{:keys [node
token]} activation]
@@ -1828,6 +1835,7 @@
:batched-rhs-retractions (atom []))))
(defn- fire-activations!
+ "fire all activations in order"
[activations fire-activations-handler]
(platform/eager-for
[activation activations]
@@ -1853,20 +1861,21 @@
;; Explicitly flush updates if we are in a no-loop rule, so the no-loop
;; will be in context for child rules.
(when (some-> node :production :props :no-loop)
- (flush-updates *current-session*)))))
+ (flush-updates :process-activations *current-session*)))))
(defmacro ^:private do-fire-rules
+ "Instrument a session to fire rules activations then execute the fire-activations-body"
[rulebase memory transport
listener get-alphas-fn
update-cache
& fire-activations-body]
- `(binding [~'clara.rules.engine/*current-session* {:rulebase ~rulebase
- :transient-memory ~memory
- :transport ~transport
- :insertions (atom 0)
- :get-alphas-fn ~get-alphas-fn
- :pending-updates ~update-cache
- :listener ~listener}]
+ `(binding [*current-session* {:rulebase ~rulebase
+ :transient-memory ~memory
+ :transport ~transport
+ :insertions (atom 0)
+ :get-alphas-fn ~get-alphas-fn
+ :pending-updates ~update-cache
+ :listener ~listener}]
(loop [next-group# (mem/next-activation-group ~memory)
last-group# nil]
(if next-group#
@@ -1874,7 +1883,7 @@
;; We have changed groups, so flush the updates from the previous
;; group before continuing.
(do
- (flush-updates ~'clara.rules.engine/*current-session*)
+ (flush-updates :changed-group *current-session*)
(let [upcoming-group# (mem/next-activation-group ~memory)]
(l/activation-group-transition! ~listener next-group# upcoming-group#)
(recur upcoming-group# next-group#)))
@@ -1885,13 +1894,13 @@
;; There were no items to be activated, so flush any pending
;; updates and recur with a potential new activation group
;; since a flushed item may have triggered one.
- (when (flush-updates ~'clara.rules.engine/*current-session*)
+ (when (flush-updates :changing-groups *current-session*)
(let [upcoming-group# (mem/next-activation-group ~memory)]
(l/activation-group-transition! ~listener next-group# upcoming-group#)
(recur upcoming-group# next-group#)))))))
(defn- fire-rules!
- "Fire rules for the given nodes supporting async behavior."
+ "Fire rules for the given nodes, sequentially one at a time"
[{:keys [rulebase memory transport
listener get-alphas-fn
update-cache]}]
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
index 16adba09..4687a8de 100644
--- a/src/main/clojure/clara/tools/testing_utils.clj
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -7,8 +7,9 @@
[clara.rules.update-cache.cancelling :as ca]
[clara.rules.compiler :as com]
[clara.rules.dsl :as dsl]
+ [clara.rules :as r]
[clojure.test :refer [is]]
- [clara.rules.platform :as platform]))
+ [futurama.core :refer [async ! (count env) 0)
+ {:keys (mapv #(symbol (name %)) (keys env))}
+ '?__env__)
+
+ ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-action' to be used for this scenario
+ fn-name (com/mk-node-fn-name "ProductionNode" node-id "AE")]
+ `(fn ~fn-name [~'?__token__ ~destructured-env]
+ (async
+ (let [~@assignments]
+ ~rhs)))))
+
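Roughly the shape of the action fn this emits for a rule with no env bindings; the fn name and the RHS body are illustrative, the name actually comes from com/mk-node-fn-name:

(fn ProductionNode-1-AE [?__token__ ?__env__]   ;; illustrative generated name
  (async
    (let []
      (insert! (->SomeFact 1)))))               ;; stands in for the rule's RHS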
+(defn test-fire-rules-async
+ ([session]
+ (test-fire-rules-async session {}))
+ ([session opts]
+ (!
Date: Sun, 7 Jan 2024 08:53:54 -0600
Subject: [PATCH 41/87] feat: refactor bound session and fire rules fns
---
src/main/clojure/clara/rules/engine.clj | 234 ++++++++----------
.../clojure/clara/tools/testing_utils.clj | 4 +-
src/test/clojure/clara/tools/test_tracing.clj | 2 +-
3 files changed, 105 insertions(+), 135 deletions(-)
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 955fc1fa..a7921253 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -1865,157 +1865,127 @@
(defmacro ^:private do-fire-rules
"Instrument a session to fire rules activations then execute the fire-activations-body"
- [rulebase memory transport
- listener get-alphas-fn
- update-cache
- & fire-activations-body]
- `(binding [*current-session* {:rulebase ~rulebase
- :transient-memory ~memory
- :transport ~transport
- :insertions (atom 0)
- :get-alphas-fn ~get-alphas-fn
- :pending-updates ~update-cache
- :listener ~listener}]
- (loop [next-group# (mem/next-activation-group ~memory)
- last-group# nil]
- (if next-group#
- (if (and last-group# (not= last-group# next-group#))
- ;; We have changed groups, so flush the updates from the previous
- ;; group before continuing.
- (do
- (flush-updates :changed-group *current-session*)
- (let [upcoming-group# (mem/next-activation-group ~memory)]
- (l/activation-group-transition! ~listener next-group# upcoming-group#)
- (recur upcoming-group# next-group#)))
- (do
- ;; If there are activations, fire them.
- ~@fire-activations-body
- (recur (mem/next-activation-group ~memory) next-group#)))
- ;; There were no items to be activated, so flush any pending
- ;; updates and recur with a potential new activation group
- ;; since a flushed item may have triggered one.
- (when (flush-updates :changing-groups *current-session*)
+ [memory listener & fire-activations-body]
+ `(loop [next-group# (mem/next-activation-group ~memory)
+ last-group# nil]
+ (if next-group#
+ (if (and last-group# (not= last-group# next-group#))
+ ;; We have changed groups, so flush the updates from the previous
+ ;; group before continuing.
+ (do
+ (flush-updates :changed-group *current-session*)
(let [upcoming-group# (mem/next-activation-group ~memory)]
(l/activation-group-transition! ~listener next-group# upcoming-group#)
- (recur upcoming-group# next-group#)))))))
+ (recur upcoming-group# next-group#)))
+ (do
+ ;; If there are activations, fire them.
+ ~@fire-activations-body
+ (recur (mem/next-activation-group ~memory) next-group#)))
+ ;; There were no items to be activated, so flush any pending
+ ;; updates and recur with a potential new activation group
+ ;; since a flushed item may have triggered one.
+ (when (flush-updates :changing-groups *current-session*)
+ (let [upcoming-group# (mem/next-activation-group ~memory)]
+ (l/activation-group-transition! ~listener next-group# upcoming-group#)
+ (recur upcoming-group# next-group#))))))
(defn- fire-rules!
"Fire rules for the given nodes, sequentially one at a time"
- [{:keys [rulebase memory transport
- listener get-alphas-fn
- update-cache]}]
+ [{:keys [transient-memory listener]} _options]
(do-fire-rules
- rulebase memory transport
- listener get-alphas-fn
- update-cache
- (let [activations (mem/pop-activations! memory 1)
+ transient-memory listener
+ (let [activations (mem/pop-activations! transient-memory 1)
rhs-activations (fire-activations! activations fire-activation!)]
(process-activations! rhs-activations))))
(defn- fire-rules-async!
"Fire rules for the given nodes supporting async behavior."
- [{:keys [rulebase memory transport
- listener get-alphas-fn
- update-cache options]}]
+ [{:keys [transient-memory listener]} options]
(async
(do-fire-rules
- rulebase memory transport
- listener get-alphas-fn
- update-cache
+ transient-memory listener
(let [pop-activations-batch-size (or (:parallel-batch-size options) 1)
- activations (mem/pop-activations! memory pop-activations-batch-size)
+ activations (mem/pop-activations! transient-memory pop-activations-batch-size)
rhs-activations (fire-activations! activations fire-activation-async!)]
(process-activations! (!LocalSession)
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
index 4687a8de..9a5eb659 100644
--- a/src/main/clojure/clara/tools/testing_utils.clj
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -86,8 +86,8 @@
fn-name (com/mk-node-fn-name "ProductionNode" node-id "AE")]
`(fn ~fn-name [~'?__token__ ~destructured-env]
(async
- (let [~@assignments]
- ~rhs)))))
+ (let [~@assignments]
+ ~rhs)))))
(defn test-fire-rules-async
([session]
diff --git a/src/test/clojure/clara/tools/test_tracing.clj b/src/test/clojure/clara/tools/test_tracing.clj
index ece16714..56862983 100644
--- a/src/test/clojure/clara/tools/test_tracing.clj
+++ b/src/test/clojure/clara/tools/test_tracing.clj
@@ -132,7 +132,7 @@
;; Ensure expected events occur in order.
(is (= [:add-facts :alpha-activate :right-activate :left-activate :add-activations :fire-activation
:add-facts-logical :activation-group-transition :retract-facts :alpha-retract :right-retract
- :left-retract :remove-activations :retract-facts-logical]
+ :left-retract :remove-activations :retract-facts-logical :activation-group-transition]
(map :type session-trace)))
;; Ensure only the expected fact was indicated as retracted.
From 8f6770bb68f7e5ca96ec87b172ac0301fee7b2d1 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 09:06:34 -0600
Subject: [PATCH 42/87] feat: disable rule order tests when doing parallel
testing
---
.../clojure/clara/tools/testing_utils.clj | 8 +-
src/test/clojure/clara/test_rules.clj | 245 +++++++++---------
2 files changed, 130 insertions(+), 123 deletions(-)
diff --git a/src/main/clojure/clara/tools/testing_utils.clj b/src/main/clojure/clara/tools/testing_utils.clj
index 9a5eb659..e4d8dc4a 100644
--- a/src/main/clojure/clara/tools/testing_utils.clj
+++ b/src/main/clojure/clara/tools/testing_utils.clj
@@ -95,18 +95,22 @@
([session opts]
(! (mk-session [rule-A rule-B] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Rule order in a seq of rule structures should be respected")
-
- (reset! fire-order [])
-
- (-> (mk-session [rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when we reverse the seq of rule structures the firing order is reversed.")
-
- (reset! fire-order [])
-
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
+ (-> (mk-session [rule-A rule-B] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:A :C :D])
- "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
- in the rule sources should fire first."))
+ (is (= @fire-order [:A :B])
+ "Rule order in a seq of rule structures should be respected")
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
+ (-> (mk-session [rule-B rule-A] :cache false)
(insert (->Cold 10))
fire-rules)
- (is (= @fire-order [:C :D :A])
- "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
- in the rule sources should fire later."))
+ (is (= @fire-order [:B :A])
+ "Validate that when we reverse the seq of rule structures the firing order is reversed.")
- (reset! fire-order [])
+ (reset! fire-order [])
+
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session [rule-A] 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :C :D])
+ "Rules should fire in the order they appear in a namespace. A rule that is before that namespace
+ in the rule sources should fire first."))
- (let [rule-C-line (-> #'order-rules/rule-C meta :line)
- rule-D-line (-> #'order-rules/rule-D meta :line)]
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
+ (reset! fire-order [])
(binding [order-rules/*rule-order-atom* fire-order]
- (-> (mk-session 'clara.order-ruleset :cache false)
+ (-> (mk-session 'clara.order-ruleset [rule-A] :cache false)
(insert (->Cold 10))
- fire-rules))
+ fire-rules)
+ (is (= @fire-order [:C :D :A])
+ "Rules should fire in the order they appear in a namespace. A rule that is after that namespace
+ in the rule sources should fire later."))
- (is (= @fire-order [:D :C])
- "When we alter the metadata of the rules to reverse their line order their
+ (reset! fire-order [])
+
+ (let [rule-C-line (-> #'order-rules/rule-C meta :line)
+ rule-D-line (-> #'order-rules/rule-D meta :line)]
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-D-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-C-line)))
+
+ (binding [order-rules/*rule-order-atom* fire-order]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules))
+
+ (is (= @fire-order [:D :C])
+ "When we alter the metadata of the rules to reverse their line order their
firing order should also be reversed.")
- ;; Reset the line metadata on the rules to what it was previously.
- (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
- (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
+ ;; Reset the line metadata on the rules to what it was previously.
+ (alter-meta! #'order-rules/rule-C (fn [m] (assoc m :line rule-C-line)))
+ (alter-meta! #'order-rules/rule-D (fn [m] (assoc m :line rule-D-line))))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-prior* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B :C :D])
- "When a :production-seq occurs before defrules, the rules in the :production-seq
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-prior* [rule-A rule-B]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :B :C :D])
+ "When a :production-seq occurs before defrules, the rules in the :production-seq
should fire before those rules, in the order in which they appear in the :production-seq."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-A rule-B]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:C :D :A :B])
- "When a :production-seq occurs after defrules, the rules in the :production-seq
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-A rule-B]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :A :B])
+ "When a :production-seq occurs after defrules, the rules in the :production-seq
should fire after those rules, in the order in which they appear in the :production-seq."))
- (reset! fire-order [])
+ (reset! fire-order [])
- (binding [order-rules/*rule-order-atom* fire-order
- order-rules/*rule-seq-after* [rule-B rule-A]]
- (-> (mk-session 'clara.order-ruleset :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:C :D :B :A])
- "Validate that when the order of rules in the :production-seq is reversed those rules
+ (binding [order-rules/*rule-order-atom* fire-order
+ order-rules/*rule-seq-after* [rule-B rule-A]]
+ (-> (mk-session 'clara.order-ruleset :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:C :D :B :A])
+ "Validate that when the order of rules in the :production-seq is reversed those rules
fire in the reversed order"))
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:B :A])
- "Validate that when explicit salience is present it overrides rule order.")
+ (-> (mk-session [rule-A (assoc-in rule-B [:props :salience] 1)] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:B :A])
+ "Validate that when explicit salience is present it overrides rule order.")
- (reset! fire-order [])
+ (reset! fire-order [])
- (-> (mk-session [rule-A rule-B rule-A] :cache false)
- (insert (->Cold 10))
- fire-rules)
- (is (= @fire-order [:A :B])
- "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
+ (-> (mk-session [rule-A rule-B rule-A] :cache false)
+ (insert (->Cold 10))
+ fire-rules)
+ (is (= @fire-order [:A :B])
+ "Validate that the first occurence of a rule is used for rule ordering when it occurs multiple times.")
- (reset! fire-order [])))
+ (reset! fire-order []))))
(deftest test-rule-order-respected-by-batched-inserts
- (let [qholder (atom [])
-
- r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Cold ?t)))
- r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
- (insert! (->Hot ?t)))
-
- ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
- q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
- q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
-
- order1 (mk-session [r1 r2 q1 q2] :cache false)
- order2 (mk-session [r2 r1 q1 q2] :cache false)
-
- run-session (fn [s]
- (let [s (-> s
- (insert (->Temperature 10 "MCI"))
- fire-rules)]
- [(-> s (query q1) frequencies)
- (-> s (query q2) frequencies)]))
-
- [res11 res12] (run-session order1)
- holder1 @qholder
- _ (reset! qholder [])
-
- [res21 res22] (run-session order2)
- holder2 @qholder
- _ (reset! qholder [])]
-
- ;; Sanity check that the query matches what is expected.
- (is (= (frequencies [{:?c (->Cold 10)}])
- res11
- res21))
- (is (= (frequencies [{:?h (->Hot 10)}])
- res12
- res22))
-
- (is (= [:cold :hot] holder1))
- (is (= [:hot :cold] holder2))))
+ (when-not tu/parallel-testing
+ (let [qholder (atom [])
+
+ r1 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Cold ?t)))
+ r2 (dsl/parse-rule [[Temperature (= ?t temperature)]]
+ (insert! (->Hot ?t)))
+
+ ;; Make two "alpha roots" that the 2 rules above insertions will need to propagate to.
+ q1 (dsl/parse-query [] [[?c <- Cold (swap! qholder conj :cold)]])
+ q2 (dsl/parse-query [] [[?h <- Hot (swap! qholder conj :hot)]])
+
+ order1 (mk-session [r1 r2 q1 q2] :cache false)
+ order2 (mk-session [r2 r1 q1 q2] :cache false)
+
+ run-session (fn [s]
+ (let [s (-> s
+ (insert (->Temperature 10 "MCI"))
+ fire-rules)]
+ [(-> s (query q1) frequencies)
+ (-> s (query q2) frequencies)]))
+
+ [res11 res12] (run-session order1)
+ holder1 @qholder
+ _ (reset! qholder [])
+
+ [res21 res22] (run-session order2)
+ holder2 @qholder
+ _ (reset! qholder [])]
+
+ ;; Sanity check that the query matches what is expected.
+ (is (= (frequencies [{:?c (->Cold 10)}])
+ res11
+ res21))
+ (is (= (frequencies [{:?h (->Hot 10)}])
+ res12
+ res22))
+
+ (is (= [:cold :hot] holder1))
+ (is (= [:hot :cold] holder2)))))
;; TODO: Move this to test-dsl once a strategy for replicating assert-ex-data is determined and implemented.
#_{:clj-kondo/ignore [:unresolved-symbol]}
From ac03bdef1391d12d1ef8830020dfaaec8002807d Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 09:17:46 -0600
Subject: [PATCH 43/87] feat: update changelog and pom file for release
---
CHANGELOG.md | 3 ++-
pom.xml | 4 ++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 29335238..8aa40456 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,8 +1,9 @@
This is a history of changes to k13labs/clara-rules.
-# 0.9.0-SNAPSHOT
+# 0.9.0
* Add parallel support to Node and RHS activation.
* Remove ClojureScript support, general cleanup.
+* Refactor engine to support both fire-rules and fire-rules-async.
* Replace mutable/transient collections with [ham-fisted](https://github.com/cnuernber/ham-fisted) data structures.
This is a history of changes to clara-rules prior to forking to k13labs/clara-rules.
diff --git a/pom.xml b/pom.xml
index 1b53edc6..75481e43 100644
--- a/pom.xml
+++ b/pom.xml
@@ -4,13 +4,13 @@
jar
com.github.k13labs
clara-rules
- 0.9.0
clara-rules
+ 0.9.0
+ 0.9.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
- 0.9.0
From a36ae24163747a7c4bd86752ed84171392152266 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 09:24:20 -0600
Subject: [PATCH 44/87] feat: bump futurama to 0.3.8
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 6 +++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8aa40456..05cd9600 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.1
+* Update docs and futurama version bump
+
# 0.9.0
* Add parallel support to Node and RHS activation.
* Remove ClojureScript support, general cleanup.
diff --git a/deps.edn b/deps.edn
index 59bf6bc6..5407805a 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.3.7"}
+ com.github.k13labs/futurama {:mvn/version "0.3.8"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 75481e43..da5678b0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.0
+ 0.9.1
- 0.9.0
+ 0.9.1
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.3.1
+ 0.3.8
com.cnuernber
From 7be6dea696686a34dfda88bcee879e66f7f95b83 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 09:33:51 -0600
Subject: [PATCH 45/87] feat: add YourKit to README
---
README.md | 10 ++++++++++
1 file changed, 10 insertions(+)
diff --git a/README.md b/README.md
index f4b23af0..c9699708 100644
--- a/README.md
+++ b/README.md
@@ -64,6 +64,16 @@ Clara releases for this project are on [Clojars](https://clojars.org/). Simply a
See [CONTRIBUTING.md](CONTRIBUTING.md)
+# YourKit
+
+[![YourKit](https://www.yourkit.com/images/yklogo.png)](https://www.yourkit.com/)
+
+YourKit supports open source projects with innovative and intelligent tools
+for monitoring and profiling Java and .NET applications.
+YourKit is the creator of [YourKit Java Profiler](https://www.yourkit.com/java/profiler/),
+[YourKit .NET Profiler](https://www.yourkit.com/dotnet-profiler/),
+and [YourKit YouMonitor](https://www.yourkit.com/youmonitor/).
+
# LICENSE
Copyright 2023 Jose Gomez
From 7645ec8b9439c9fd05213fb7c5d4b2e7d0a961fa Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 10:21:35 -0600
Subject: [PATCH 46/87] feat: bump futurama to 0.3.9
---
deps.edn | 2 +-
pom.xml | 6 +++---
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/deps.edn b/deps.edn
index 5407805a..476de9d9 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.3.8"}
+ com.github.k13labs/futurama {:mvn/version "0.3.9"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index da5678b0..8d888644 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.1
+ 0.9.2
- 0.9.1
+ 0.9.2
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.3.8
+ 0.3.9
com.cnuernber
From 302f99cfc61ddf9fadc1db606302f90c4c975602 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 7 Jan 2024 22:55:14 -0600
Subject: [PATCH 47/87] feat: add a built-in way to interrupt a running session
 during rule activations, stopping infinite runaway sessions
---
deps.edn | 2 +-
src/main/clojure/clara/rules/engine.clj | 5 ++
.../clojure/clara/test_infinite_loops.clj | 63 ++++++++++++++++++-
3 files changed, 67 insertions(+), 3 deletions(-)
diff --git a/deps.edn b/deps.edn
index 476de9d9..cf51e113 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.3.9"}
+ com.github.k13labs/futurama {:mvn/version "0.6.0"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index a7921253..ba94a6c5 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -9,6 +9,7 @@
[clara.rules.update-cache.cancelling :as ca]
[futurama.core :refer [async
async?
+ async-cancelled?
!activation-output activation (!Cold ->Hot ->First ->Second]]
[clara.tools.testing-utils :refer [def-rules-test
ex-data-maps
@@ -9,13 +10,71 @@
assert-ex-data]]
[clara.tools.tracing :as tr]
[clara.rules.accumulators :as acc]
- [clara.tools.loop-detector :as ld])
- (:import [clara.rules.testfacts Cold Hot First Second]
+ [clara.tools.loop-detector :as ld]
+ [clojure.core.async :refer [timeout]]
+ [futurama.core :refer [!Cold nil))))
+ result-f (-> (mk-session [cold-rule] :cache false)
+ (insert (->Cold nil))
+ (fire-rules-async))]
+ (when-not (deref result-f 10 nil)
+ (async-cancel! result-f))
+ (is (thrown? CancellationException (!Cold nil))))
+ result-f (-> (mk-session [cold-rule] :cache false)
+ (insert (->Cold nil))
+ (fire-rules-async))]
+ (when-not (deref result-f 10 nil)
+ (async-cancel! result-f))
+ (is (thrown? CancellationException (!Cold nil))))
+ result-f (future
+ (-> (mk-session [cold-rule] :cache false)
+ (insert (->Cold nil))
+ (fire-rules)))]
+ (when-not (deref result-f 10 nil)
+ (async-cancel! result-f))
+ (is (thrown? CancellationException (!Cold nil))))
+ result-f (future
+ (-> (mk-session [cold-rule] :cache false)
+ (insert (->Cold nil))
+ (fire-rules)))]
+ (when-not (deref result-f 10 nil)
+ (async-cancel! result-f))
+ (is (thrown? CancellationException (!
Date: Sun, 7 Jan 2024 23:00:10 -0600
Subject: [PATCH 48/87] chore: release 0.9.3
---
CHANGELOG.md | 7 +++++++
pom.xml | 6 +++---
2 files changed, 10 insertions(+), 3 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 05cd9600..9b0afdc8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.3
+* Implement interruptible sessions using futurama's async-cancel capabilities.
+* Add tests for runaway infinite-loop sessions which can be interrupted.
+
+# 0.9.2
+* Update docs and futurama version bump again
+
# 0.9.1
* Update docs and futurama version bump
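As a companion to the 0.9.3 entries above, a hedged sketch of the interruption pattern the new tests exercise. The runaway rule is deliberately contrived and the 10 ms deadline is arbitrary:

```clj
(require '[clara.rules :refer [mk-session insert insert! fire-rules-async]]
         '[clara.rules.dsl :as dsl]
         '[futurama.core :refer [async-cancel!]])

;; A deliberately runaway rule: every :counter fact inserts another one.
(def runaway-rule
  (dsl/parse-rule [[:counter [{:keys [n]}] (= ?n n)]]
                  (insert! {:type :counter :n (inc ?n)})))

(let [result (-> (mk-session [runaway-rule] :fact-type-fn :type :cache false)
                 (insert {:type :counter :n 0})
                 (fire-rules-async))]
  ;; Give the session a short budget, then interrupt it if it has not finished.
  (when-not (deref result 10 nil)
    (async-cancel! result))
  ;; Dereferencing the cancelled result throws CancellationException.
  (deref result))
```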
diff --git a/pom.xml b/pom.xml
index 8d888644..4533d09b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.2
+ 0.9.3
- 0.9.2
+ 0.9.3
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.3.9
+ 0.6.0
com.cnuernber
From 9afec6a6245a04de7cb04b07216d9d982b6b498e Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Mon, 8 Jan 2024 10:55:48 -0600
Subject: [PATCH 49/87] feat: update futurama lib version
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 6 +++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9b0afdc8..151aa34a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.4
+* Update `futurama` to latest version
+
# 0.9.3
* Implement interruptible sessions using futurama's async-cancel capabilities.
* Add tests for infinite loop runaway sessions which can be interrupted.
diff --git a/deps.edn b/deps.edn
index cf51e113..7df12316 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.0"}
+ com.github.k13labs/futurama {:mvn/version "0.6.1"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 4533d09b..16b614e6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.3
+ 0.9.4
- 0.9.3
+ 0.9.4
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.0
+ 0.6.1
com.cnuernber
From 9cf439c7cdc1bbbeb540fee0c9b409ce82960bc4 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 9 Jan 2024 14:03:17 -0600
Subject: [PATCH 50/87] feat: update docs, explicitly use async-future for
 fire-rules-async
---
README.md | 15 ++++++++++++---
deps.edn | 2 +-
pom.xml | 6 +++---
src/main/clojure/clara/rules.clj | 5 ++++-
src/main/clojure/clara/rules/engine.clj | 6 ++++--
5 files changed, 24 insertions(+), 10 deletions(-)
diff --git a/README.md b/README.md
index c9699708..4dc4f714 100644
--- a/README.md
+++ b/README.md
@@ -12,7 +12,8 @@ Here's a simple example. Complete documentation is at [clara-rules.org](http://w
```clj
(ns clara.support-example
- (:require [clara.rules :refer :all]))
+ (:require [clara.rules :refer :all]
+ [futurama.core :refer [!
(println "Notify" ?name "that" ?client "has a new support request!"))
-;; Run the rules! We can just use Clojure's threading macro to wire things up.
+;; We can just use Clojure's threading macro to wire things up below.
+
+;; Run the rules!
(-> (mk-session)
(insert (->ClientRepresentative "Alice" "Acme")
(->SupportRequest "Acme" :high))
(fire-rules))
+;; Run the rules asynchronously!
+(!<!! (-> (mk-session)
+ (insert (->ClientRepresentative "Alice" "Acme")
+ (->SupportRequest "Acme" :high))
+ (fire-rules-async {:parallel-batch-size 50})))
+
;;;; Prints this:
;; High support requested!
@@ -47,7 +56,7 @@ Here's a simple example. Complete documentation is at [clara-rules.org](http://w
Clara is built, tested, and deployed using [Clojure Tools Deps](https://clojure.org/guides/deps_and_cli).
-CMake is used to simplify invocation of some commands.
+GNU Make is used to simplify invocation of some commands.
# _Availability_
diff --git a/deps.edn b/deps.edn
index 7df12316..0ebf5a2f 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.1"}
+ com.github.k13labs/futurama {:mvn/version "0.6.2"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 16b614e6..195a89d8 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.4
+ 0.9.5
- 0.9.4
+ 0.9.5
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.1
+ 0.6.2
com.cnuernber
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index 97fb10e9..e07bec08 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -49,8 +49,11 @@
a new session in which the rules are marked as fired.
This take an additional map of options as a second argument. Current options:
+ :parallel-batch-size (NEW, subject to change/updates in the future):
+ Allows the rules engine to fire rule RHS activations in batches of at most N rules (default N = 1), so rules whose RHS returns an async
+ result such as a Future, CompletableFuture, Deferred, or Channel can be fired simultaneously and their results awaited without blocking.
- :cancelling true (EXPERIMENTAL, subject to change/removal. Not supported in ClojureScript.):
+ :cancelling true (EXPERIMENTAL, subject to change/removal):
Simultaneously propagate insertions and retractions through the rules network, at every step using the insertion and retractions of equals facts to cancel each
other out and avoid operations deeper in the rules network. The behavior of unconditional insertions and RHS (right-hand side) retractions
is undefined when this option is enabled and this option should not be used when calling fire-rules can result in these operations.
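A hedged usage sketch of the :parallel-batch-size option documented above. The rules namespace is hypothetical, and !<!! is futurama's blocking take:

```clj
(require '[clara.rules :refer [mk-session insert fire-rules-async]]
         '[futurama.core :refer [!<!!]])

;; Fire RHS activations in batches of up to 50, so async RHS results
;; (futures, channels, deferreds) are awaited concurrently rather than
;; one at a time. 'my.app.rules is a made-up rules namespace.
(!<!! (-> (mk-session 'my.app.rules :fact-type-fn :type)
          (insert {:type :number :value 199})
          (fire-rules-async {:parallel-batch-size 50})))
```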
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index ba94a6c5..2c176aee 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -9,10 +9,12 @@
[clara.rules.update-cache.cancelling :as ca]
[futurama.core :refer [async
async?
+ async-future
async-cancelled?
!
Date: Wed, 10 Jan 2024 14:43:01 -0600
Subject: [PATCH 51/87] feat: async enhancements and add engine test with 10k
async rules fired
---
CHANGELOG.md | 6 ++++
deps.edn | 2 +-
pom.xml | 6 ++--
src/main/clojure/clara/rules/engine.clj | 37 +++++++++++++++++--------
src/test/clojure/clara/test_engine.clj | 26 +++++++++++++----
5 files changed, 56 insertions(+), 21 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 151aa34a..d1da2523 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.6
+* Async enhancements; add an engine stress test that fires thousands of async rules
+
+# 0.9.5
+* Update docs and explicit async-future in fire-rules-async
+
# 0.9.4
* Update `futurama` to latest version
diff --git a/deps.edn b/deps.edn
index 0ebf5a2f..8d14bed3 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.2"}
+ com.github.k13labs/futurama {:mvn/version "0.6.4"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 195a89d8..2a29b0ec 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.5
+ 0.9.6
- 0.9.5
+ 0.9.6
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.2
+ 0.6.4
com.cnuernber
diff --git a/src/main/clojure/clara/rules/engine.clj b/src/main/clojure/clara/rules/engine.clj
index 2c176aee..7fca8f91 100644
--- a/src/main/clojure/clara/rules/engine.clj
+++ b/src/main/clojure/clara/rules/engine.clj
@@ -11,6 +11,8 @@
async?
async-future
async-cancelled?
+ activation-output activation (!activation-output activation (!activation-output activation result)))
(catch Exception e
(throw-activation-exception activation e)))))
@@ -1823,7 +1827,7 @@
(->activation-output activation (!activation-output activation result)))
+ (CompletableFuture/completedFuture (->activation-output activation result))))
(catch Exception e
(throw-activation-exception activation e)))))
@@ -1843,12 +1847,21 @@
(defn- fire-activations!
"fire all activations in order"
- [activations fire-activations-handler]
+ [activations]
+ (platform/eager-for
+ [activation activations]
+ ;;; this is the production expression, which could return an async result when computing in parallel
+ (binding [*rule-context* (->activation-rule-context activation true)]
+ (fire-activation! activation))))
+
+(defn- fire-activations-async!
+ "fire all activations in order"
+ [activations]
(platform/eager-for
[activation activations]
;;; this is the production expression, which could return an async result when computing in parallel
(binding [*rule-context* (->activation-rule-context activation true)]
- (fire-activations-handler activation))))
+ (fire-activation-async! activation))))
(defn- process-activations!
"Flush the changes and updates made during activation of the rules"
@@ -1902,7 +1915,7 @@
(do-fire-rules
transient-memory listener
(let [activations (mem/pop-activations! transient-memory 1)
- rhs-activations (fire-activations! activations fire-activation!)]
+ rhs-activations (fire-activations! activations)]
(process-activations! rhs-activations))))
(defn- fire-rules-async!
@@ -1913,8 +1926,8 @@
transient-memory listener
(let [pop-activations-batch-size (or (:parallel-batch-size options) 1)
activations (mem/pop-activations! transient-memory pop-activations-batch-size)
- rhs-activations (fire-activations! activations fire-activation-async!)]
- (process-activations! (!LocalSession rulebase
(mem/to-persistent! transient-memory)
transport
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index e08b312b..5054b07a 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -38,22 +38,38 @@
[]
[:output [{:keys [value]}] (= value ?value)])
-(def session
+(def session-50
(let [fact-seq (repeat 50 {:type :number
:value 199})
session (-> (mk-session 'clara.test-engine :fact-type-fn :type)
(insert-all fact-seq))]
session))
+(def session-10000
+ (let [fact-seq (repeat 10000 {:type :number
+ :value 199})
+ session (-> (mk-session 'clara.test-engine :fact-type-fn :type)
+ (insert-all fact-seq))]
+ session))
+
(deftest parallel-compute-engine-performance-test
- (testing "parallel compute with large batch size for non-blocking io"
+ (testing "parallel compute with large batch size for non-blocking io - 50 facts - 100 batch size"
+ (let [result (with-progress-reporting
+ (quick-benchmark
+ (-> (! (! (!
Date: Thu, 11 Jan 2024 16:01:31 -0600
Subject: [PATCH 52/87] feat: update to latest async lib
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 6 +++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d1da2523..b55a869d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.7
+* Update to latest async library
+
# 0.9.6
* Async enhancements and add engine test with thousands of async rules fired for stress test
diff --git a/deps.edn b/deps.edn
index 8d14bed3..de93da0c 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.4"}
+ com.github.k13labs/futurama {:mvn/version "0.6.5"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 2a29b0ec..ea36f973 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.6
+ 0.9.7
- 0.9.6
+ 0.9.7
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.4
+ 0.6.5
com.cnuernber
From 0db7edc3231a10782a1c8002d38384eec5c47848 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 25 Jan 2024 09:17:24 -0600
Subject: [PATCH 53/87] feat: update deps and format clj-kondo
---
.../clara/rules/hooks/clara_rules.clj_kondo | 222 +++++++++---------
deps.edn | 2 +-
2 files changed, 112 insertions(+), 112 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
index fbe18443..86c2ab60 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -112,15 +112,15 @@
(if binding-nodes
[next-bindings-set
(cond->> [[(api/vector-node
- (vec binding-nodes))
+ (vec binding-nodes))
constraint-expr]]
binding-expr-nodes
(concat [[(api/vector-node
- (vec binding-expr-nodes))
+ (vec binding-expr-nodes))
input-token]]))]
[#{}
[[(api/vector-node
- [(api/token-node '_)])
+ [(api/token-node '_)])
constraint-expr]]])]
(recur more
(concat bindings next-bindings)
@@ -143,11 +143,11 @@
(let [condition (:children condition-expr)
[result-token fact-node & condition] (if (= '<- (-> condition second node-value))
(cons (api/vector-node
- [(vary-meta
- (first condition)
- assoc ::fact-result true)]) (nnext condition))
+ [(vary-meta
+ (first condition)
+ assoc ::fact-result true)]) (nnext condition))
(cons (api/vector-node
- [(api/token-node '_)]) condition))
+ [(api/token-node '_)]) condition))
condition-bindings (cond
(nil? condition)
[]
@@ -174,20 +174,20 @@
(set)
(sort-by node-value))
output-node (api/vector-node
- (if (empty? condition-output)
- [(api/token-node '_)]
- (vec condition-output)))
+ (if (empty? condition-output)
+ [(api/token-node '_)]
+ (vec condition-output)))
output-result-node (api/vector-node
- (if (empty? condition-output)
- [(api/token-node nil)]
- (vec condition-output)))
+ (if (empty? condition-output)
+ [(api/token-node nil)]
+ (vec condition-output)))
next-bindings [output-node
(api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec (apply concat output-bindings)))
- output-result-node))]]
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec (apply concat output-bindings)))
+ output-result-node))]]
(recur more (concat bindings [next-bindings]))))))
(defn analyze-parse-query-macro
@@ -195,22 +195,22 @@
[{:keys [:node]}]
(let [input-token (api/token-node (gensym 'input))
input-args (api/vector-node
- [input-token])
+ [input-token])
[args conditions-node] (rest (:children node))
condition-seq (:children conditions-node)
special-tokens (extract-special-tokens condition-seq)
special-args (when (seq special-tokens)
(api/vector-node
- (vec special-tokens)))
+ (vec special-tokens)))
transformed-args (for [arg (:children args)]
(let [v (node-value arg)]
(if (keyword? v)
(api/token-node (symbol v))
arg)))
production-args (api/vector-node
- (if (empty? transformed-args)
- [(api/token-node '_)]
- (vec transformed-args)))
+ (if (empty? transformed-args)
+ [(api/token-node '_)]
+ (vec transformed-args)))
condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
production-bindings (apply concat
(when special-args
@@ -223,19 +223,19 @@
(set)
(sort-by node-value))
production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))))
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))))
fn-node (api/list-node
- (list
- (api/token-node 'fn)
- input-args
- production-result))
+ (list
+ (api/token-node 'fn)
+ input-args
+ production-result))
new-node (api/map-node
- [(api/keyword-node :production) fn-node])]
+ [(api/keyword-node :production) fn-node])]
{:node new-node}))
(defn analyze-defquery-macro
@@ -249,12 +249,12 @@
(first children))
input-token (api/token-node (gensym 'input))
input-args (api/vector-node
- [input-token])
+ [input-token])
[args & condition-seq] (if production-opts (rest children) children)
special-tokens (extract-special-tokens condition-seq)
special-args (when (seq special-tokens)
(api/vector-node
- (vec special-tokens)))
+ (vec special-tokens)))
transformed-args (for [arg (:children args)]
(let [v (node-value arg)
m (meta arg)]
@@ -264,9 +264,9 @@
(vary-meta merge m))
arg)))
production-args (api/vector-node
- (if (empty? transformed-args)
- [(api/token-node '_)]
- (vec transformed-args)))
+ (if (empty? transformed-args)
+ [(api/token-node '_)]
+ (vec transformed-args)))
condition-bindings (analyze-conditions condition-seq true [] input-token production-args)
production-bindings (apply concat
(when special-args
@@ -279,25 +279,25 @@
(set)
(sort-by node-value))
production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))))
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
- production-docs (concat [production-docs])
- :always (concat [input-args])
- production-opts (concat [production-opts])
- :always (concat [production-result])))
+ (cond-> (list (api/token-node 'fn))
+ production-docs (concat [production-docs])
+ :always (concat [input-args])
+ production-opts (concat [production-opts])
+ :always (concat [production-result])))
new-node (vary-meta
- (api/list-node
- (list
- (api/token-node 'def) production-name
- (api/map-node
- [(api/keyword-node :production) fn-node])))
- merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
+ (api/list-node
+ (list
+ (api/token-node 'def) production-name
+ (api/map-node
+ [(api/keyword-node :production) fn-node])))
+ merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
{:node new-node}))
(defn analyze-parse-rule-macro
@@ -305,13 +305,13 @@
[{:keys [:node]}]
(let [input-token (api/token-node (gensym 'input))
input-args (api/vector-node
- [input-token])
+ [input-token])
empty-args (api/vector-node [])
production-seq (rest (:children node))
special-tokens (extract-special-tokens production-seq)
special-args (when (seq special-tokens)
(api/vector-node
- (vec special-tokens)))
+ (vec special-tokens)))
[conditions-node body-node] production-seq
condition-seq (:children conditions-node)
condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
@@ -327,20 +327,20 @@
(set)
(sort-by node-value))
production-result (api/list-node
- (list
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))
- body-node))
+ (list
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))
+ body-node))
fn-node (api/list-node
- (list
- (api/token-node 'fn)
- input-args
- production-result))
+ (list
+ (api/token-node 'fn)
+ input-args
+ production-result))
new-node (api/map-node
- [(api/keyword-node :production) fn-node])]
+ [(api/keyword-node :production) fn-node])]
{:node new-node}))
(defn analyze-defrule-macro
@@ -354,13 +354,13 @@
(first children))
input-token (api/token-node (gensym 'input))
input-args (api/vector-node
- [input-token])
+ [input-token])
empty-args (api/vector-node [])
production-seq (if production-opts (rest children) children)
special-tokens (extract-special-tokens production-seq)
special-args (when (seq special-tokens)
(api/vector-node
- (vec special-tokens)))
+ (vec special-tokens)))
[body-seq _ condition-seq] (->> (partition-by (comp #{'=>} node-value) production-seq)
(reverse))
condition-bindings (analyze-conditions condition-seq true [] input-token empty-args)
@@ -376,26 +376,26 @@
(set)
(sort-by node-value))
production-result (api/list-node
- (list*
- (api/token-node 'let)
- (api/vector-node
- (vec production-bindings))
- (api/vector-node
- (vec production-output))
- body-seq))
+ (list*
+ (api/token-node 'let)
+ (api/vector-node
+ (vec production-bindings))
+ (api/vector-node
+ (vec production-output))
+ body-seq))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
- production-docs (concat [production-docs])
- :always (concat [input-args])
- production-opts (concat [production-opts])
- :always (concat [production-result])))
+ (cond-> (list (api/token-node 'fn))
+ production-docs (concat [production-docs])
+ :always (concat [input-args])
+ production-opts (concat [production-opts])
+ :always (concat [production-result])))
new-node (vary-meta
- (api/list-node
- (list
- (api/token-node 'def) production-name
- (api/map-node
- [(api/keyword-node :production) fn-node])))
- merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
+ (api/list-node
+ (list
+ (api/token-node 'def) production-name
+ (api/map-node
+ [(api/keyword-node :production) fn-node])))
+ merge {:clj-kondo/ignore [:clojure-lsp/unused-public-var]})]
{:node new-node}))
(defn analyze-def-rules-test-macro
@@ -409,35 +409,35 @@
(into {}))
rules-seq (for [[name-node rule-node] (partition 2 (:children rules))]
[name-node (analyze-parse-rule-macro
- {:node (api/list-node
- (list*
- (api/token-node 'clara.rules.dsl/parse-rule)
- (:children rule-node)))})])
+ {:node (api/list-node
+ (list*
+ (api/token-node 'clara.rules.dsl/parse-rule)
+ (:children rule-node)))})])
query-seq (for [[name-node query-node] (partition 2 (:children queries))]
[name-node (analyze-parse-query-macro
- {:node (api/list-node
- (list*
- (api/token-node 'clara.rules.dsl/parse-query)
- (:children query-node)))})])
+ {:node (api/list-node
+ (list*
+ (api/token-node 'clara.rules.dsl/parse-query)
+ (:children query-node)))})])
session-seq (for [[name-node productions-node options-node] (partition 3 (:children sessions))
:let [args-seq (concat (:children productions-node)
(:children options-node))]]
[name-node (api/list-node
- (list
- (api/token-node 'clara.rules.compiler/mk-session)
- (api/vector-node
- (vec args-seq))))])
+ (list
+ (api/token-node 'clara.rules.compiler/mk-session)
+ (api/vector-node
+ (vec args-seq))))])
args-seq (->> (concat rules-seq query-seq session-seq)
(apply concat))
wrap-body (api/list-node
- (list*
- (api/token-node 'let)
- (api/vector-node
- (vec args-seq))
- test-body))
+ (list*
+ (api/token-node 'let)
+ (api/vector-node
+ (vec args-seq))
+ test-body))
new-node (api/list-node
- (list
- (api/token-node 'clojure.test/deftest)
- test-name
- wrap-body))]
+ (list
+ (api/token-node 'clojure.test/deftest)
+ test-name
+ wrap-body))]
{:node new-node}))
diff --git a/deps.edn b/deps.edn
index de93da0c..dad2b6be 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.5"}
+ com.github.k13labs/futurama {:mvn/version "0.6.6"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
From b45a54f638414deef1ee0de18f6180cf00f83e6d Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 25 Jan 2024 09:20:10 -0600
Subject: [PATCH 54/87] chore: update deps
---
CHANGELOG.md | 3 +++
pom.xml | 4 ++--
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b55a869d..e55766eb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 0.9.8
+* Update to latest async library (futurama) to get fixes for async-reduce
+
# 0.9.7
* Update to latest async library
diff --git a/pom.xml b/pom.xml
index ea36f973..c3d6a657 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.7
+ 0.9.8
- 0.9.7
+ 0.9.8
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
From 30f4c6c2e765d3fd84d1b4b32ec4d7dae1646e96 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 25 Jan 2024 09:20:50 -0600
Subject: [PATCH 55/87] feat: update pom deps
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index c3d6a657..334bdbe0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.5
+ 0.6.6
com.cnuernber
From f7db8120a99599601bc2e250591938003429c263 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 25 Jan 2024 09:21:23 -0600
Subject: [PATCH 56/87] chore: fix futurama version in pom
---
pom.xml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pom.xml b/pom.xml
index 334bdbe0..eae72d27 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.8
+ 0.9.9
- 0.9.8
+ 0.9.9
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
From af3d083db059217b0207e5a7b78630d2fb0be011 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 11 Feb 2024 21:46:37 -0600
Subject: [PATCH 57/87] feat: bump deps for release 1.0.0
---
deps.edn | 2 +-
pom.xml | 6 +++---
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/deps.edn b/deps.edn
index dad2b6be..781215a4 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "0.6.6"}
+ com.github.k13labs/futurama {:mvn/version "1.0.0-SNAPSHOT"}
com.cnuernber/ham-fisted {:mvn/version "2.014"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index eae72d27..263893ed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 0.9.9
+ 1.0.0-SNAPSHOT
- 0.9.9
+ 1.0.0-SNAPSHOT
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 0.6.6
+ 1.0.0-SNAPSHOT
com.cnuernber
From 810b8943d77f5d54f6cc44f84f7ce64605f09d6e Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Mon, 12 Feb 2024 10:28:04 -0600
Subject: [PATCH 58/87] feat: initial major release 1.0.0
---
CHANGELOG.md | 7 +++++++
deps.edn | 4 ++--
pom.xml | 8 ++++----
3 files changed, 13 insertions(+), 6 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e55766eb..cb075155 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
This is a history of changes to k13labs/clara-rules.
+# 1.0.0
+* Bump futurama version to first major version
+* Initial major release of k13labs/clara-rules
+
+# 0.9.9
+* Bump futurama version to latest
+
# 0.9.8
* Update to latest async library (futurama) to get fixes for async-reduce
diff --git a/deps.edn b/deps.edn
index 781215a4..5f17beac 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,8 +3,8 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "1.0.0-SNAPSHOT"}
- com.cnuernber/ham-fisted {:mvn/version "2.014"}
+ com.github.k13labs/futurama {:mvn/version "1.0.0"}
+ com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 263893ed..cd212d5c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.0.0-SNAPSHOT
+ 1.0.0
- 1.0.0-SNAPSHOT
+ 1.0.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,12 +27,12 @@
com.github.k13labs
futurama
- 1.0.0-SNAPSHOT
+ 1.0.0
com.cnuernber
ham-fisted
- 2.014
+ 2.016
prismatic
From 98c7628ed72a60aed631e9abb944e3abba8c724c Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Mon, 12 Feb 2024 12:39:28 -0600
Subject: [PATCH 59/87] feat: bump futurama to latest version 1.0.1
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 6 +++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cb075155..4b45f75f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.0.1
+* Bump futurama version to 1.0.1 to get enhancements
+
# 1.0.0
* Bump futurama version to first major version
* Initial major release of k13labs/clara-rules
diff --git a/deps.edn b/deps.edn
index 5f17beac..d138b9e7 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "1.0.0"}
+ com.github.k13labs/futurama {:mvn/version "1.0.1"}
com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index cd212d5c..9a2c2745 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.0.0
+ 1.0.1
- 1.0.0
+ 1.0.1
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 1.0.0
+ 1.0.1
com.cnuernber
From f3166f2ca51110373851423d6b4a04e6fb10d40a Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sat, 17 Feb 2024 16:33:59 -0600
Subject: [PATCH 60/87] feat: release 1.0.2 with latest futurama version
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 6 +++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4b45f75f..12a4c658 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.0.2
+* Bump futurama version to 1.0.2 to get enhancements
+
# 1.0.1
* Bump futurama version to 1.0.1 to get enhancements
diff --git a/deps.edn b/deps.edn
index d138b9e7..832942c2 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- com.github.k13labs/futurama {:mvn/version "1.0.1"}
+ com.github.k13labs/futurama {:mvn/version "1.0.2"}
com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
org.clojure/data.fressian {:mvn/version "1.0.0"}}
diff --git a/pom.xml b/pom.xml
index 9a2c2745..1e917b1c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.0.1
+ 1.0.2
- 1.0.1
+ 1.0.2
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -27,7 +27,7 @@
com.github.k13labs
futurama
- 1.0.1
+ 1.0.2
com.cnuernber
From 4bcea939b78098b0130c44384980789c2b9ec8e1 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 19:08:41 -0600
Subject: [PATCH 61/87] feat: implement custom caching support using core
CacheProtocol
---
deps.edn | 1 +
src/main/clojure/clara/rules/compiler.clj | 58 ++++++++-------
src/test/clojure/clara/test_rules.clj | 91 +++++++++++++++--------
3 files changed, 94 insertions(+), 56 deletions(-)
diff --git a/deps.edn b/deps.edn
index 832942c2..aae9c35e 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,6 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
+ org.clojure/core.memoize {:mvn/version "1.0.257"}
com.github.k13labs/futurama {:mvn/version "1.0.2"}
com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 186ad819..734e75b4 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -5,6 +5,8 @@
(:require [clara.rules.engine :as eng]
[clara.rules.schema :as schema]
[clara.rules.platform :refer [jeq-wrap] :as platform]
+ [clojure.core.memoize :as memo]
+ [clojure.core.cache.wrapped :as cache]
[ham-fisted.api :as hf]
[ham-fisted.set :as hs]
[ham-fisted.mut-map :as hm]
@@ -35,6 +37,18 @@
[clojure.lang
IFn]))
+;; Cache of sessions for fast reloading.
+(defonce default-session-cache
+ (cache/lru-cache-factory {}))
+
+(defn clear-session-cache!
+ "Clears the cache of reusable Clara sessions, so any subsequent sessions
+ will be re-compiled from the rule definitions. This is intended for use
+ by tooling or specialized needs; most users can simply specify the :cache false
+ option when creating sessions."
+ []
+ (swap! default-session-cache empty))
+
;; Protocol for loading rules from some arbitrary source.
(defprotocol IRuleSource
(load-rules [source]))
@@ -1003,7 +1017,7 @@
(list '= (-> b name symbol)
(list b 'ancestor-bindings)))
- modified-expression `[:not {:type clara.rules.engine.NegationResult
+ modified-expression `[:not {:type NegationResult
:constraints [(~'= ~gen-rule-name ~'gen-rule-name)
~@(map ancestor-binding->restriction-form
ancestor-bindings-in-negation-expr)]}]
@@ -1413,7 +1427,8 @@
See #381 for more details."
[key->expr :- schema/NodeExprLookup
partition-size :- sc/Int]
- (let [batching-try-eval (fn [compilation-ctxs exprs]
+ (let [batching-try-eval (fn do-compile-exprs
+ [compilation-ctxs exprs]
;; Try to evaluate all of the expressions as a batch. If they fail the batch eval then we
;; try them one by one with their compilation context, this is slow but we were going to fail
;; anyway.
@@ -1853,17 +1868,6 @@
:get-alphas-fn get-alphas-fn
:node-expr-fn-lookup expr-fn-lookup})))
-;; Cache of sessions for fast reloading.
-(def ^:private session-cache (atom (hf/hash-map)))
-
-(defn clear-session-cache!
- "Clears the cache of reusable Clara sessions, so any subsequent sessions
- will be re-compiled from the rule definitions. This is intended for use
- by tooling or specialized needs; most users can simply specify the :cache false
- option when creating sessions."
- []
- (reset! session-cache (hf/hash-map)))
-
(defn production-load-order-comp [a b]
(< (-> a meta ::rule-load-order)
(-> b meta ::rule-load-order)))
@@ -2031,15 +2035,19 @@
previous
(conj! previous new-production)))
(transient #{}))
- persistent!)]
-
- (if-let [session (get @session-cache [productions options])]
- session
- (let [session (mk-session* productions options)]
-
- ;; Cache the session unless instructed not to.
- (when (get options :cache true)
- (swap! session-cache assoc [productions options] session))
-
- ;; Return the session.
- session)))))
+ persistent!)
+ options-cache (get options :cache)
+ session-cache (cond
+ (true? options-cache)
+ default-session-cache
+ (nil? options-cache)
+ default-session-cache
+ :else options-cache)
+ options (dissoc options :cache)
+ ;;; this is simpler than storing all the productions and options in the cache
+ session-key [(count productions) (hash productions) (hash options)]]
+ (if session-cache
+ (cache/lookup-or-miss session-cache session-key
+ (fn do-mk-session [_]
+ (mk-session* productions options)))
+ (mk-session* productions options)))))
diff --git a/src/test/clojure/clara/test_rules.clj b/src/test/clojure/clara/test_rules.clj
index c1e3ba78..748cd06f 100644
--- a/src/test/clojure/clara/test_rules.clj
+++ b/src/test/clojure/clara/test_rules.clj
@@ -15,6 +15,8 @@
[clojure.walk :as walk]
[clara.sample-ruleset-seq :as srs]
[clara.order-ruleset :as order-rules]
+ [clojure.core.cache :refer [CacheProtocol]]
+ [clojure.core.cache.wrapped :as cache]
[schema.test]
[schema.core :as sc]
[clara.tools.testing-utils :as tu :refer [assert-ex-data]]
@@ -1575,40 +1577,67 @@
"s3 res: " (vec (q-res s2)) \newline))))
(deftest test-equivalent-rule-sources-caching
- (is (instance? clojure.lang.IAtom @#'com/session-cache)
+ (is (and (instance? clojure.lang.IAtom (-> #'com/default-session-cache deref))
+ (satisfies? CacheProtocol (-> #'com/default-session-cache deref deref)))
"Enforce that this test is revisited if the cache structure (an implementation detail) is changed.
This test should have a clean cache but should also not impact the global cache, which
requires resetting the cache for the duration of this test.")
-
- (let [original-cache (-> #'com/session-cache deref deref)
- _ (reset! @#'com/session-cache {})
- s1 (mk-session [test-rule] :cache false)
- s2 (mk-session [test-rule])
- s3 (mk-session [test-rule test-rule])
- s4 (mk-session [test-rule] [test-rule])
-
- ;; Since functions use reference equality create a single function instance
- ;; to test reuse when options are the same.
- alternate-alpha-fn (constantly Object)
- s5 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn :cache false)
- s6 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn)
- s7 (mk-session [test-rule test-rule] :fact-type-fn alternate-alpha-fn)
- s8 (mk-session [test-rule cold-query] :fact-type-fn alternate-alpha-fn)
-
- ;; Find all distinct references, with distinctness determined by reference equality.
- ;; If the reference to a session is identical to a previous session we infer that
- ;; the session was the result of a cache hit; if the reference is not identical to
- ;; a previous session it is the result of a cache miss.
- distinct-sessions (reduce (fn [existing new]
- (if-not (some (partial identical? new)
- existing)
- (conj existing new)
- existing))
- []
- [s1 s2 s3 s4 s5 s6 s7 s8])]
- (is (= distinct-sessions
- [s1 s2 s5 s6 s8]))
- (reset! @#'com/session-cache original-cache)))
+ (testing "using default caching"
+ (com/clear-session-cache!)
+ (let [s1 (mk-session [test-rule] :cache false)
+ s2 (mk-session [test-rule])
+ s3 (mk-session [test-rule test-rule])
+ s4 (mk-session [test-rule] [test-rule])
+
+ ;; Since functions use reference equality create a single function instance
+ ;; to test reuse when options are the same.
+ alternate-alpha-fn (constantly Object)
+ s5 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn :cache false)
+ s6 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn)
+ s7 (mk-session [test-rule test-rule] :fact-type-fn alternate-alpha-fn)
+ s8 (mk-session [test-rule cold-query] :fact-type-fn alternate-alpha-fn)
+
+ ;; Find all distinct references, with distinctness determined by reference equality.
+ ;; If the reference to a session is identical to a previous session we infer that
+ ;; the session was the result of a cache hit; if the reference is not identical to
+ ;; a previous session it is the result of a cache miss.
+ distinct-sessions (reduce (fn [existing new]
+ (if-not (some (partial identical? new)
+ existing)
+ (conj existing new)
+ existing))
+ []
+ [s1 s2 s3 s4 s5 s6 s7 s8])]
+ (is (= distinct-sessions
+ [s1 s2 s5 s6 s8]))))
+ (testing "using custom cache"
+ (let [cache (cache/ttl-cache-factory {} :ttl 30000)
+ s1 (mk-session [test-rule] :cache false)
+ s2 (mk-session [test-rule] :cache cache)
+ s3 (mk-session [test-rule test-rule] :cache cache)
+ s4 (mk-session [test-rule] [test-rule] :cache cache)
+
+ ;; Since functions use reference equality create a single function instance
+ ;; to test reuse when options are the same.
+ alternate-alpha-fn (constantly Object)
+ s5 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn :cache false)
+ s6 (mk-session [test-rule] :fact-type-fn alternate-alpha-fn :cache cache)
+ s7 (mk-session [test-rule test-rule] :fact-type-fn alternate-alpha-fn :cache cache)
+ s8 (mk-session [test-rule cold-query] :fact-type-fn alternate-alpha-fn :cache cache)
+
+ ;; Find all distinct references, with distinctness determined by reference equality.
+ ;; If the reference to a session is identical to a previous session we infer that
+ ;; the session was the result of a cache hit; if the reference is not identical to
+ ;; a previous session it is the result of a cache miss.
+ distinct-sessions (reduce (fn [existing new]
+ (if-not (some (partial identical? new)
+ existing)
+ (conj existing new)
+ existing))
+ []
+ [s1 s2 s3 s4 s5 s6 s7 s8])]
+ (is (= distinct-sessions
+ [s1 s2 s5 s6 s8])))))
#_{:clj-kondo/ignore [:unresolved-symbol]}
(deftest test-try-eval-failures-includes-compile-ctx
From 58dc178697f9301efa1939847b7f5b79861380c3 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 19:12:12 -0600
Subject: [PATCH 62/87] feat: update deps and change log and prepare for
release 1.1.0
---
CHANGELOG.md | 3 +++
deps.edn | 2 +-
pom.xml | 9 +++++++--
src/main/clojure/clara/rules/compiler.clj | 1 -
4 files changed, 11 insertions(+), 4 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 12a4c658..6e2e8c02 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.1.0
+* Enhance caching support by adding session caching using core CacheProtocol
+
# 1.0.2
* Bump futurama version to 1.0.2 to get enhancements
diff --git a/deps.edn b/deps.edn
index aae9c35e..37cea21d 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,7 +3,7 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
- org.clojure/core.memoize {:mvn/version "1.0.257"}
+ org.clojure/core.cache {:mvn/version "1.0.225"}
com.github.k13labs/futurama {:mvn/version "1.0.2"}
com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
diff --git a/pom.xml b/pom.xml
index 1e917b1c..d66dadc5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.0.2
+ 1.1.0
- 1.0.2
+ 1.1.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -24,6 +24,11 @@
clojure
1.10.3
+
+ org.clojure
+ core.cache
+ 1.0.225
+
com.github.k13labs
futurama
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 734e75b4..71eb7025 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -5,7 +5,6 @@
(:require [clara.rules.engine :as eng]
[clara.rules.schema :as schema]
[clara.rules.platform :refer [jeq-wrap] :as platform]
- [clojure.core.memoize :as memo]
[clojure.core.cache.wrapped :as cache]
[ham-fisted.api :as hf]
[ham-fisted.set :as hs]
From d8594363d8b69caff36715c0979210a58dc5bead Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 22:12:49 -0600
Subject: [PATCH 63/87] feat: add support for a compiler cache which can cache
exprs eval'd
---
src/main/clojure/clara/rules/compiler.clj | 64 ++++++++++++++-----
.../clara/rules/durability/fressian.clj | 7 +-
src/test/clojure/clara/long_running_tests.clj | 4 +-
src/test/clojure/clara/test_durability.clj | 5 +-
src/test/clojure/clara/test_engine.clj | 2 +-
5 files changed, 58 insertions(+), 24 deletions(-)
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 71eb7025..73264f3a 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -38,7 +38,11 @@
;; Cache of sessions for fast reloading.
(defonce default-session-cache
- (cache/lru-cache-factory {}))
+ (cache/lru-cache-factory {} :threshold 100))
+
+;; Cache of compiled expressions
+(defonce default-compiler-cache
+ (cache/lru-cache-factory {} :threshold 5000))
(defn clear-session-cache!
"Clears the cache of reusable Clara sessions, so any subsequent sessions
@@ -48,6 +52,14 @@
[]
(swap! default-session-cache empty))
+(defn clear-compiler-cache!
+ "Clears the default compiler cache of re-usable compiled expressions, so any
+ subsequent expressions will be re-compiled. This is intended for use by tooling
+ or during testing; most users can simply specify the :compiler-cache false
+ option when creating sessions."
+ []
+ (swap! default-compiler-cache empty))
+
;; Protocol for loading rules from some arbitrary source.
(defprotocol IRuleSource
(load-rules [source]))
@@ -1425,14 +1437,29 @@
   This allows the eval calls to be more efficient, rather than evaluating each expression on its own.
See #381 for more details."
[key->expr :- schema/NodeExprLookup
+ expr-cache :- (sc/maybe sc/Any)
partition-size :- sc/Int]
- (let [batching-try-eval (fn do-compile-exprs
- [compilation-ctxs exprs]
+ (let [prepare-expr (fn prepare-expr
+ [[expr-key [expr compilation-ctx]]]
+ (let [cache-key [(hash expr) (hash compilation-ctx)]]
+ (if-let [compiled-expr (and expr-cache
+ (cache/lookup expr-cache cache-key))]
+ [:compiled [expr-key [compiled-expr compilation-ctx]]]
+ [:prepared [expr-key [expr compilation-ctx]]])))
+ batching-try-eval (fn do-compile-exprs
+ [exprs compilation-ctxs]
;; Try to evaluate all of the expressions as a batch. If they fail the batch eval then we
;; try them one by one with their compilation context, this is slow but we were going to fail
;; anyway.
(try
- (mapv vector (eval exprs) compilation-ctxs)
+ (mapv
+ (fn cache-expr
+ [expr compiled-expr compilation-ctx]
+ (let [cache-key [(hash expr) (hash compilation-ctx)]]
+ (when expr-cache
+ (cache/miss expr-cache cache-key compiled-expr))
+ [compiled-expr compilation-ctx]))
+ exprs (eval exprs) compilation-ctxs)
(catch Exception e
;; Using mapv here rather than map to avoid laziness, otherwise compilation failure might
;; fall into the throw below for the wrong reason.
@@ -1456,19 +1483,23 @@
;; Grouping by ns, most expressions will not have a defined ns, only expressions from production nodes.
;; These expressions must be evaluated in the ns where they were defined, as they will likely contain code
;; that is namespace dependent.
- (for [[nspace group] (sort-by key (group-by (comp :ns second val) key->expr))
+ (for [[nspace ns-expr-group] (sort-by key (group-by (comp :ns second val) key->expr))
;; Partitioning the number of forms to be evaluated, Java has a limit to the size of methods if we were
;; evaluate all expressions at once it would likely exceed this limit and throw an exception.
- expr-batch (partition-all partition-size group)
- :let [node-expr-keys (mapv first expr-batch)
- compilation-ctxs (mapv (comp second val) expr-batch)
- exprs (mapv (comp first val) expr-batch)]]
- (mapv vector
- node-expr-keys
- (with-bindings (if nspace
- {#'*ns* (the-ns nspace)}
- {})
- (batching-try-eval compilation-ctxs exprs)))))))
+ expr-batch (partition-all partition-size ns-expr-group) ;;;; [ [ ]]
+ :let [grouped-exprs (->> (mapv prepare-expr expr-batch)
+ (group-by first))
+ prepared-exprs (map second (:prepared grouped-exprs))
+ node-expr-keys (mapv first prepared-exprs)
+ exprs (mapv (comp first second) prepared-exprs)
+ compilation-ctxs (mapv (comp second second) prepared-exprs)
+ prev-compiled-exprs (map second (:compiled grouped-exprs))
+ next-compiled-exprs (mapv vector node-expr-keys
+ (with-bindings (if nspace
+ {#'*ns* (the-ns nspace)}
+ {})
+ (batching-try-eval exprs compilation-ctxs)))]]
+ (concat prev-compiled-exprs next-compiled-exprs)))))
(defn safe-get
"A helper function for retrieving a given key from the provided map. If the key doesn't exist within the map this
@@ -1947,6 +1978,7 @@
id-counter (atom 0)
create-id-fn (fn [] (swap! id-counter inc))
+ compiler-cache (:compiler-cache options default-compiler-cache)
forms-per-eval (:forms-per-eval options forms-per-eval-default)
beta-graph (to-beta-graph productions create-id-fn)
@@ -1954,7 +1986,7 @@
;; Extract the expressions from the graphs and evaluate them in a batch manner.
;; This is a performance optimization, see Issue 381 for more information.
- exprs (compile-exprs (extract-exprs beta-graph alpha-graph) forms-per-eval)
+ exprs (compile-exprs (extract-exprs beta-graph alpha-graph) compiler-cache forms-per-eval)
;; If we have made it to this point, it means that we have succeeded in compiling all expressions
;; thus we can free the :compile-ctx used for troubleshooting compilation failures.
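For illustration, a minimal REPL sketch of the wrapped-cache lookup/miss pattern the new compiler cache relies on, using only clojure.core.cache.wrapped; the expression, compilation context, and key below are placeholders rather than values taken from the codebase:

    (require '[clojure.core.cache.wrapped :as cache])

    ;; An LRU cache wrapped in an atom, mirroring the default compiler cache above.
    (def expr-cache (cache/lru-cache-factory {} :threshold 5000))

    ;; Illustrative cache key: hashes of the expression and its compilation context.
    (def cache-key [(hash '(fn [x] (inc x))) (hash {:msg "compiling alpha node"})])

    ;; On a hit, reuse the previously compiled fn; on a miss, eval it and record it.
    (if-let [compiled-expr (cache/lookup expr-cache cache-key)]
      compiled-expr
      (let [compiled-expr (eval '(fn [x] (inc x)))]
        (cache/miss expr-cache cache-key compiled-expr)
        compiled-expr))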
diff --git a/src/main/clojure/clara/rules/durability/fressian.clj b/src/main/clojure/clara/rules/durability/fressian.clj
index 8d6cea62..4149982d 100644
--- a/src/main/clojure/clara/rules/durability/fressian.clj
+++ b/src/main/clojure/clara/rules/durability/fressian.clj
@@ -590,7 +590,9 @@
(deserialize [_ mem-facts opts]
(with-open [^FressianReader rdr (fres/create-reader in-stream :handlers read-handler-lookup)]
- (let [{:keys [rulebase-only? base-rulebase forms-per-eval]} opts
+ (let [{:keys [rulebase-only?
+ base-rulebase
+ forms-per-eval]} opts
record-holder (ArrayList.)
;; The rulebase should either be given from the base-session or found in
@@ -599,6 +601,7 @@
base-rulebase)
forms-per-eval (or forms-per-eval com/forms-per-eval-default)
+ compiler-cache (get opts :compiler-cache com/default-compiler-cache)
reconstruct-expressions (fn [expr-lookup]
;; Rebuilding the expr-lookup map from the serialized map:
@@ -615,7 +618,7 @@
d/clj-struct-holder record-holder]
(pform/thread-local-binding [d/node-fn-cache (-> (fres/read-object rdr)
reconstruct-expressions
- (com/compile-exprs forms-per-eval))]
+ (com/compile-exprs compiler-cache forms-per-eval))]
(assoc (fres/read-object rdr)
:node-expr-fn-lookup
(.get d/node-fn-cache))))]
diff --git a/src/test/clojure/clara/long_running_tests.clj b/src/test/clojure/clara/long_running_tests.clj
index d300c92e..09ba7376 100644
--- a/src/test/clojure/clara/long_running_tests.clj
+++ b/src/test/clojure/clara/long_running_tests.clj
@@ -22,7 +22,7 @@
;; Validating that there were 5472 forms in the eval call.
(is (= 5472 (count (-> e ex-data :compilation-ctxs))))
- (is (re-find #"method size exceeded" (.getMessage e)))
+ (is (re-find #"method size exceeded" (ex-message e)))
;; Validate that the stated 5471 forms per eval will compile
- (is (sc/without-fn-validation (com/mk-session (conj (vector rules) :forms-per-eval 5471 :cache false))))))
\ No newline at end of file
+ (is (sc/without-fn-validation (com/mk-session (conj (vector rules) :forms-per-eval 5471 :cache false))))))
diff --git a/src/test/clojure/clara/test_durability.clj b/src/test/clojure/clara/test_durability.clj
index 42897e9b..9eb4a4f2 100644
--- a/src/test/clojure/clara/test_durability.clj
+++ b/src/test/clojure/clara/test_durability.clj
@@ -423,7 +423,7 @@
;; Simulate deserializing in an environment without this var by unmapping it.
(ns-unmap 'clara.test-durability 'test-compilation-ctx-var)
(try
- (rb-serde without-compile-ctx nil)
+ (rb-serde without-compile-ctx {:compiler-cache false})
(is false "Error not thrown when deserializing the rulebase without ctx")
(catch Exception e
;; In the event that the compilation context is not retained the original condition of the node will not be present.
@@ -433,10 +433,9 @@
(tu/get-all-ex-data e)))))
(try
- (rb-serde with-compile-ctx nil)
+ (rb-serde with-compile-ctx {:compiler-cache false})
(is false "Error not thrown when deserializing the rulebase with ctx")
(catch Exception e
-
(is (some #(= (select-keys (:condition %) [:type :constraints])
{:type Long
:constraints ['(== this clara.test-durability/test-compilation-ctx-var)]})
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index 5054b07a..2023d6ba 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -41,7 +41,7 @@
(def session-50
(let [fact-seq (repeat 50 {:type :number
:value 199})
- session (-> (mk-session 'clara.test-engine :fact-type-fn :type)
+ session (-> (mk-session 'clara.test-engine :fact-type-fn :type :cache false)
(insert-all fact-seq))]
session))
From 1e0c118aa6c736e844ea79005700bdf6bdb2d673 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 23:14:11 -0600
Subject: [PATCH 64/87] feat: don't include node id on compiled fns
---
src/main/clojure/clara/rules/compiler.clj | 4 +---
src/test/clojure/clara/test_compiler.clj | 4 +---
2 files changed, 2 insertions(+), 6 deletions(-)
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 73264f3a..b1b12701 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -327,7 +327,7 @@
filter functions."
[node-type node-id fn-type]
(if-let [abbreviated-node-type (get eng/node-type->abbreviated-type node-type)]
- (symbol (str abbreviated-node-type "-" node-id "-" fn-type))
+ (with-meta (symbol (str abbreviated-node-type "-" fn-type)) {:node-id node-id})
(throw (ex-info "Unrecognized node type"
{:node-type node-type
:node-id node-id
@@ -358,7 +358,6 @@
;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-condition' to be used for this scenario
fn-name (mk-node-fn-name "AlphaNode" node-id "AE")]
-
`(fn ~fn-name [~(add-meta '?__fact__ type)
~destructured-env]
(let [~@assignments
@@ -481,7 +480,6 @@
;; JFE will stand for JoinFilterExpr
fn-name (mk-node-fn-name node-type node-id "JFE")]
-
`(fn ~fn-name
[~'?__token__
~(add-meta '?__fact__ type)
diff --git a/src/test/clojure/clara/test_compiler.clj b/src/test/clojure/clara/test_compiler.clj
index 144066b5..082fd515 100644
--- a/src/test/clojure/clara/test_compiler.clj
+++ b/src/test/clojure/clara/test_compiler.clj
@@ -52,9 +52,7 @@
RootJoinNode []))]
(doseq [node (-> base-session eng/components :rulebase :id-to-node vals)
node-fn (get-node-fns node)]
- (is (seq (re-find (re-pattern (str (get eng/node-type->abbreviated-type (.getSimpleName (class node)))
- "-"
- (:id node)))
+ (is (seq (re-find (re-pattern (str (get eng/node-type->abbreviated-type (.getSimpleName (class node)))))
(-> node-fn str m/demunge (str/split #"/") last)))
(str "For node: " node " and node-fn: " node-fn)))))
From 77f32e081597050d1d12dbde3fd5957327a11391 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 23:24:27 -0600
Subject: [PATCH 65/87] chore: set next dev version 1.2.0-SNAPSHOT
---
pom.xml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pom.xml b/pom.xml
index d66dadc5..19e25fed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.1.0
+ 1.2.0-SNAPSHOT
- 1.1.0
+ 1.2.0-SNAPSHOT
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
From 2ea09ccc7d43605c91a84a0f4e6a16e165f219fb Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 18 Feb 2024 23:30:16 -0600
Subject: [PATCH 66/87] chore: update changelog and docs
---
CHANGELOG.md | 3 +++
src/main/clojure/clara/rules.clj | 5 ++++-
src/main/clojure/clara/rules/durability.clj | 3 +++
3 files changed, 10 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6e2e8c02..2d7a3766 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.2.0
+* Enhance caching support by adding compile caching using core CacheProtocol
+
# 1.1.0
* Enhance caching support by adding session caching using core CacheProtocol
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index e07bec08..a706cdda 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -185,7 +185,10 @@
* :fact-type-fn, which must have a value of a function used to determine the logical type of a given
fact. Defaults to Clojure's type function.
- * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session.
+   * :cache, indicating whether the session creation can be cached, effectively memoizing mk-session;
+     valid values are true, false, or a wrapped CacheProtocol instance.
+   * :compiler-cache, indicating whether expression compilation can be cached, effectively memoizing expression evaluation;
+     valid values are true, false, or a wrapped CacheProtocol instance.
Defaults to true. Callers may wish to set this to false when needing to dynamically reload rules.
* :ancestors-fn, which returns a collection of ancestors for a given type. Defaults to Clojure's ancestors function. A
fact of a given type will match any rule that uses one of that type's ancestors. Note that if the collection is ordered
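As a usage sketch of the two caching options documented above: either option accepts a wrapped cache built with clojure.core.cache.wrapped, or false to disable caching; the rule namespace example.rules is hypothetical:

    (require '[clara.rules :refer [mk-session]]
             '[clojure.core.cache.wrapped :as cache])

    ;; Wrapped caches (atoms holding a CacheProtocol value) shared across calls.
    (def my-session-cache  (cache/lru-cache-factory {}))
    (def my-compiler-cache (cache/soft-cache-factory {}))

    ;; Reuse both caches when building a session from a (hypothetical) rule namespace.
    (mk-session 'example.rules
                :cache my-session-cache
                :compiler-cache my-compiler-cache)

    ;; Or opt out of caching entirely, e.g. when rules are reloaded dynamically.
    (mk-session 'example.rules :cache false :compiler-cache false)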
diff --git a/src/main/clojure/clara/rules/durability.clj b/src/main/clojure/clara/rules/durability.clj
index 41f1149a..d2ac5023 100644
--- a/src/main/clojure/clara/rules/durability.clj
+++ b/src/main/clojure/clara/rules/durability.clj
@@ -549,6 +549,9 @@
Larger batch sizes should see better performance compared to smaller batch sizes.
     Defaults to 5000; see clara.rules.compiler/forms-per-eval-default for more information.
+   * :compiler-cache, indicating whether expression compilation can be cached, effectively memoizing expression evaluation;
+     valid values are true, false, or a wrapped CacheProtocol instance.
+
Options for the rulebase semantics that are documented at clara.rules/mk-session include:
* :fact-type-fn
From 7b14dd7a79d7ca4719a0448f23fd02e916ad0e15 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Mon, 19 Feb 2024 09:40:47 -0600
Subject: [PATCH 67/87] feat: release 1.2.0
---
pom.xml | 4 ++--
src/main/clojure/clara/rules/compiler.clj | 4 ++--
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/pom.xml b/pom.xml
index 19e25fed..5ed6daed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.2.0-SNAPSHOT
+ 1.2.0
- 1.2.0-SNAPSHOT
+ 1.2.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index b1b12701..cfe2cfcf 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -38,11 +38,11 @@
;; Cache of sessions for fast reloading.
(defonce default-session-cache
- (cache/lru-cache-factory {} :threshold 100))
+ (cache/lru-cache-factory {}))
;; Cache of compiled expressions
(defonce default-compiler-cache
- (cache/lru-cache-factory {} :threshold 5000))
+ (cache/soft-cache-factory {}))
(defn clear-session-cache!
"Clears the cache of reusable Clara sessions, so any subsequent sessions
From eab3bcc1eed2aeb8d7b8f86d2ff8e052d5fa3c92 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 21 Feb 2024 10:08:22 -0600
Subject: [PATCH 68/87] feat: enhancements of compiler hotspots using ham
fisted mutables
---
dev/user.clj | 31 ++
src/main/clojure/clara/rules/compiler.clj | 475 +++++++++++-----------
src/main/clojure/clara/rules/schema.clj | 19 +-
3 files changed, 274 insertions(+), 251 deletions(-)
diff --git a/dev/user.clj b/dev/user.clj
index 9c8e1233..c53bff9f 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -3,6 +3,10 @@
quick-benchmark] :as crit]
[clara.rules.platform :refer [compute-for]]
[clojure.core.async :refer [go timeout "foobar"))
+
+(defonce session-cache
+ (cache/lru-cache-factory {}))
+
+;; Cache of compiled expressions
+(defonce compiler-cache
+ (cache/soft-cache-factory {}))
+
+(defonce rules
+ (vec
+ (for [n (range 5000)
+ :let [fact-type (keyword (str "fact" n))]]
+ {:ns-name (ns-name *ns*)
+ :lhs [{:type fact-type
+ :constraints []}]
+ :rhs `(println ~(str fact-type))})))
+
+(comment
+ (count (.cache ^clojure.core.cache.SoftCache @compiler-cache))
+
+ (time
+ (mk-session (conj rules {:ns-name (ns-name *ns*)
+ :lhs [{:type :foobar12
+ :constraints []}]
+ :rhs `(println ~(str :foobar))})
+ :cache false
+ :compiler-cache compiler-cache)))
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index cfe2cfcf..dba7caab 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -711,7 +711,6 @@
;; Flag indicating it is an accumulator.
:is-accumulator sc/Bool}
[condition :- schema/Condition]
-
(let [dnf-condition (to-dnf condition)
;; Break the condition up into leafs
@@ -785,7 +784,6 @@
"Performs a topologic sort of conditions to ensure variables needed by
child conditions are bound."
[conditions :- [schema/Condition]]
-
;; Get the bound and unbound variables for all conditions and sort them.
(let [classified-conditions (map analyze-condition conditions)]
@@ -853,7 +851,7 @@
"Returns a set of unifications that do not use equality-based checks."
[constraints]
(let [[bound-variables unbound-variables] (classify-variables constraints)]
- (into #{}
+ (into (hs/set)
(for [constraint constraints
:when (non-equality-unification? constraint bound-variables)]
constraint))))
@@ -887,7 +885,7 @@
(= :negation node-type)
(= :join node-type))
(non-equality-unifications (:constraints condition))
- #{})
+ (hs/set))
        ;; If there are any non-equality unifications, create a join with filter expression to handle them.
join-filter-expressions (if (seq non-equality-unifications)
@@ -943,41 +941,32 @@
source-ids :- [sc/Int]
target-id :- sc/Int
target-node :- (sc/either schema/ConditionNode schema/ProductionNode)]
+ (hf/assoc! (:id-to-new-bindings beta-graph) target-id
+ ;; A ProductionNode will not have the set of new bindings previously created,
+ ;; so we assign them an empty set here. A ConditionNode will always have a set of new bindings,
+ ;; even if it is an empty one; this is enforced by the schema. Having an empty set of new bindings
+ ;; is a valid and meaningful state that just means that all join bindings in that node come from right-activations
+ ;; earlier in the network.
+ (or (:new-bindings target-node)
+ (hs/set)))
+ (if (#{:production :query} (:node-type target-node))
+ (hf/assoc! (:id-to-production-node beta-graph) target-id target-node)
+ (hf/assoc! (:id-to-condition-node beta-graph) target-id target-node))
;; Add the production or condition to the network.
- (let [beta-graph (assoc-in beta-graph [:id-to-new-bindings target-id]
- ;; A ProductionNode will not have the set of new bindings previously created,
- ;; so we assign them an empty set here. A ConditionNode will always have a set of new bindings,
- ;; even if it is an empty one; this is enforced by the schema. Having an empty set of new bindings
- ;; is a valid and meaningful state that just means that all join bindings in that node come from right-activations
- ;; earlier in the network.
- (or (:new-bindings target-node)
- #{}))
-
- beta-graph (if (#{:production :query} (:node-type target-node))
- (assoc-in beta-graph [:id-to-production-node target-id] target-node)
- (assoc-in beta-graph [:id-to-condition-node target-id] target-node))]
-
-;; Associate the forward and backward edges.
- (reduce (fn [beta-graph source-id]
- (let [forward-path [:forward-edges source-id]
- forward-previous (get-in beta-graph forward-path)
- backward-path [:backward-edges target-id]
- backward-previous (get-in beta-graph backward-path)]
- (-> beta-graph
- (assoc-in
- forward-path
- (if forward-previous
- (conj forward-previous target-id)
- (sorted-set target-id)))
- (assoc-in
- backward-path
- (if backward-previous
- (conj backward-previous source-id)
- (sorted-set source-id))))))
-
- beta-graph
- source-ids)))
+ ;; Associate the forward and backward edges.
+ (doseq [source-id source-ids]
+ (hm/compute! (:forward-edges beta-graph) source-id
+ (fn [_ forward-previous]
+ (if forward-previous
+ (conj forward-previous target-id)
+ (sorted-set target-id))))
+ (hm/compute! (:backward-edges beta-graph) target-id
+ (fn [_ backward-previous]
+ (if backward-previous
+ (conj backward-previous source-id)
+ (sorted-set source-id)))))
+ beta-graph)
(declare add-production)
@@ -1056,11 +1045,13 @@
:beta-with-negations beta-graph}))
;; A beta graph with no nodes.
-(def ^:private empty-beta-graph {:forward-edges (sorted-map)
- :backward-edges (sorted-map)
- :id-to-condition-node (sorted-map 0 ::root-condition)
- :id-to-production-node (sorted-map)
- :id-to-new-bindings (sorted-map)})
+(defn ^:private empty-beta-graph
+ []
+ {:forward-edges (hf/mut-long-map)
+ :backward-edges (hf/mut-long-map)
+ :id-to-condition-node (hf/mut-long-map {0 ::root-condition})
+ :id-to-production-node (hf/mut-long-map)
+ :id-to-new-bindings (hf/mut-long-map)})
(sc/defn ^:private add-conjunctions :- {:beta-graph schema/BetaGraph
:new-ids [sc/Int]
@@ -1074,7 +1065,6 @@
ancestor-bindings :- #{sc/Keyword}
beta-graph :- schema/BetaGraph
create-id-fn]
-
(loop [beta-graph beta-graph
parent-ids parent-ids
bindings ancestor-bindings
@@ -1101,7 +1091,7 @@
vals
;; Order doesn't matter here as we will effectively sort it using update-node->id later,
;; thus adding determinism.
- (into #{} cat)))
+ (into (hs/set) cat)))
id-to-condition-nodes (:id-to-condition-node beta-graph)
@@ -1117,20 +1107,18 @@
;; all of the old children for a prior match; for each additional child added we incur the assoc, but the hash code
;; has already been cached by prior iterations of add-conjunctions. In these cases we have seen that hashing
;; will outperform equivalence checks.
- update-node->ids (fn [m id]
- (let [node (get id-to-condition-nodes id)
- prev (get m node)]
- (if prev
- ;; There might be nodes with multiple representation under the same parent, this would
- ;; likely be due to nodes that have the same condition but do not share all the same
- ;; parents, see Issue 433 for more information
- (assoc! m node (conj prev id))
- (assoc! m node [id]))))
-
- node->ids (-> (reduce update-node->ids
- (transient (hf/hash-map))
- forward-edges)
- persistent!)
+ update-node->ids (fn update-node-ids [m id]
+ (hm/compute! m (get id-to-condition-nodes id)
+ (fn do-update-node-ids
+ [_ node-ids]
+ (if node-ids
+ (conj node-ids id)
+ [id])))
+ m)
+
+ node->ids (reduce update-node->ids
+ (hf/mut-hashtable-map)
+ forward-edges)
backward-edges (:backward-edges beta-graph)
@@ -1269,17 +1257,17 @@
"Produces a description of the beta network."
[productions :- #{schema/Production}
create-id-fn :- IFn]
- (reduce (fn [beta-graph production]
+ (reduce (fn add-to-beta-graph
+ [beta-graph production]
(binding [*compile-ctx* {:production production}]
(add-production production beta-graph create-id-fn)))
-
- empty-beta-graph
+ (empty-beta-graph)
productions))
(sc/defn ^:private root-node? :- sc/Bool
"A helper function to determine if the node-id provided is a root node. A given node would be considered a root-node,
if its only backward edge was that of the artificial root node, node 0."
- [backward-edges :- {sc/Int #{sc/Int}}
+ [backward-edges :- schema/MutableLongHashMap
node-id :- sc/Int]
(= #{0} (get backward-edges node-id)))
@@ -1295,12 +1283,14 @@
alpha-graph :- [schema/AlphaNode]]
(let [backward-edges (:backward-edges beta-graph)
handle-expr (fn [id->expr s-expr id expr-key compilation-ctx]
- (assoc id->expr
- [id expr-key]
- [s-expr (assoc compilation-ctx expr-key s-expr)]))
+ (hf/assoc! id->expr
+ [id expr-key]
+ [s-expr (assoc compilation-ctx expr-key s-expr)])
+ id->expr)
;; If extract-exprs ever became a hot spot, this could be changed out to use more java interop.
- id->expr (reduce (fn [prev alpha-node]
+ id->expr (reduce (fn add-alpha-nodes
+ [prev alpha-node]
(let [{:keys [id condition env]} alpha-node
{:keys [type constraints fact-binding args]} condition
cmeta (meta condition)]
@@ -1318,9 +1308,10 @@
:compile-ctx {:condition condition
:env env
:msg "compiling alpha node"}})))
- (hf/hash-map)
+ (hf/mut-map)
alpha-graph)
- id->expr (reduce-kv (fn [prev id production-node]
+ id->expr (reduce-kv (fn add-action-nodes
+ [prev id production-node]
(let [production (-> production-node :production)]
(handle-expr prev
(with-meta (compile-action id
@@ -1330,105 +1321,106 @@
(meta (:rhs production)))
id
:action-expr
- ;; ProductionNode expressions can be arbitrary code, therefore we need the
- ;; ns where the production was define so that we can compile the expression
- ;; later.
+ ;; ProductionNode expressions can be arbitrary code, therefore we need the
+                                             ;; ns where the production was defined so that we can compile the expression
+ ;; later.
{:ns (:ns-name production)
:file (-> production :rhs meta :file)
:compile-ctx {:production production
:msg "compiling production node"}})))
id->expr
- (:id-to-production-node beta-graph))]
- (reduce-kv
- (fn [prev id beta-node]
- (let [condition (:condition beta-node)
- condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
- condition)]
- (case (or (:node-type beta-node)
- ;; If there is no :node-type then the node is the ::root-condition
- ;; however, creating a case for nil could potentially cause weird effects if something about the
- ;; compilation of the beta graph changes. Therefore making an explicit case for ::root-condition
- ;; and if there was anything that wasn't ::root-condition this case statement will fail rather than
- ;; failing somewhere else.
- beta-node)
- ::root-condition prev
-
- :join (if (or (root-node? backward-edges id)
- (not (:join-filter-expressions beta-node)))
- ;; This is either a RootJoin or HashJoin node, in either case they do not have an expression
- ;; to capture.
- prev
- (handle-expr prev
- (compile-join-filter id
- "ExpressionJoinNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling expression join node"}}))
- :negation (if (:join-filter-expressions beta-node)
- (handle-expr prev
- (compile-join-filter id
- "NegationWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling negation with join filter node"}})
- prev)
- :test (handle-expr prev
- (compile-test id (:constraints condition) (:env beta-node))
- id
- :test-expr
- {:compile-ctx {:condition condition
- :env (:env beta-node)
- :msg "compiling test node"}})
- :accumulator (cond-> (handle-expr prev
- (compile-accum id
- (if (:join-filter-expressions beta-node)
- "AccumulateWithJoinFilterNode"
- "AccumulateNode")
- (:accumulator beta-node)
- (:env beta-node))
+ (:id-to-production-node beta-graph))
+ id->expr (reduce-kv
+ (fn add-conditions [prev id beta-node]
+ (let [condition (:condition beta-node)
+ condition (if (symbol? condition)
+ (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ condition)]
+ (case (or (:node-type beta-node)
+ ;; If there is no :node-type then the node is the ::root-condition
+ ;; however, creating a case for nil could potentially cause weird effects if something about the
+ ;; compilation of the beta graph changes. Therefore making an explicit case for ::root-condition
+ ;; and if there was anything that wasn't ::root-condition this case statement will fail rather than
+ ;; failing somewhere else.
+ beta-node)
+ ::root-condition prev
+
+ :join (if (or (root-node? backward-edges id)
+ (not (:join-filter-expressions beta-node)))
+ ;; This is either a RootJoin or HashJoin node, in either case they do not have an expression
+ ;; to capture.
+ prev
+ (handle-expr prev
+ (compile-join-filter id
+ "ExpressionJoinNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
id
- :accum-expr
+ :join-filter-expr
{:compile-ctx {:condition condition
- :accumulator (:accumulator beta-node)
+ :join-filter-expressions (:join-filter-expressions beta-node)
:env (:env beta-node)
- :msg "compiling accumulator"}})
-
- (:join-filter-expressions beta-node)
- (handle-expr (compile-join-filter id
- "AccumulateWithJoinFilterNode"
- (:join-filter-expressions beta-node)
- (:join-filter-join-bindings beta-node)
- (:new-bindings beta-node)
- (:env beta-node))
- id
- :join-filter-expr
- {:compile-ctx {:condition condition
- :join-filter-expressions (:join-filter-expressions beta-node)
- :env (:env beta-node)
- :msg "compiling accumulate with join filter node"}}))
- :query prev
-
- ;; This error should only be thrown if there are changes to the compilation of the beta-graph
- ;; such as an addition of a node type.
- (throw (ex-info "Invalid node type encountered while compiling rulebase."
- {:node beta-node})))))
- id->expr
- (:id-to-condition-node beta-graph))))
+ :msg "compiling expression join node"}}))
+ :negation (if (:join-filter-expressions beta-node)
+ (handle-expr prev
+ (compile-join-filter id
+ "NegationWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling negation with join filter node"}})
+ prev)
+ :test (handle-expr prev
+ (compile-test id (:constraints condition) (:env beta-node))
+ id
+ :test-expr
+ {:compile-ctx {:condition condition
+ :env (:env beta-node)
+ :msg "compiling test node"}})
+ :accumulator (cond-> (handle-expr prev
+ (compile-accum id
+ (if (:join-filter-expressions beta-node)
+ "AccumulateWithJoinFilterNode"
+ "AccumulateNode")
+ (:accumulator beta-node)
+ (:env beta-node))
+ id
+ :accum-expr
+ {:compile-ctx {:condition condition
+ :accumulator (:accumulator beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulator"}})
+
+ (:join-filter-expressions beta-node)
+ (handle-expr (compile-join-filter id
+ "AccumulateWithJoinFilterNode"
+ (:join-filter-expressions beta-node)
+ (:join-filter-join-bindings beta-node)
+ (:new-bindings beta-node)
+ (:env beta-node))
+ id
+ :join-filter-expr
+ {:compile-ctx {:condition condition
+ :join-filter-expressions (:join-filter-expressions beta-node)
+ :env (:env beta-node)
+ :msg "compiling accumulate with join filter node"}}))
+ :query prev
+
+ ;; This error should only be thrown if there are changes to the compilation of the beta-graph
+ ;; such as an addition of a node type.
+ (throw (ex-info "Invalid node type encountered while compiling rulebase."
+ {:node beta-node})))))
+ id->expr
+ (:id-to-condition-node beta-graph))]
+ (persistent! id->expr)))
(sc/defn compile-exprs :- schema/NodeFnLookup
"Takes a map in the form produced by extract-exprs and evaluates the values(expressions) of the map in a batched manner.
@@ -1518,7 +1510,6 @@
children :- [sc/Any]
expr-fn-lookup :- schema/NodeFnLookup
new-bindings :- #{sc/Keyword}]
-
(let [{:keys [condition production query join-bindings env]} beta-node
condition (if (symbol? condition)
@@ -1631,7 +1622,7 @@
query
(:params query)))))
-(sc/defn ^:private compile-beta-graph :- {sc/Int sc/Any}
+(sc/defn ^:private compile-beta-graph :- schema/MutableLongHashMap
"Compile the beta description to the nodes used at runtime."
[{:keys [id-to-production-node id-to-condition-node id-to-new-bindings forward-edges backward-edges]} :- schema/BetaGraph
expr-fn-lookup :- schema/NodeFnLookup]
@@ -1653,47 +1644,47 @@
(recur (set/difference pending-ids newly-satisfied-ids)
updated-edges
- (concat sorted-nodes newly-satisfied-ids)))))]
-
- (reduce (fn [id-to-compiled-nodes id-to-compile]
-
- ;; Get the condition or production node to compile
- (let [node-to-compile (get id-to-condition-node
- id-to-compile
- (get id-to-production-node id-to-compile))
-
- ;; Get the children. The children should be sorted because the
- ;; id-to-compiled-nodes map is sorted.
- children (->> (get forward-edges id-to-compile)
- (select-keys id-to-compiled-nodes)
- (vals))]
-
- ;; Sanity check for our logic...
- (assert (= (count children)
- (count (get forward-edges id-to-compile)))
- "Each child should be compiled.")
-
- (if (= ::root-condition node-to-compile)
- id-to-compiled-nodes
- (assoc id-to-compiled-nodes
- id-to-compile
- (compile-node node-to-compile
- id-to-compile
- (root-node? backward-edges id-to-compile)
- children
- expr-fn-lookup
- (get id-to-new-bindings id-to-compile))))))
- ;; The node IDs have been determined before now, so we just need to sort the map returned.
- ;; This matters because the engine will left-activate the beta roots with the empty token
- ;; in the order that this map is seq'ed over.
- (sorted-map)
- ids-to-compile)))
+ (concat sorted-nodes newly-satisfied-ids)))))
+ id-to-compiled-node
+ (reduce (fn [id-to-compiled-nodes id-to-compile]
+
+ ;; Get the condition or production node to compile
+ (let [node-to-compile (get id-to-condition-node
+ id-to-compile
+ (get id-to-production-node id-to-compile))
+
+ ;; Get the children. The children should be sorted because the
+ ;; id-to-compiled-nodes map is sorted.
+ children (->> (get forward-edges id-to-compile)
+ (select-keys id-to-compiled-nodes)
+ (vals))]
+
+ ;; Sanity check for our logic...
+ (assert (= (count children)
+ (count (get forward-edges id-to-compile)))
+ "Each child should be compiled.")
+
+ (when (not= ::root-condition node-to-compile)
+ (hf/assoc! id-to-compiled-nodes
+ id-to-compile
+ (compile-node node-to-compile
+ id-to-compile
+ (root-node? backward-edges id-to-compile)
+ children
+ expr-fn-lookup
+ (get id-to-new-bindings id-to-compile))))
+ id-to-compiled-nodes))
+ ;; The node IDs have been determined before now, so we just need to sort the map returned.
+ ;; This matters because the engine will left-activate the beta roots with the empty token
+ ;; in the order that this map is seq'ed over.
+ (hf/mut-long-map)
+ ids-to-compile)]
+ id-to-compiled-node))
(sc/defn to-alpha-graph :- [schema/AlphaNode]
"Returns a sequence of [condition-fn, [node-ids]] tuples to represent the alpha side of the network."
[beta-graph :- schema/BetaGraph
create-id-fn :- IFn]
-
;; Create a sequence of tuples of conditions + env to beta node ids.
(let [condition-to-node-ids (for [[id node] (:id-to-condition-node beta-graph)
:when (:condition node)]
@@ -1705,16 +1696,20 @@
;; Can't use simple update-in because we need to ensure
;; the value is a vector, not a list.
- (if (get node-map [condition env])
- (update-in node-map [[condition env]] conj node-id)
- (assoc node-map [condition env] [node-id])))
- (hf/hash-map)
+ (hm/compute! node-map [condition env]
+ (fn update-condition-to-node
+ [_ node-ids]
+ (if node-ids
+ (conj node-ids node-id)
+ [node-id])))
+ node-map)
+ (hf/mut-map)
condition-to-node-ids)
;; We sort the alpha nodes by the ordered sequence of the node ids they correspond to
;; in order to make the order of alpha nodes for any given type consistent. Note that we
;; coerce to a vector because we need a type that is comparable.
- condition-to-node-entries (sort-by (fn [[k v]] (-> v sort vec))
+ condition-to-node-entries (sort-by (fn [[_k v]] (-> v sort vec))
condition-to-node-map)]
;; Compile conditions into functions.
@@ -1734,12 +1729,13 @@
:children [sc/Num]}]
[alpha-nodes :- [schema/AlphaNode]
expr-fn-lookup :- schema/NodeFnLookup]
- (for [{:keys [id condition beta-children env] :as node} alpha-nodes]
- (cond-> {:id id
- :type (effective-type (:type condition))
- :alpha-fn (first (safe-get expr-fn-lookup [id :alpha-expr]))
- :children beta-children}
- env (assoc :env env))))
+ (vec
+ (for [{:keys [id condition beta-children env] :as node} alpha-nodes]
+ (cond-> {:id id
+ :type (effective-type (:type condition))
+ :alpha-fn (first (safe-get expr-fn-lookup [id :alpha-expr]))
+ :children beta-children}
+ env (assoc :env env)))))
;; Wrap the fact-type so that Clojure equality and hashcode semantics are used
;; even though this is placed in a Java map.
@@ -1836,7 +1832,7 @@
(sc/defn build-network
"Constructs the network from compiled beta tree and condition functions."
- [id-to-node :- {sc/Int sc/Any}
+ [id-to-node :- schema/MutableLongHashMap
beta-roots
alpha-fns
productions
@@ -1845,7 +1841,6 @@
activation-group-sort-fn
activation-group-fn
expr-fn-lookup]
-
(let [beta-nodes (vals id-to-node)
production-nodes (for [node beta-nodes
@@ -1863,38 +1858,32 @@
[(:name (:query query-node)) query-node]]]
entry)
(into (hf/hash-map)))
-
- ;; type, alpha node tuples.
- alpha-nodes (for [{:keys [id type alpha-fn children env] :as alpha-map} alpha-fns
- :let [beta-children (map id-to-node children)]]
- [type (eng/->AlphaNode id env beta-children alpha-fn type)])
-
- ;; Merge the alpha nodes into a multi-map
- alpha-map (reduce
- (fn [alpha-map [type alpha-node]]
- (update-in alpha-map [type] conj alpha-node))
- (hf/hash-map)
- alpha-nodes)
-
- ;; Merge the alpha nodes into the id-to-node map
- id-to-node (into id-to-node
- (map (juxt :id identity))
- (mapv second alpha-nodes))
-
- get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn alpha-map)]
-
- #_{:clj-kondo/ignore [:unresolved-symbol]}
- (strict-map->Rulebase
- {:alpha-roots alpha-map
- :beta-roots beta-roots
- :productions productions
- :production-nodes production-nodes
- :query-nodes query-map
- :id-to-node id-to-node
- :activation-group-sort-fn activation-group-sort-fn
- :activation-group-fn activation-group-fn
- :get-alphas-fn get-alphas-fn
- :node-expr-fn-lookup expr-fn-lookup})))
+ alpha-map (hf/mut-map)]
+ ;; Merge the alpha nodes into a multi-map
+ (doseq [{:keys [id type alpha-fn children env]} alpha-fns
+ :let [beta-children (map id-to-node children)
+ alpha-node (eng/->AlphaNode id env beta-children alpha-fn type)]]
+ (hf/assoc! id-to-node (:id alpha-node) alpha-node)
+ (hm/compute! alpha-map type
+ (fn add-alpha
+ [_ alpha-nodes]
+ (if alpha-nodes
+ (conj alpha-nodes alpha-node)
+ (list alpha-node))))
+ [type alpha-node])
+ (let [get-alphas-fn (create-get-alphas-fn fact-type-fn ancestors-fn alpha-map)]
+ #_{:clj-kondo/ignore [:unresolved-symbol]}
+ (strict-map->Rulebase
+ {:alpha-roots (hf/persistent! alpha-map)
+ :beta-roots beta-roots
+ :productions productions
+ :production-nodes production-nodes
+ :query-nodes query-map
+ :id-to-node (hf/persistent! id-to-node)
+ :activation-group-sort-fn activation-group-sort-fn
+ :activation-group-fn activation-group-fn
+ :get-alphas-fn get-alphas-fn
+ :node-expr-fn-lookup expr-fn-lookup}))))
(defn production-load-order-comp [a b]
(< (-> a meta ::rule-load-order)
@@ -2063,7 +2052,7 @@
(if (previous new-production)
previous
(conj! previous new-production)))
- (transient #{}))
+ (hs/mut-set))
persistent!)
options-cache (get options :cache)
session-cache (cond
diff --git a/src/main/clojure/clara/rules/schema.clj b/src/main/clojure/clara/rules/schema.clj
index 50669daf..fc35e696 100644
--- a/src/main/clojure/clara/rules/schema.clj
+++ b/src/main/clojure/clara/rules/schema.clj
@@ -1,7 +1,11 @@
(ns clara.rules.schema
"Schema definition of Clara data structures using Prismatic's Schema library. This includes structures for rules and queries, as well as the schema
for the underlying Rete network itself. This can be used by tools or other libraries working with rules."
- (:require [schema.core :as s]))
+ (:require [schema.core :as s])
+ (:import [ham_fisted UnsharedLongHashMap]))
+
+(def MutableLongHashMap
+ UnsharedLongHashMap)
(s/defn condition-type :- (s/enum :or :not :and :exists :fact :accumulator :test)
"Returns the type of node in a LHS condition expression."
@@ -138,20 +142,19 @@
;; A graph representing the beta side of the rete network.
(def BetaGraph
{;; Edges from parent to child nodes.
- :forward-edges {s/Int #{s/Int}}
+ :forward-edges MutableLongHashMap
;; Edges from child to parent nodes.
- :backward-edges {s/Int #{s/Int}}
+ :backward-edges MutableLongHashMap
;; Map of identifier to condition nodes.
- :id-to-condition-node {s/Int (s/cond-pre (s/eq :clara.rules.compiler/root-condition)
- ConditionNode)}
+ :id-to-condition-node MutableLongHashMap
;; Map of identifier to query or rule nodes.
- :id-to-production-node {s/Int ProductionNode}
+ :id-to-production-node MutableLongHashMap
;; Map of identifier to new bindings created by the corresponding node.
- :id-to-new-bindings {s/Int #{s/Keyword}}})
+ :id-to-new-bindings MutableLongHashMap})
(defn tuple
"Given `items`, a list of schemas, will generate a schema to validate that a vector contains and is in the order provided
@@ -197,4 +200,4 @@
;; deserialization. In such events the compile-ctx would only be valuable when the environment
;; where the Session is being deserialized doesn't match that of the serialization, ie functions
;; and symbols cannot be resolved on the deserialization side.
- {(tuple s/Int s/Keyword) (tuple (s/pred ifn? "ifn?") NodeCompilationValue)})
\ No newline at end of file
+ {(tuple s/Int s/Keyword) (tuple (s/pred ifn? "ifn?") NodeCompilationValue)})
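To illustrate the mutable-map pattern adopted in this patch, a minimal sketch of accumulating a forward-edge multimap with ham-fisted's compute! and freezing it with persistent!; the edge data is made up for the example:

    (require '[ham-fisted.api :as hf]
             '[ham-fisted.mut-map :as hm])

    ;; Accumulate child ids per parent id in a mutable long-keyed map,
    ;; then freeze it once the graph is fully built.
    (let [forward-edges (hf/mut-long-map)]
      (doseq [[parent child] [[0 1] [0 2] [1 3]]]
        (hm/compute! forward-edges parent
                     (fn [_ children]
                       (if children
                         (conj children child)
                         (sorted-set child)))))
      (hf/persistent! forward-edges))
    ;; => {0 #{1 2}, 1 #{3}} (printed representation may vary)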
From dfeee4e9efc1351647510f37463c7d224dc1bc36 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 21 Feb 2024 11:47:05 -0600
Subject: [PATCH 69/87] feat: compiler class loading enhancements
---
dev/user.clj | 40 +++++++++++++++++------
pom.xml | 4 +--
src/main/clojure/clara/rules/compiler.clj | 15 +++------
src/main/clojure/clara/rules/schema.clj | 5 ++-
4 files changed, 40 insertions(+), 24 deletions(-)
diff --git a/dev/user.clj b/dev/user.clj
index c53bff9f..2b6f120d 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -16,21 +16,41 @@
(remove-tap #'println)
(tap> "foobar"))
-(defonce session-cache
+(def session-cache
(cache/lru-cache-factory {}))
;; Cache of compiled expressions
-(defonce compiler-cache
+(def compiler-cache
(cache/soft-cache-factory {}))
-(defonce rules
- (vec
- (for [n (range 5000)
- :let [fact-type (keyword (str "fact" n))]]
- {:ns-name (ns-name *ns*)
- :lhs [{:type fact-type
- :constraints []}]
- :rhs `(println ~(str fact-type))})))
+(defmacro mk-rules
+ [n]
+ (let [facts (for [n (range n)]
+ {:fact-type {:t (symbol (format "FactType%s" n))
+ :c (symbol (format "%s.FactType%s" (ns-name *ns*) n))}
+ :fact-record {:t (symbol (format "FactRecord%s" n))
+ :c (symbol (format "%s.FactRecord%s" (ns-name *ns*) n))}})
+ type-declarations (for [{{:keys [t]} :fact-type} facts]
+ `(deftype ~t []))
+ record-declarations (for [{{:keys [t]} :fact-record} facts]
+ `(defrecord ~t []))
+ fact-rules (for [{:keys [fact-type
+ fact-record]} facts]
+ `(hash-map
+ :ns-name (ns-name *ns*)
+ :lhs [{:type ~(:c fact-type)
+ :constraints []}
+ {:type ~(:c fact-record)
+ :constraints []}]
+ :rhs '(println (str "class:" ~n ~fact-type ~fact-record))))]
+ `(do
+ ~@type-declarations
+ ~@record-declarations
+ (vector
+ ~@fact-rules))))
+
+(def rules
+ (mk-rules 5000))
(comment
(count (.cache ^clojure.core.cache.SoftCache @compiler-cache))
diff --git a/pom.xml b/pom.xml
index 5ed6daed..70354861 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.2.0
+ 1.3.0-SNAPSHOT
- 1.2.0
+ 1.3.0-SNAPSHOT
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index dba7caab..f7194d24 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -110,12 +110,6 @@
(and (symbol? expr)
(.startsWith (name expr) "?")))
-(def ^:private reflector
- "For some reason (bug?) the default reflector doesn't use the
- Clojure dynamic class loader, which prevents reflecting on
- `defrecords`. Work around by supplying our own which does."
- (clojure.reflect.JavaReflector. (clojure.lang.RT/makeClassLoader)))
-
(defn- get-field-accessors
"Given a clojure.lang.IRecord subclass, returns a map of field name to a
symbol representing the function used to access it."
@@ -148,7 +142,7 @@
(defn- effective-type*
[type]
(if (symbol? type)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name type))
+ (clojure.lang.RT/classForName ^String (name type))
type))
(def effective-type
@@ -1288,7 +1282,6 @@
[s-expr (assoc compilation-ctx expr-key s-expr)])
id->expr)
- ;; If extract-exprs ever became a hot spot, this could be changed out to use more java interop.
id->expr (reduce (fn add-alpha-nodes
[prev alpha-node]
(let [{:keys [id condition env]} alpha-node
@@ -1334,7 +1327,7 @@
(fn add-conditions [prev id beta-node]
(let [condition (:condition beta-node)
condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ (clojure.lang.RT/classForName ^String (name condition))
condition)]
(case (or (:node-type beta-node)
;; If there is no :node-type then the node is the ::root-condition
@@ -1513,7 +1506,7 @@
(let [{:keys [condition production query join-bindings env]} beta-node
condition (if (symbol? condition)
- (.loadClass (clojure.lang.RT/makeClassLoader) (name condition))
+ (clojure.lang.RT/classForName ^String (name condition))
condition)
compiled-expr-fn (fn [id field] (first (safe-get expr-fn-lookup [id field])))]
@@ -1724,7 +1717,7 @@
(sc/defn compile-alpha-nodes :- [{:id sc/Int
:type sc/Any
- :alpha-fn sc/Any ;; TODO: is a function...
+ :alpha-fn schema/Function
(sc/optional-key :env) {sc/Keyword sc/Any}
:children [sc/Num]}]
[alpha-nodes :- [schema/AlphaNode]
diff --git a/src/main/clojure/clara/rules/schema.clj b/src/main/clojure/clara/rules/schema.clj
index fc35e696..7d42303f 100644
--- a/src/main/clojure/clara/rules/schema.clj
+++ b/src/main/clojure/clara/rules/schema.clj
@@ -193,6 +193,9 @@
{(tuple s/Int s/Keyword) (tuple SExpr (s/conditional :compile-ctx NodeCompilationContext
:else NodeCompilationValue))})
+(def Function
+ (s/pred ifn? "ifn?"))
+
;; An evaluated version of the schema mentioned above.
(def NodeFnLookup
;; This schema uses a relaxed version of NodeCompilationContext as once the expressions
@@ -200,4 +203,4 @@
;; deserialization. In such events the compile-ctx would only be valuable when the environment
;; where the Session is being deserialized doesn't match that of the serialization, ie functions
;; and symbols cannot be resolved on the deserialization side.
- {(tuple s/Int s/Keyword) (tuple (s/pred ifn? "ifn?") NodeCompilationValue)})
+ {(tuple s/Int s/Keyword) (tuple Function NodeCompilationValue)})
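For context, a tiny sketch of the class-resolution change above: a condition supplied as a symbol is now resolved directly through RT/classForName instead of a freshly created dynamic class loader; a core Clojure class stands in here for a user fact record:

    ;; Before: (.loadClass (clojure.lang.RT/makeClassLoader) "some.ns.FactRecord")
    ;; After:
    (clojure.lang.RT/classForName "clojure.lang.PersistentVector")
    ;; => clojure.lang.PersistentVector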
From 4ea3e1c8e0c812fa30fe47cf31ec0529c8094e6b Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 21 Feb 2024 12:34:13 -0600
Subject: [PATCH 70/87] feat: prepare for release 1.3.0
---
CHANGELOG.md | 3 +++
pom.xml | 4 ++--
2 files changed, 5 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2d7a3766..f18bc6b1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.3.0
+* Enhance compilation performance by using mutable maps from ham-fisted lib
+
# 1.2.0
* Enhance caching support by adding compile caching using core CacheProtocol
diff --git a/pom.xml b/pom.xml
index 70354861..71946fcd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.3.0-SNAPSHOT
+ 1.3.0
- 1.3.0-SNAPSHOT
+ 1.3.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
From cf05a642df47219f023974d53ca7c8036523c4c2 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 21 Feb 2024 12:45:36 -0600
Subject: [PATCH 71/87] feat: update user ns
---
dev/user.clj | 17 ++++++++---------
1 file changed, 8 insertions(+), 9 deletions(-)
diff --git a/dev/user.clj b/dev/user.clj
index 2b6f120d..5ea16422 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -31,9 +31,9 @@
:fact-record {:t (symbol (format "FactRecord%s" n))
:c (symbol (format "%s.FactRecord%s" (ns-name *ns*) n))}})
type-declarations (for [{{:keys [t]} :fact-type} facts]
- `(deftype ~t []))
+ `(deftype ~t []))
record-declarations (for [{{:keys [t]} :fact-record} facts]
- `(defrecord ~t []))
+ `(defrecord ~t []))
fact-rules (for [{:keys [fact-type
fact-record]} facts]
`(hash-map
@@ -44,15 +44,14 @@
:constraints []}]
:rhs '(println (str "class:" ~n ~fact-type ~fact-record))))]
`(do
- ~@type-declarations
- ~@record-declarations
- (vector
- ~@fact-rules))))
-
-(def rules
- (mk-rules 5000))
+ ~@type-declarations
+ ~@record-declarations
+ (vector
+ ~@fact-rules))))
(comment
+ (def rules
+ (mk-rules 5000))
(count (.cache ^clojure.core.cache.SoftCache @compiler-cache))
(time
From 0b40d9044751c7aa18290279229a1e420619af2f Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Thu, 22 Feb 2024 12:56:20 -0600
Subject: [PATCH 72/87] feat: enhance caching performance by more predictable
md5 caching and sorting productions
---
CHANGELOG.md | 3 +
deps.edn | 1 +
dev/user.clj | 31 +++++---
pom.xml | 9 ++-
src/main/clojure/clara/rules/compiler.clj | 90 ++++++++++-------------
5 files changed, 73 insertions(+), 61 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f18bc6b1..86fb99ae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.3.1
+* Enhance caching performance by more predictable md5 caching and sorting productions
+
# 1.3.0
* Enhance compilation performance by using mutable maps from ham-fisted lib
diff --git a/deps.edn b/deps.edn
index 37cea21d..85740a52 100644
--- a/deps.edn
+++ b/deps.edn
@@ -4,6 +4,7 @@
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.10.3"}
org.clojure/core.cache {:mvn/version "1.0.225"}
+ org.clj-commons/digest {:mvn/version "1.4.100"}
com.github.k13labs/futurama {:mvn/version "1.0.2"}
com.cnuernber/ham-fisted {:mvn/version "2.016"}
prismatic/schema {:mvn/version "1.4.1"}
diff --git a/dev/user.clj b/dev/user.clj
index 5ea16422..8d2ccc34 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -23,7 +23,7 @@
(def compiler-cache
(cache/soft-cache-factory {}))
-(defmacro mk-rules
+(defmacro mk-types
[n]
(let [facts (for [n (range n)]
{:fact-type {:t (symbol (format "FactType%s" n))
@@ -33,7 +33,18 @@
type-declarations (for [{{:keys [t]} :fact-type} facts]
`(deftype ~t []))
record-declarations (for [{{:keys [t]} :fact-record} facts]
- `(defrecord ~t []))
+ `(defrecord ~t []))]
+ `(do
+ ~@type-declarations
+ ~@record-declarations)))
+
+(defmacro mk-rules
+ [n]
+ (let [facts (for [n (range n)]
+ {:fact-type {:t (symbol (format "FactType%s" n))
+ :c (symbol (format "%s.FactType%s" (ns-name *ns*) n))}
+ :fact-record {:t (symbol (format "FactRecord%s" n))
+ :c (symbol (format "%s.FactRecord%s" (ns-name *ns*) n))}})
fact-rules (for [{:keys [fact-type
fact-record]} facts]
`(hash-map
@@ -43,21 +54,23 @@
{:type ~(:c fact-record)
:constraints []}]
:rhs '(println (str "class:" ~n ~fact-type ~fact-record))))]
- `(do
- ~@type-declarations
- ~@record-declarations
- (vector
- ~@fact-rules))))
+ `(vector
+ ~@fact-rules)))
(comment
+ (mk-types 5000)
(def rules
(mk-rules 5000))
+ (keys @session-cache)
+ (when-let [v (first (.cache ^clojure.core.cache.SoftCache @compiler-cache))]
+ (.getValue v))
+ (count @session-cache)
(count (.cache ^clojure.core.cache.SoftCache @compiler-cache))
(time
(mk-session (conj rules {:ns-name (ns-name *ns*)
- :lhs [{:type :foobar12
+ :lhs [{:type :foobar14
:constraints []}]
:rhs `(println ~(str :foobar))})
- :cache false
+ :cache session-cache
:compiler-cache compiler-cache)))
diff --git a/pom.xml b/pom.xml
index 71946fcd..3e791a4e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.3.0
+ 1.3.1
- 1.3.0
+ 1.3.1
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -29,6 +29,11 @@
core.cache
1.0.225
+
+ org.clj-commons
+ digest
+ 1.4.100
+
com.github.k13labs
futurama
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index f7194d24..e9263410 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -6,9 +6,11 @@
[clara.rules.schema :as schema]
[clara.rules.platform :refer [jeq-wrap] :as platform]
[clojure.core.cache.wrapped :as cache]
+ [clj-commons.digest :as digest]
[ham-fisted.api :as hf]
[ham-fisted.set :as hs]
[ham-fisted.mut-map :as hm]
+ [futurama.util :as u]
[clojure.set :as set]
[clojure.string :as string]
[clojure.walk :as walk]
@@ -104,6 +106,11 @@
;; A map of [node-id field-name] to function.
node-expr-fn-lookup :- schema/NodeFnLookup])
+(defn- md5-hash
+ "Returns the md5 digest of the given data after converting it to a string"
+ [x]
+ (digest/md5 ^String (str x)))
+
(defn- is-variable?
"Returns true if the given expression is a variable (a symbol prefixed by ?)"
[expr]
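A small sketch of the md5-based cache key this patch moves to, assuming only clj-commons.digest as added to deps above; the expression and compilation context are placeholders:

    (require '[clj-commons.digest :as digest])

    ;; Stable string key: md5 of the printed expression concatenated with
    ;; md5 of its printed compilation context.
    (let [expr '(fn [x] (inc x))
          ctx  {:msg "compiling alpha node"}]
      (str (digest/md5 (str expr)) (digest/md5 (str ctx))))
    ;; => a 64-character hex string used to look up previously compiled fns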
@@ -1422,13 +1429,16 @@
[key->expr :- schema/NodeExprLookup
expr-cache :- (sc/maybe sc/Any)
partition-size :- sc/Int]
- (let [prepare-expr (fn prepare-expr
+ (let [prepare-expr (fn do-prepare-expr
[[expr-key [expr compilation-ctx]]]
- (let [cache-key [(hash expr) (hash compilation-ctx)]]
- (if-let [compiled-expr (and expr-cache
- (cache/lookup expr-cache cache-key))]
- [:compiled [expr-key [compiled-expr compilation-ctx]]]
- [:prepared [expr-key [expr compilation-ctx]]])))
+ (if expr-cache
+ (let [cache-key (str (md5-hash expr) (md5-hash compilation-ctx))
+ compilation-ctx (assoc compilation-ctx :cache-key cache-key)
+ compiled-expr (cache/lookup expr-cache cache-key)]
+ (if compiled-expr
+ [:compiled [expr-key [compiled-expr compilation-ctx]]]
+ [:prepared [expr-key [expr compilation-ctx]]]))
+ [:prepared [expr-key [expr compilation-ctx]]]))
batching-try-eval (fn do-compile-exprs
[exprs compilation-ctxs]
;; Try to evaluate all of the expressions as a batch. If they fail the batch eval then we
@@ -1436,13 +1446,12 @@
;; anyway.
(try
(mapv
- (fn cache-expr
- [expr compiled-expr compilation-ctx]
- (let [cache-key [(hash expr) (hash compilation-ctx)]]
- (when expr-cache
- (cache/miss expr-cache cache-key compiled-expr))
- [compiled-expr compilation-ctx]))
- exprs (eval exprs) compilation-ctxs)
+ (fn do-cache-expr
+ [compiled-expr compilation-ctx]
+ (when-let [cache-key (:cache-key compilation-ctx)]
+ (cache/miss expr-cache cache-key compiled-expr))
+ [compiled-expr compilation-ctx])
+ (eval exprs) compilation-ctxs)
(catch Exception e
;; Using mapv here rather than map to avoid laziness, otherwise compilation failure might
;; fall into the throw below for the wrong reason.
@@ -1939,22 +1948,8 @@
"Compile the rules into a rete network and return the given session."
[productions :- #{schema/Production}
options :- {sc/Keyword sc/Any}]
- (let [;; We need to put the productions in a sorted set here, instead of in mk-session, since we don't want
- ;; a sorted set in the session-cache. If we did only rule order would be used to compare set equality
- ;; for finding a cache hit, and we want to use the entire production, which a non-sorted set does.
- ;; When using a sorted set, for constant options,
- ;; any session creation with n productions would be a cache hit if any previous session had n productions.
- ;; Furthermore, we can avoid the work of sorting the set until we know that there was a cache miss.
- ;;
- ;; Note that this ordering is not for correctness; we are just trying to increase consistency of rulebase compilation,
- ;; and hopefully thereby execution times, from run to run.
- _ (validate-names-unique productions)
- productions (with-meta (into (sorted-set-by production-load-order-comp)
- productions)
- ;; Store the name of the custom comparator for durability.
- {:clara.rules.durability/comparator-name `production-load-order-comp})
-
- ;; A stateful counter used for unique ids of the nodes of the graph.
+ (validate-names-unique productions)
+ (let [;; A stateful counter used for unique ids of the nodes of the graph.
id-counter (atom 0)
create-id-fn (fn [] (swap! id-counter inc))
@@ -2024,29 +2019,25 @@
(vary-meta production assoc ::rule-load-order (or n 0)))
(range) productions))
+(defn- do-load-productions
+ [x]
+ (if (u/instance-satisfies? IRuleSource x)
+ (load-rules x)
+ x))
+
(defn mk-session
"Creates a new session using the given rule source. The resulting session
is immutable, and can be used with insert, retract, fire-rules, and query functions."
([sources-and-options]
(let [sources (take-while (complement keyword?) sources-and-options)
options (apply hash-map (drop-while (complement keyword?) sources-and-options))
- productions (->> sources
- ;; Load rules from the source, or just use the input as a seq.
- (mapcat #(if (satisfies? IRuleSource %)
- (load-rules %)
- %))
- add-production-load-order
- ;; Ensure that we choose the earliest occurrence of a rule for the purpose of rule order.
- ;; There are Clojure core functions for distinctness, of course, but none of them seem to guarantee
- ;; which instance will be chosen in case of duplicates.
- (reduce (fn [previous new-production]
- ;; Contains? is broken for transient sets; see http://dev.clojure.org/jira/browse/CLJ-700
- ;; Since all values in this set should be truthy, we can just use the set as a function instead.
- (if (previous new-production)
- previous
- (conj! previous new-production)))
- (hs/mut-set))
- persistent!)
+ productions-loaded (->> (mapcat do-load-productions sources)
+ (add-production-load-order))
+ productions-unique (hs/set productions-loaded)
+ productions-sorted (with-meta
+ (into (sorted-set-by production-load-order-comp) productions-unique)
+ ;; Store the name of the custom comparator for durability.
+ {:clara.rules.durability/comparator-name `production-load-order-comp})
options-cache (get options :cache)
session-cache (cond
(true? options-cache)
@@ -2054,11 +2045,10 @@
(nil? options-cache)
default-session-cache
:else options-cache)
- options (dissoc options :cache)
;;; this is simpler than storing all the productions and options in the cache
- session-key [(count productions) (hash productions) (hash options)]]
+ session-key (str (md5-hash productions-sorted) (md5-hash (dissoc options :cache :compiler-cache)))]
(if session-cache
(cache/lookup-or-miss session-cache session-key
(fn do-mk-session [_]
- (mk-session* productions options)))
- (mk-session* productions options)))))
+ (mk-session* productions-sorted options)))
+ (mk-session* productions-sorted options)))))
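
The session cache key above becomes the concatenation of two md5 digests rather than a tuple of Clojure hashes. A minimal sketch of that idea, using only the clj-commons.digest dependency this patch adds; the helper names below are illustrative, not the compiler's internals beyond digest/md5:

    (ns example.cache-key
      (:require [clj-commons.digest :as digest]))

    ;; Digest any Clojure value by printing it first, mirroring the
    ;; md5-hash helper added to the compiler in this patch.
    (defn md5-hash [x]
      (digest/md5 (str x)))

    ;; The session key pairs the digest of the sorted productions with
    ;; the digest of the options, minus the cache options themselves,
    ;; aiming for the "more predictable" keys the changelog describes.
    (defn session-key [productions-sorted options]
      (str (md5-hash productions-sorted)
           (md5-hash (dissoc options :cache :compiler-cache))))

    (comment
      (session-key [{:lhs [{:type :temperature :constraints []}]
                     :rhs '(println "hot")}]
                   {:fact-type-fn :type :cache true}))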
From e01304f1d4ffc1f1c97c76ecbd9d71d61add8e16 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 25 Feb 2024 18:38:42 -0600
Subject: [PATCH 73/87] feat: enhance memory add-activations implementation by
replacing get/set with compute!
---
CHANGELOG.md | 3 +++
pom.xml | 4 ++--
src/main/clojure/clara/rules/memory.clj | 23 ++++++++++++-----------
src/main/clojure/clara/rules/platform.clj | 7 -------
4 files changed, 17 insertions(+), 20 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 86fb99ae..2af77d23 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,8 @@
This is a history of changes to k13labs/clara-rules.
+# 1.3.2
+* Enhance memory add-activations implementation by replacing get/set with compute!
+
# 1.3.1
* Enhance caching performance with more predictable md5-based cache keys and by sorting productions
diff --git a/pom.xml b/pom.xml
index 3e791a4e..145a7b93 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.3.1
+ 1.3.2
- 1.3.1
+ 1.3.2
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
diff --git a/src/main/clojure/clara/rules/memory.clj b/src/main/clojure/clara/rules/memory.clj
index 48e35533..26fb78b6 100644
--- a/src/main/clojure/clara/rules/memory.clj
+++ b/src/main/clojure/clara/rules/memory.clj
@@ -674,17 +674,18 @@
(add-activations!
[memory production new-activations]
- (let [activation-group (activation-group-fn production)
- previous (.get activation-map activation-group)]
- ;; The reasoning here is the same as in add-elements! impl above.
- (cond
- previous
- (queue-activations! previous new-activations)
-
- (not (coll-empty? new-activations))
- (.put activation-map
- activation-group
- (->activation-priority-queue new-activations)))))
+ (let [activation-group (activation-group-fn production)]
+ (hm/compute! activation-map activation-group
+ (fn do-add-activations
+ [_ old-activations]
+ (cond
+ old-activations
+ (queue-activations! old-activations new-activations)
+
+ (not (coll-empty? new-activations))
+ (->activation-priority-queue new-activations)
+
+ :else nil)))))
(pop-activations!
[memory count]
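
The add-activations! rewrite above leans on ham-fisted's mutable-map compute!, which folds the previous get/put pair into a single update. A small self-contained sketch of the same pattern, with a toy vector accumulator standing in for the activation priority queue and hf/mut-map standing in for the engine's activation map:

    (ns example.compute
      (:require [ham-fisted.api :as hf]
                [ham-fisted.mut-map :as hm]))

    ;; compute! calls the function with the key and the current value
    ;; (nil when absent); whatever the function returns becomes the new
    ;; entry, and returning nil leaves no entry behind.
    (defn add-item! [m k item]
      (hm/compute! m k
                   (fn [_k existing]
                     (if existing
                       (conj existing item)
                       [item]))))

    (comment
      (let [m (hf/mut-map)]
        (add-item! m :a 1)
        (add-item! m :a 2)
        m))   ;; => {:a [1 2]}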
diff --git a/src/main/clojure/clara/rules/platform.clj b/src/main/clojure/clara/rules/platform.clj
index 5a5d7137..d514f288 100644
--- a/src/main/clojure/clara/rules/platform.clj
+++ b/src/main/clojure/clara/rules/platform.clj
@@ -1,14 +1,7 @@
(ns clara.rules.platform
"This namespace is for internal use and may move in the future.
Platform unified code Clojure/ClojureScript."
- (:require [futurama.core :refer [!
From: Jose Gomez
Date: Mon, 26 Feb 2024 18:57:02 -0600
Subject: [PATCH 74/87] chore: remove deprecated either schema usage from
compiler ns
---
src/main/clojure/clara/rules/compiler.clj | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index e9263410..95e52752 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -941,7 +941,9 @@
[beta-graph :- schema/BetaGraph
source-ids :- [sc/Int]
target-id :- sc/Int
- target-node :- (sc/either schema/ConditionNode schema/ProductionNode)]
+ target-node :- (sc/conditional
+ (comp #{:production :query} :node-type) schema/ProductionNode
+ :else schema/ConditionNode)]
(hf/assoc! (:id-to-new-bindings beta-graph) target-id
;; A ProductionNode will not have the set of new bindings previously created,
;; so we assign them an empty set here. A ConditionNode will always have a set of new bindings,
@@ -1506,7 +1508,9 @@
(sc/defn ^:private compile-node
"Compiles a given node description into a node usable in the network with the
given children."
- [beta-node :- (sc/either schema/ConditionNode schema/ProductionNode)
+ [beta-node :- (sc/conditional
+ (comp #{:production :query} :node-type) schema/ProductionNode
+ :else schema/ConditionNode)
id :- sc/Int
is-root :- sc/Bool
children :- [sc/Any]
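
For readers less familiar with prismatic/schema, s/conditional picks a schema by predicate instead of trying each alternative the way the deprecated s/either did. A toy version of the dispatch used above, with simplified stand-in schemas rather than the real clara ones:

    (ns example.conditional
      (:require [schema.core :as s]))

    ;; Dispatch on :node-type, the same discriminator the compiler uses.
    (def BetaNode
      (s/conditional
       (comp #{:production :query} :node-type) {:node-type s/Keyword
                                                :production s/Any}
       :else {:node-type s/Keyword
              :condition s/Any}))

    (comment
      (s/validate BetaNode {:node-type :production :production {:rhs '(println "x")}})
      (s/validate BetaNode {:node-type :join :condition {:type :temperature}}))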
From 741707fe65ddd70decdfc4cf3b68a9119b452531 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Mon, 26 Feb 2024 22:02:22 -0600
Subject: [PATCH 75/87] feat: extract way to build the production without
adding to beta network
---
src/main/clojure/clara/rules.clj | 7 +-
src/main/clojure/clara/rules/compiler.clj | 156 +++++++++++++++++-----
2 files changed, 127 insertions(+), 36 deletions(-)
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index a706cdda..a1180e7b 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -247,9 +247,10 @@
See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
[name & body]
- (let [doc (if (string? (first body)) (first body) nil)]
- `(def ~(vary-meta name assoc :rule true :doc doc)
- ~(dsl/build-rule name body (meta &form)))))
+ (let [doc (if (string? (first body)) (first body) nil)
+ rule (dsl/build-rule name body (meta &form))]
+ `(def ~(vary-meta name assoc :rule rule :doc doc)
+ ~rule)))
(defmacro defquery
"Defines a query and stored it in the given var. For instance, a simple query that accepts no
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 95e52752..683e26cc 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -973,22 +973,16 @@
(declare add-production)
-(sc/defn ^:private extract-negation :- {:new-expression schema/Condition
- :beta-with-negations schema/BetaGraph}
-
- "Extracts complex, nested negations and adds a rule to the beta graph to trigger the returned
- negation expression."
+(sc/defn ^:private get-complex-negation :- (sc/maybe
+ {:new-expression schema/Condition
+ :generated-rule schema/Production})
[previous-expressions :- [schema/Condition]
expression :- schema/Condition
- parent-ids :- [sc/Int]
ancestor-bindings :- #{sc/Keyword}
- beta-graph :- schema/BetaGraph
- production :- schema/Production
- create-id-fn]
-
- (if (and (= :not (first expression))
- (sequential? (second expression))
- (#{:and :or :not} (first (second expression))))
+ production :- schema/Production]
+ (when (and (= :not (first expression))
+ (sequential? (second expression))
+ (#{:and :or :not} (first (second expression))))
;; Dealing with a compound negation, so extract it out.
(let [negation-expr (second expression)
@@ -1034,18 +1028,28 @@
true (assoc-in [:props :clara-rules/internal-salience] :extracted-negation)
;; Propagate the the environment (such as local bindings) if applicable.
- (:env production) (assoc :env (:env production)))
-
- ;; Add the generated rule to the beta network.
-
- beta-with-negations (add-production generated-rule beta-graph create-id-fn)]
-
+ (:env production) (assoc :env (:env production)))]
{:new-expression modified-expression
- :beta-with-negations beta-with-negations})
+ :generated-rule generated-rule})))
- ;; The expression wasn't a negation, so return the previous content.
- {:new-expression expression
- :beta-with-negations beta-graph}))
+(sc/defn ^:private add-complex-negation :- (sc/maybe
+ {:new-expression schema/Condition
+ :beta-with-negations schema/BetaGraph})
+
+ "Extracts complex, nested negations and adds a rule to the beta graph to trigger the returned
+ negation expression."
+ [previous-expressions :- [schema/Condition]
+ expression :- schema/Condition
+ ancestor-bindings :- #{sc/Keyword}
+ beta-graph :- schema/BetaGraph
+ production :- schema/Production
+ create-id-fn]
+ (when-let [{:keys [new-expression
+ generated-rule]} (get-complex-negation previous-expressions expression
+ ancestor-bindings production)]
+ ;; The expression was a negation, so add it to the Beta graph
+ {:new-expression new-expression
+ :beta-with-negations (add-production generated-rule beta-graph create-id-fn)}))
;; A beta graph with no nodes.
(defn ^:private empty-beta-graph
@@ -1056,6 +1060,25 @@
:id-to-production-node (hf/mut-long-map)
:id-to-new-bindings (hf/mut-long-map)})
+(sc/defn ^:private get-condition-bindings :- {:bindings #{sc/Keyword}}
+ "Get the bindings from a sequence of condition/conjunctions."
+ [conditions :- [schema/Condition]
+ env :- (sc/maybe {sc/Keyword sc/Any})
+ ancestor-bindings :- #{sc/Keyword}]
+ (loop [bindings ancestor-bindings
+ [expression & remaining-expressions] conditions]
+ (if expression
+ (let [node (condition-to-node expression env bindings)
+
+ {:keys [result-binding fact-binding]} expression
+
+ all-bindings (cond-> (set/union bindings (:used-bindings node))
+ result-binding (conj result-binding)
+ fact-binding (conj fact-binding))]
+ (recur all-bindings remaining-expressions))
+ ;; No expressions remaining, so return the structure.
+ {:bindings bindings})))
+
(sc/defn ^:private add-conjunctions :- {:beta-graph schema/BetaGraph
:new-ids [sc/Int]
:bindings #{sc/Keyword}}
@@ -1149,6 +1172,73 @@
:new-ids parent-ids
:bindings bindings})))
+(sc/defn build-production :- schema/ProductionNode
+ [production :- schema/Production]
+ (when (:rhs production)
+ (let [flattened-conditions (for [condition (:lhs production)
+ child-condition (if (#{'and :and} (first condition))
+ (rest condition)
+ [condition])]
+
+ child-condition)
+
+ sorted-conditions (sort-conditions flattened-conditions)
+
+ {ancestor-bindings :bindings}
+ (loop [previous-conditions []
+ [current-condition & remaining-conditions] sorted-conditions
+ ancestor-bindings #{}]
+ (if-not current-condition
+ {:bindings ancestor-bindings}
+ (let [{:keys [new-expression]}
+ (get-complex-negation previous-conditions
+ current-condition
+ ancestor-bindings
+ production)
+
+ condition (or new-expression
+ current-condition)
+
+ ;; Extract disjunctions from the condition.
+ dnf-expression (to-dnf condition)
+
+ ;; Get a sequence of disjunctions.
+ disjunctions (for [expression
+ (if (= :or (first dnf-expression)) ; Ignore top-level or in DNF.
+ (rest dnf-expression)
+ [dnf-expression])]
+
+ (if (= :and (first expression)) ; Ignore nested ands in DNF.
+ (rest expression)
+ [expression]))
+
+ {all-bindings :bindings}
+ (reduce (fn [previous-result conjunctions]
+ ;; Get the complete bindings
+ (let [;; Convert exists operations to accumulator and test nodes.
+ exists-extracted (extract-exists conjunctions)
+ ;; Compute the new bindings with the expressions.
+ new-result (get-condition-bindings exists-extracted
+ (:env production)
+ ancestor-bindings)]
+
+ ;; Combine the bindings
+ ;; for use in descendent nodes.
+ {:bindings (set/union (:bindings previous-result)
+ (:bindings new-result))}))
+
+ ;; Initial reduce value, combining previous graph, parent ids, and ancestor variable bindings.
+ {:bindings ancestor-bindings}
+
+ ;; Each disjunction contains a sequence of conjunctions.
+ disjunctions)]
+ (recur (conj previous-conditions current-condition)
+ remaining-conditions
+ all-bindings))))]
+ {:node-type :production
+ :production production
+ :bindings ancestor-bindings})))
+
(sc/defn ^:private add-production :- schema/BetaGraph
"Adds a production to the graph of beta nodes."
[production :- schema/Production
@@ -1174,16 +1264,16 @@
(if current-condition
(let [{:keys [new-expression beta-with-negations]}
- (extract-negation previous-conditions
- current-condition
- parent-ids
- ancestor-bindings
- beta-graph
- production
- create-id-fn)
+ (add-complex-negation previous-conditions
+ current-condition
+ ancestor-bindings
+ beta-graph
+ production
+ create-id-fn)
+
+ beta-graph (or beta-with-negations beta-graph)
- condition (or new-expression
- current-condition)
+ condition (or new-expression current-condition)
;; Extract disjunctions from the condition.
dnf-expression (to-dnf condition)
@@ -1221,7 +1311,7 @@
(:bindings new-result))}))
;; Initial reduce value, combining previous graph, parent ids, and ancestor variable bindings.
- {:beta-graph beta-with-negations
+ {:beta-graph beta-graph
:new-ids []
:bindings ancestor-bindings}
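
The extracted build-production walks a rule's sorted conditions only to accumulate bindings, so it can describe the production node without mutating any beta graph. A hedged REPL sketch of what a call might look like; the rule map is a toy and the bindings shown are an assumption, not verified output:

    (comment
      (require '[clara.rules.compiler :as com])

      (com/build-production
       {:ns-name 'user
        :lhs [{:type :temperature
               :constraints ['(= ?t temperature)]}]
        :rhs '(println ?t)})
      ;; expected shape: {:node-type :production
      ;;                  :production {...}
      ;;                  :bindings   #{:?t}}
      )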
From a106cf1f2b06b6756fe6107802df506af799b100 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 09:35:17 -0600
Subject: [PATCH 76/87] feat: some more progress to support functions that are
rules
---
src/main/clojure/clara/rules.clj | 30 +++++++++++++----
src/main/clojure/clara/rules/compiler.clj | 29 ++++++++++------
src/main/clojure/clara/rules/dsl.clj | 41 +++++++++++++++++++++++
src/main/clojure/clara/rules/schema.clj | 7 ++--
4 files changed, 87 insertions(+), 20 deletions(-)
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index a1180e7b..bf67a8cb 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -166,9 +166,19 @@
(sort (fn [v1 v2]
(compare (or (:line (meta v1)) 0)
(or (:line (meta v2)) 0))))
- (mapcat #(if (:production-seq (meta %))
- (deref %)
- [(deref %)]))))))
+ (mapcat (fn do-load-from-var
+ [x]
+ (let [mx (meta x)
+ dx (deref x)]
+ (cond
+ (:production-seq mx)
+ dx
+
+ (fn? dx)
+ [(assoc (dx) :handler dx)]
+
+ :else
+ [dx]))))))))
(defmacro mk-session
"Creates a new session using the given rule sources. The resulting session
@@ -248,9 +258,17 @@
See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
[name & body]
(let [doc (if (string? (first body)) (first body) nil)
- rule (dsl/build-rule name body (meta &form))]
- `(def ~(vary-meta name assoc :rule rule :doc doc)
- ~rule)))
+ rule (dsl/build-rule name body (meta &form))
+ rule-action (dsl/build-rule-action name body (meta &form))
+ rule-node (com/build-rule-node rule-action)
+ {:keys [bindings production]} rule-node
+ rule-handler (com/compile-action-handler name bindings
+ (:rhs production)
+ (:env production))]
+ `(defn ~(vary-meta name assoc :rule true :doc doc)
+ ([]
+ ~rule)
+ (~@(drop 2 rule-handler)))))
(defmacro defquery
"Defines a query and stored it in the given var. For instance, a simple query that accepts no
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 683e26cc..3d475889 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -391,9 +391,8 @@
`(array-map :handler ~test-handler
:constraints '~constraints)))
-(defn compile-action
- "Compile the right-hand-side action of a rule, returning a function to execute it."
- [node-id binding-keys rhs env]
+(defn compile-action-handler
+ [name bindings-keys rhs env]
(let [;; Avoid creating let bindings in the compile code that aren't actually used in the body.
;; The bindings only exist in the scope of the RHS body, not in any code called by it,
;; so this scanning strategy will detect all possible uses of binding variables in the RHS.
@@ -406,19 +405,25 @@
(comp
(filter rhs-bindings-used)
(mapcat build-token-assignment))
- binding-keys)
+ bindings-keys)
;; The destructured environment, if any.
destructured-env (if (> (count env) 0)
{:keys (mapv #(symbol (name %)) (keys env))}
- '?__env__)
-
- ;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-action' to be used for this scenario
- fn-name (mk-node-fn-name "ProductionNode" node-id "AE")]
- `(fn ~fn-name [~'?__token__ ~destructured-env]
+ '?__env__)]
+ `(fn ~name
+ [~'?__token__ ~destructured-env]
(let [~@assignments]
~rhs))))
+(defn compile-action
+ "Compile the right-hand-side action of a rule, returning a function to execute it."
+ [node-id binding-keys rhs env]
+ (let [;; Hardcoding the node-type and fn-type as we would only ever expect 'compile-action' to be used for this scenario
+ fn-name (mk-node-fn-name "ProductionNode" node-id "AE")
+ handler (compile-action-handler fn-name binding-keys rhs env)]
+ `~handler))
+
(defn compile-accum
"Used to create accumulators that take the environment into account."
[node-id node-type accum env]
@@ -1172,7 +1177,7 @@
:new-ids parent-ids
:bindings bindings})))
-(sc/defn build-production :- schema/ProductionNode
+(sc/defn build-rule-node :- schema/ProductionNode
[production :- schema/Production]
(when (:rhs production)
(let [flattened-conditions (for [condition (:lhs production)
@@ -1526,7 +1531,9 @@
(if expr-cache
(let [cache-key (str (md5-hash expr) (md5-hash compilation-ctx))
compilation-ctx (assoc compilation-ctx :cache-key cache-key)
- compiled-expr (cache/lookup expr-cache cache-key)]
+ compiled-handler (some-> compilation-ctx :compile-ctx :production :handler)
+ compiled-expr (or compiled-handler
+ (cache/lookup expr-cache cache-key))]
(if compiled-expr
[:compiled [expr-key [compiled-expr compilation-ctx]]]
[:prepared [expr-key [expr compilation-ctx]]]))
diff --git a/src/main/clojure/clara/rules/dsl.clj b/src/main/clojure/clara/rules/dsl.clj
index d01dfb57..070eea4a 100644
--- a/src/main/clojure/clara/rules/dsl.clj
+++ b/src/main/clojure/clara/rules/dsl.clj
@@ -245,6 +245,33 @@
;; Add the environment, if given.
(seq env) (assoc :env matching-env)))))
+(defn parse-rule-action*
+ "Creates a rule action from the DSL syntax using the given environment map. *ns*
+ should be bound to the namespace the rule is meant to be defined in."
+ ([lhs rhs properties env]
+ (parse-rule-action* lhs rhs properties env {}))
+ ([lhs rhs properties env rule-meta]
+ (let [conditions (into [] (for [expr lhs]
+ (parse-expression expr rule-meta)))
+
+ rule {:ns-name (ns-name *ns*)
+ :lhs (mapv #(resolve-vars % (destructure-syms %))
+ conditions)
+ :rhs (vary-meta rhs assoc :file *file*)}
+
+ symbols (set (filter symbol? (com/flatten-expression (concat lhs rhs))))
+ matching-env (into {} (for [sym (keys env)
+ :when (symbols sym)]
+ [(keyword (name sym)) sym]))]
+
+ (cond-> rule
+
+ ;; Add properties, if given.
+ (seq properties) (assoc :props properties)
+
+ ;; Add the environment, if given.
+ (seq env) (assoc :env matching-env)))))
+
(defn parse-query*
"Creates a query from the DSL syntax using the given environment map."
([params lhs env]
@@ -298,6 +325,20 @@
name (assoc :name (production-name name))
doc (assoc :doc doc)))))
+(defn build-rule-action
+ "Function used to parse and build a rule action using the DSL syntax."
+ ([name body] (build-rule-action name body {}))
+ ([name body form-meta]
+ (let [doc (if (string? (first body)) (first body) nil)
+ body (if doc (rest body) body)
+ properties (if (map? (first body)) (first body) nil)
+ definition (if properties (rest body) body)
+ {:keys [lhs rhs]} (split-lhs-rhs definition)]
+ (cond-> (parse-rule-action* lhs rhs properties {} form-meta)
+
+ name (assoc :name (production-name name))
+ doc (assoc :doc doc)))))
+
(defmacro parse-query
"Macro used to dynamically create a new rule using the DSL syntax."
[params lhs]
diff --git a/src/main/clojure/clara/rules/schema.clj b/src/main/clojure/clara/rules/schema.clj
index 7d42303f..bb63e13a 100644
--- a/src/main/clojure/clara/rules/schema.clj
+++ b/src/main/clojure/clara/rules/schema.clj
@@ -7,6 +7,9 @@
(def MutableLongHashMap
UnsharedLongHashMap)
+(def Function
+ (s/pred ifn? "ifn?"))
+
(s/defn condition-type :- (s/enum :or :not :and :exists :fact :accumulator :test)
"Returns the type of node in a LHS condition expression."
[condition]
@@ -68,6 +71,7 @@
(s/optional-key :doc) s/Str
(s/optional-key :props) {s/Keyword s/Any}
(s/optional-key :env) {s/Keyword s/Any}
+ (s/optional-key :handler) (s/maybe Function)
:lhs [Condition]
:rhs s/Any})
@@ -193,9 +197,6 @@
{(tuple s/Int s/Keyword) (tuple SExpr (s/conditional :compile-ctx NodeCompilationContext
:else NodeCompilationValue))})
-(def Function
- (s/pred ifn? "ifn?"))
-
;; An evaluated version of the schema mentioned above.
(def NodeFnLookup
;; This schema uses a relaxed version of NodeCompilationContext as once the expressions
From 587a360e9653a945fabc79ddfb03ead2d7375470 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 12:09:15 -0600
Subject: [PATCH 77/87] feat: change handler declaration to var, add var serde,
add load sources
---
src/main/clojure/clara/rules.clj | 38 +++++++++----------
src/main/clojure/clara/rules/compiler.clj | 33 +++++++++++-----
.../clara/rules/durability/fressian.clj | 11 ++++++
3 files changed, 51 insertions(+), 31 deletions(-)
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index bf67a8cb..6e01bb40 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -134,6 +134,11 @@
[& facts]
(eng/rhs-retract-facts! facts))
+(extend-type clojure.lang.Fn
+ com/IRuleSource
+ (load-rules [afn]
+ [(afn)]))
+
(extend-type clojure.lang.Symbol
com/IRuleSource
(load-rules [sym]
@@ -166,19 +171,7 @@
(sort (fn [v1 v2]
(compare (or (:line (meta v1)) 0)
(or (:line (meta v2)) 0))))
- (mapcat (fn do-load-from-var
- [x]
- (let [mx (meta x)
- dx (deref x)]
- (cond
- (:production-seq mx)
- dx
-
- (fn? dx)
- [(assoc (dx) :handler dx)]
-
- :else
- [dx]))))))))
+ (mapcat com/load-rules-from-source)))))
(defmacro mk-session
"Creates a new session using the given rule sources. The resulting session
@@ -258,17 +251,20 @@
See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
[name & body]
(let [doc (if (string? (first body)) (first body) nil)
- rule (dsl/build-rule name body (meta &form))
- rule-action (dsl/build-rule-action name body (meta &form))
- rule-node (com/build-rule-node rule-action)
+ rule (dsl/build-rule name body (meta &form)) ;;; Full rule LHS + RHS
+ rule-action (dsl/build-rule-action name body (meta &form)) ;;; Only the RHS
+ rule-node (com/build-rule-node rule-action) ;;; The Node of the RHS
{:keys [bindings production]} rule-node
rule-handler (com/compile-action-handler name bindings
(:rhs production)
- (:env production))]
- `(defn ~(vary-meta name assoc :rule true :doc doc)
- ([]
- ~rule)
- (~@(drop 2 rule-handler)))))
+ (:env production))
+ name-with-meta (vary-meta name assoc :rule true :doc doc)] ;;; The compiled RHS
+ `(do
+ (declare ~name-with-meta)
+ (defn ~name-with-meta
+ ([]
+ (assoc ~rule :handler #'~name-with-meta))
+ (~@(drop 2 rule-handler))))))
(defmacro defquery
"Defines a query and stored it in the given var. For instance, a simple query that accepts no
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 3d475889..7254fc71 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -392,7 +392,7 @@
:constraints '~constraints)))
(defn compile-action-handler
- [name bindings-keys rhs env]
+ [action-name bindings-keys rhs env]
(let [;; Avoid creating let bindings in the compile code that aren't actually used in the body.
;; The bindings only exist in the scope of the RHS body, not in any code called by it,
;; so this scanning strategy will detect all possible uses of binding variables in the RHS.
@@ -409,9 +409,9 @@
;; The destructured environment, if any.
destructured-env (if (> (count env) 0)
- {:keys (mapv #(symbol (name %)) (keys env))}
+ {:keys (mapv (comp symbol name) (keys env))}
'?__env__)]
- `(fn ~name
+ `(fn ~action-name
[~'?__token__ ~destructured-env]
(let [~@assignments]
~rhs))))
@@ -1531,7 +1531,7 @@
(if expr-cache
(let [cache-key (str (md5-hash expr) (md5-hash compilation-ctx))
compilation-ctx (assoc compilation-ctx :cache-key cache-key)
- compiled-handler (some-> compilation-ctx :compile-ctx :production :handler)
+ compiled-handler (some-> compilation-ctx :compile-ctx :production :handler deref)
compiled-expr (or compiled-handler
(cache/lookup expr-cache cache-key))]
(if compiled-expr
@@ -2120,11 +2120,24 @@
(vary-meta production assoc ::rule-load-order (or n 0)))
(range) productions))
-(defn- do-load-productions
- [x]
- (if (u/instance-satisfies? IRuleSource x)
- (load-rules x)
- x))
+(defn load-rules-from-source
+  "Loads the rules from a source if it implements `IRuleSource`, or navigates inside
+ collections to load rules from vectors, lists, sets, seqs."
+ [source]
+ (cond
+ (u/instance-satisfies? IRuleSource source)
+ (load-rules source)
+
+ (or (vector? source)
+ (list? source)
+ (set? source)
+ (seq? source))
+ (mapcat load-rules-from-source source)
+
+ (var? source)
+ (load-rules-from-source @source)
+
+ :else [source]))
(defn mk-session
"Creates a new session using the given rule source. The resulting session
@@ -2132,7 +2145,7 @@
([sources-and-options]
(let [sources (take-while (complement keyword?) sources-and-options)
options (apply hash-map (drop-while (complement keyword?) sources-and-options))
- productions-loaded (->> (mapcat do-load-productions sources)
+ productions-loaded (->> (mapcat load-rules-from-source sources)
(add-production-load-order))
productions-unique (hs/set productions-loaded)
productions-sorted (with-meta
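
With load-rules-from-source in place, heterogeneous rule sources can be mixed in a single mk-session call. A hedged usage sketch; the my.app.rules namespace and its vars are made up for illustration:

    (comment
      (require '[clara.rules :refer [mk-session]])

      ;; A namespace symbol, a var, and a nested vector of rule maps all
      ;; flow through the same loader now.
      (mk-session 'my.app.rules
                  [#'my.app.rules/audit-rule
                   my.app.rules/extra-rules]
                  :fact-type-fn :type))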
diff --git a/src/main/clojure/clara/rules/durability/fressian.clj b/src/main/clojure/clara/rules/durability/fressian.clj
index 4149982d..fa5d1b5d 100644
--- a/src/main/clojure/clara/rules/durability/fressian.clj
+++ b/src/main/clojure/clara/rules/durability/fressian.clj
@@ -247,6 +247,17 @@
(read [_ rdr tag component-count]
(resolve (.readObject rdr))))}}
+ "clojure.lang/var"
+ {:class clojure.lang.Var
+ :writer (reify WriteHandler
+ (write [_ w c]
+ (.writeTag w "clojure.lang/var" 1)
+ (.writeObject w (.toSymbol ^clojure.lang.Var c))))
+ :readers {"clojure.lang/var"
+ (reify ReadHandler
+ (read [_ rdr tag component-count]
+ (resolve (.readObject rdr))))}}
+
"hamf/set"
(create-identity-based-handler
IAPersistentSet
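
The new var handler simply round-trips a var through its fully qualified symbol. Stripped of the fressian plumbing, the idea is:

    (comment
      (let [v   #'clojure.core/map
            sym (.toSymbol ^clojure.lang.Var v)]  ;; what gets written
        (= v (resolve sym))))                     ;; what reading gives back => true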
From 271fa4f1c96ab55beb9f611ac98287e91cae416c Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 12:23:04 -0600
Subject: [PATCH 78/87] chore: add useful user ns scribbles
---
dev/user.clj | 34 +++++++++++++++++++++++-----------
1 file changed, 23 insertions(+), 11 deletions(-)
diff --git a/dev/user.clj b/dev/user.clj
index 8d2ccc34..404cafb1 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -3,7 +3,7 @@
quick-benchmark] :as crit]
[clara.rules.platform :refer [compute-for]]
[clojure.core.async :refer [go timeout
+ (println (str "class:" ~n ~fact-type ~fact-record))))]
+ `(do
+ ~@decl-rules
+ (vector
+ ~@fact-rules))))
(comment
- (mk-types 5000)
+ (clear-ns-productions!)
+ (mk-types 2500)
(def rules
- (mk-rules 5000))
+ (mk-rules 2500))
(keys @session-cache)
(when-let [v (first (.cache ^clojure.core.cache.SoftCache @compiler-cache))]
(.getValue v))
@@ -68,9 +80,9 @@
(count (.cache ^clojure.core.cache.SoftCache @compiler-cache))
(time
- (mk-session (conj rules {:ns-name (ns-name *ns*)
- :lhs [{:type :foobar14
- :constraints []}]
- :rhs `(println ~(str :foobar))})
+ (mk-session 'user [(conj rules {:ns-name (ns-name *ns*)
+ :lhs [{:type :foobar16
+ :constraints []}]
+ :rhs `(println ~(str :foobar))})]
:cache session-cache
:compiler-cache compiler-cache)))
From a01d840b93b75d941bf8c6047e111b09215fdee4 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 12:24:09 -0600
Subject: [PATCH 79/87] chore: run CI on all branches
---
.github/workflows/clojure.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/clojure.yml b/.github/workflows/clojure.yml
index 76a26b86..76df1979 100644
--- a/.github/workflows/clojure.yml
+++ b/.github/workflows/clojure.yml
@@ -2,7 +2,7 @@ name: Clojure CI
on:
push:
- branches: [ "main" ]
+ branches: "*"
pull_request:
branches: [ "main" ]
From f897ad2ebcb3899c7f305fb2b2e18517168c4172 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 13:35:34 -0600
Subject: [PATCH 80/87] chore: minor enhancements for rule loading
---
CHANGELOG.md | 6 ++++++
pom.xml | 4 ++--
src/main/clojure/clara/rules.clj | 15 +++++++--------
3 files changed, 15 insertions(+), 10 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2af77d23..1f7cab86 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
This is a history of changes to k13labs/clara-rules.
+# 1.4.0
+* `defrule` now defines rules as functions with two arities: calling with no args returns the rule map, and the two-arg arity is the compiled RHS.
+* `clojure.lang.Fn` now implements `clara.rules.compiler/IRuleSource`, returning a single rule by invoking it like `(a-rule)`.
+* add built-in support to serialize `clojure.lang.Var` so that a rule handler var can be serialized correctly.
+* add function `clara.rules.compiler/load-rules-from-source` to simplify loading rules.
+
# 1.3.2
* Enhance memory add-activations implementation by replacing get/set with compute!
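
From a caller's perspective the reshaped defrule behaves roughly as sketched below; the rule itself is a toy example and the shape of the returned map is an assumption, not captured output:

    (comment
      (require '[clara.rules :refer [defrule insert!]])

      (defrule cold-alert
        [:temperature [{:keys [value]}] (= value ?v)]
        =>
        (when (< ?v 0)
          (insert! {:type :alert :value ?v})))

      ;; Zero-arg arity: the rule map, now carrying its :handler.
      (cold-alert)

      ;; Two-arg arity: the compiled RHS (token + env); normally only
      ;; the engine invokes this one.
      )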
diff --git a/pom.xml b/pom.xml
index 145a7b93..9bdc082c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.3.2
+ 1.4.0-SNAPSHOT
- 1.3.2
+ 1.4.0
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index 6e01bb40..86eb495c 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -155,8 +155,9 @@
(or (:query (meta resolved))
(:rule (meta resolved))) [@resolved]
- ;; The symbol refernces a sequence, so return it.
- (sequential? @resolved) @resolved
+ ;; The symbol references a sequence, so ensure we load all sources.
+ (sequential? @resolved)
+ (mapcat com/load-rules-from-source @resolved)
:else
(throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym}))))
@@ -259,12 +260,10 @@
(:rhs production)
(:env production))
name-with-meta (vary-meta name assoc :rule true :doc doc)] ;;; The compiled RHS
- `(do
- (declare ~name-with-meta)
- (defn ~name-with-meta
- ([]
- (assoc ~rule :handler #'~name-with-meta))
- (~@(drop 2 rule-handler))))))
+ `(defn ~name-with-meta
+ ([]
+ (assoc ~rule :handler #'~name-with-meta))
+ (~@(drop 2 rule-handler)))))
(defmacro defquery
"Defines a query and stored it in the given var. For instance, a simple query that accepts no
From 8d6ee9ace4bae4bc9f0db566c6d3e44ebb7ae258 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 14:21:04 -0600
Subject: [PATCH 81/87] chore: prepare for snapshot release 1.4.0-SNAPSHOT
---
CHANGELOG.md | 2 +-
pom.xml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f7cab86..2b6ab3af 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,6 +1,6 @@
This is a history of changes to k13labs/clara-rules.
-# 1.4.0
+# 1.4.0-SNAPSHOT
* `defrule` now defines rules as functions with two arities: calling with no args returns the rule map, and the two-arg arity is the compiled RHS.
* `clojure.lang.Fn` now implements `clara.rules.compiler/IRuleSource`, returning a single rule by invoking it like `(a-rule)`.
* add built-in support to serialize `clojure.lang.Var` so that a rule handler var can be serialized correctly.
diff --git a/pom.xml b/pom.xml
index 9bdc082c..d39acb03 100644
--- a/pom.xml
+++ b/pom.xml
@@ -7,7 +7,7 @@
clara-rules
1.4.0-SNAPSHOT
- 1.4.0
+ 1.4.0-SNAPSHOT
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
From 07e0892ae1f802899e86d40d624643a0e7d83228 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Tue, 27 Feb 2024 14:43:04 -0600
Subject: [PATCH 82/87] feat: store handler as symbol instead of var
---
src/main/clojure/clara/rules.clj | 13 +++++++------
src/main/clojure/clara/rules/compiler.clj | 2 +-
src/test/clojure/clara/test_engine.clj | 1 +
3 files changed, 9 insertions(+), 7 deletions(-)
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index 86eb495c..e29c5f9e 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -250,19 +250,20 @@
\"HVAC repairs must include a 27B-6 form.\")))
See the [rule authoring documentation](http://www.clara-rules.org/docs/rules/) for details."
- [name & body]
+ [rule-name & body]
(let [doc (if (string? (first body)) (first body) nil)
- rule (dsl/build-rule name body (meta &form)) ;;; Full rule LHS + RHS
- rule-action (dsl/build-rule-action name body (meta &form)) ;;; Only the RHS
+ rule (dsl/build-rule rule-name body (meta &form)) ;;; Full rule LHS + RHS
+ rule-action (dsl/build-rule-action rule-name body (meta &form)) ;;; Only the RHS
rule-node (com/build-rule-node rule-action) ;;; The Node of the RHS
{:keys [bindings production]} rule-node
- rule-handler (com/compile-action-handler name bindings
+ rule-handler (com/compile-action-handler rule-name bindings
(:rhs production)
(:env production))
- name-with-meta (vary-meta name assoc :rule true :doc doc)] ;;; The compiled RHS
+ name-with-meta (vary-meta rule-name assoc :rule true :doc doc)
+ handler-name (symbol (name (ns-name *ns*)) (name rule-name))] ;;; The compiled RHS
`(defn ~name-with-meta
([]
- (assoc ~rule :handler #'~name-with-meta))
+ (assoc ~rule :handler '~handler-name))
(~@(drop 2 rule-handler)))))
(defmacro defquery
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 7254fc71..0c18ce4e 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -1531,7 +1531,7 @@
(if expr-cache
(let [cache-key (str (md5-hash expr) (md5-hash compilation-ctx))
compilation-ctx (assoc compilation-ctx :cache-key cache-key)
- compiled-handler (some-> compilation-ctx :compile-ctx :production :handler deref)
+ compiled-handler (some-> compilation-ctx :compile-ctx :production :handler resolve)
compiled-expr (or compiled-handler
(cache/lookup expr-cache cache-key))]
(if compiled-expr
diff --git a/src/test/clojure/clara/test_engine.clj b/src/test/clojure/clara/test_engine.clj
index 2023d6ba..5a61e49e 100644
--- a/src/test/clojure/clara/test_engine.clj
+++ b/src/test/clojure/clara/test_engine.clj
@@ -12,6 +12,7 @@
[criterium.core :refer [report-result
with-progress-reporting
quick-benchmark]]))
+
(defrule test-slow-rule-1
"this rule does some async work using go block"
[:number [{:keys [value]}]
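
The net effect of storing the handler as a symbol is that the compiler resolves it lazily instead of dereferencing a captured var. In miniature, with an illustrative rule name:

    (comment
      ;; What the rule map carries after this change...
      (def a-rule {:name "user/cold-alert"
                   :handler 'user/cold-alert})

      ;; ...and how the compiler turns it back into an invokable
      ;; handler (the var created by defrule).
      (some-> a-rule :handler resolve))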
From b27dc9e77cf9334c5a87421367106b31fcd2afc1 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Wed, 28 Feb 2024 22:11:42 -0600
Subject: [PATCH 83/87] feat: add coverage test to verify coverage works
---
.github/workflows/clojure.yml | 2 ++
Makefile | 4 +++
deps.edn | 4 ++-
src/test/clojure/clara/coverage_ruleset.clj | 14 +++++++++++
src/test/clojure/clara/test_coverage.clj | 14 +++++++++++
tests.edn | 27 ++++++++++++++++++++-
6 files changed, 63 insertions(+), 2 deletions(-)
create mode 100644 src/test/clojure/clara/coverage_ruleset.clj
create mode 100644 src/test/clojure/clara/test_coverage.clj
diff --git a/.github/workflows/clojure.yml b/.github/workflows/clojure.yml
index 76df1979..b13d14be 100644
--- a/.github/workflows/clojure.yml
+++ b/.github/workflows/clojure.yml
@@ -25,6 +25,8 @@ jobs:
cli: 1.11.1.1429
- name: Run tests
run: make test
+ - name: Run coverage tests
+ run: make test-coverage
- name: Run generative tests
run: make test-generative
- name: Run clj-kondo linter
diff --git a/Makefile b/Makefile
index 0acebeff..5f9af981 100644
--- a/Makefile
+++ b/Makefile
@@ -14,6 +14,10 @@ repl: compile-test-java
test: compile-test-java
clojure -M:dev:test:runner --focus :unit --reporter kaocha.report/documentation --no-capture-output
+test-coverage: compile-test-java
+ clojure -M:dev:test:runner --focus :coverage --reporter kaocha.report/documentation --no-capture-output
+ jq '.coverage["clara/coverage_ruleset.clj"]|join("_")' target/coverage/codecov.json | grep "_1___1____1_0_1__1__" || (echo "Unexpected coverage output for clara.coverage-ruleset." && exit 1)
+
test-generative: compile-test-java
clojure -M:dev:test:runner --focus :generative --reporter kaocha.report/documentation --no-capture-output
diff --git a/deps.edn b/deps.edn
index 85740a52..db0dea05 100644
--- a/deps.edn
+++ b/deps.edn
@@ -24,7 +24,9 @@
:main-opts ["-m" "clj-kondo.main"]}
:test {:extra-paths ["src/test/clojure" "target/test/classes"]
- :extra-deps {lambdaisland/kaocha {:mvn/version "1.80.1274"}
+ :extra-deps {lambdaisland/kaocha {:mvn/version "1.80.1274"}
+ lambdaisland/kaocha-junit-xml {:mvn/version "1.17.101"}
+ lambdaisland/kaocha-cloverage {:mvn/version "1.1.89"}
org.clojure/test.check {:mvn/version "1.1.1"}
pjstadig/humane-test-output {:mvn/version "0.10.0"}}}
diff --git a/src/test/clojure/clara/coverage_ruleset.clj b/src/test/clojure/clara/coverage_ruleset.clj
new file mode 100644
index 00000000..ede4bba9
--- /dev/null
+++ b/src/test/clojure/clara/coverage_ruleset.clj
@@ -0,0 +1,14 @@
+(ns clara.coverage-ruleset
+ (:require [clara.rules :refer [defrule defquery insert!]]))
+
+(defrule simple-rule-with-rhs
+ [:weather [{:keys [temperature]}]
+ (= temperature ?temperature)]
+ =>
+ (if (< ?temperature 50)
+ (insert! {:type :climate :label "Cold"})
+ (insert! {:type :climate :label "Warm"})))
+
+(defquery climate-query
+ []
+ [?result <- :climate])
diff --git a/src/test/clojure/clara/test_coverage.clj b/src/test/clojure/clara/test_coverage.clj
new file mode 100644
index 00000000..2a079a1d
--- /dev/null
+++ b/src/test/clojure/clara/test_coverage.clj
@@ -0,0 +1,14 @@
+(ns ^:coverage clara.test-coverage
+ (:require [clojure.test :refer [deftest testing is]]
+ [clara.rules :refer [mk-session insert fire-rules query]]
+ [clara.coverage-ruleset]))
+
+(deftest test-coverage
+ (testing "run the rules"
+ (let [result (-> (mk-session 'clara.coverage-ruleset :fact-type-fn :type)
+ (insert {:type :weather
+ :temperature 75})
+ (fire-rules)
+ (query clara.coverage-ruleset/climate-query))]
+ (is (= result [{:?result {:type :climate
+ :label "Warm"}}])))))
diff --git a/tests.edn b/tests.edn
index 0023dd3a..c1bcc385 100644
--- a/tests.edn
+++ b/tests.edn
@@ -3,15 +3,40 @@
:plugins [:kaocha.plugin/profiling
:kaocha.plugin/gc-profiling
:kaocha.plugin/print-invocations
+ :kaocha.plugin/junit-xml
+ :kaocha.plugin/cloverage
:kaocha.plugin/hooks
:preloads]
+ :kaocha.plugin.junit-xml/target-file "target/junit.xml"
+ :kaocha.plugin.junit-xml/add-location-metadata? true
+ :cloverage/opts {:ns-exclude-regex []
+ :text? false
+ :lcov? false
+ :high-watermark 80
+ :fail-threshold 0
+ :output "target/coverage"
+ :low-watermark 50
+ :src-ns-path ["src/main/clojure"
+ "src/test/clojure"]
+ :ns-regex ["clara.coverage-ruleset"]
+ :summary? true
+ :coveralls? false
+ :emma-xml? false
+ :html? true
+ :nop? false
+ :codecov? true}
:kaocha.hooks/pre-report [clara.test-setup/defuse-zero-assertions]
:kaocha.plugin.preloads/ns-names [clara.test-setup]
:tests [{:id :unit
:kaocha/source-paths ["src/main/clojure"]
:kaocha/test-paths ["src/test/clojure"]
:ns-patterns [".*"]
- :skip-meta [:generative]}
+ :skip-meta [:generative :coverage]}
+ {:id :coverage
+ :kaocha/source-paths ["src/main/clojure"]
+ :kaocha/test-paths ["src/test/clojure"]
+ :ns-patterns [".*"]
+ :focus-meta [:coverage]}
{:id :generative
:kaocha/source-paths ["src/main/clojure"]
:kaocha/test-paths ["src/test/clojure"]
From 4b77d7570cb4a9b614677cdef3f6e24db3499fb7 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Fri, 8 Mar 2024 15:54:59 -0600
Subject: [PATCH 84/87] feat: upgrade clojure to 1.11.2 and fix clj-kondo
linter
---
CHANGELOG.md | 4 ++++
.../clara/rules/hooks/clara_rules.clj_kondo | 6 ++++--
deps.edn | 2 +-
pom.xml | 6 +++---
4 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2af77d23..8984b930 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
This is a history of changes to k13labs/clara-rules.
+# 1.3.3
+* Upgrade to clojure 1.11.2 to fix `CVE-2024-22871`, even though it does not really affect clara-rules.
+* Add clj-kondo linter updates to fix a bad docstring expression.
+
# 1.3.2
* Enhance memory add-activations implementation by replacing get/set with compute!
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
index 86c2ab60..8be16685 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -232,6 +232,7 @@
fn-node (api/list-node
(list
(api/token-node 'fn)
+ (api/token-node 'query-fn)
input-args
production-result))
new-node (api/map-node
@@ -286,7 +287,7 @@
(api/vector-node
(vec production-output))))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
+ (cond-> (list (api/token-node 'fn) production-name)
production-docs (concat [production-docs])
:always (concat [input-args])
production-opts (concat [production-opts])
@@ -337,6 +338,7 @@
fn-node (api/list-node
(list
(api/token-node 'fn)
+ (api/token-node 'rule-fn)
input-args
production-result))
new-node (api/map-node
@@ -384,7 +386,7 @@
(vec production-output))
body-seq))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
+ (cond-> (list (api/token-node 'fn) production-name)
production-docs (concat [production-docs])
:always (concat [input-args])
production-opts (concat [production-opts])
diff --git a/deps.edn b/deps.edn
index 85740a52..321bb4a7 100644
--- a/deps.edn
+++ b/deps.edn
@@ -2,7 +2,7 @@
:deps/prep-lib {:alias :build
:fn compile-main-java
:ensure "target/main/classes"}
- :deps {org.clojure/clojure {:mvn/version "1.10.3"}
+ :deps {org.clojure/clojure {:mvn/version "1.11.2"}
org.clojure/core.cache {:mvn/version "1.0.225"}
org.clj-commons/digest {:mvn/version "1.4.100"}
com.github.k13labs/futurama {:mvn/version "1.0.2"}
diff --git a/pom.xml b/pom.xml
index 145a7b93..3333c7f4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,9 +5,9 @@
com.github.k13labs
clara-rules
clara-rules
- 1.3.2
+ 1.3.3
- 1.3.2
+ 1.3.3
https://github.com/k13labs/clara-rules
scm:git:git://github.com/k13labs/clara-rules.git
scm:git:ssh://git@github.com/k13labs/clara-rules.git
@@ -22,7 +22,7 @@
org.clojure
clojure
- 1.10.3
+ 1.11.2
org.clojure
From bee29ff71dbfe1b80e695efd597d117a89d3216f Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 10 Mar 2024 14:31:08 -0500
Subject: [PATCH 85/87] feat: linter fixes
---
.../clara/rules/hooks/clara_rules.clj_kondo | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
index 86c2ab60..8be16685 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
+++ b/clj-kondo/clj-kondo.exports/clara/rules/hooks/clara_rules.clj_kondo
@@ -232,6 +232,7 @@
fn-node (api/list-node
(list
(api/token-node 'fn)
+ (api/token-node 'query-fn)
input-args
production-result))
new-node (api/map-node
@@ -286,7 +287,7 @@
(api/vector-node
(vec production-output))))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
+ (cond-> (list (api/token-node 'fn) production-name)
production-docs (concat [production-docs])
:always (concat [input-args])
production-opts (concat [production-opts])
@@ -337,6 +338,7 @@
fn-node (api/list-node
(list
(api/token-node 'fn)
+ (api/token-node 'rule-fn)
input-args
production-result))
new-node (api/map-node
@@ -384,7 +386,7 @@
(vec production-output))
body-seq))
fn-node (api/list-node
- (cond-> (list (api/token-node 'fn))
+ (cond-> (list (api/token-node 'fn) production-name)
production-docs (concat [production-docs])
:always (concat [input-args])
production-opts (concat [production-opts])
From b92f791531eb696976f9797bf642fba5e1208574 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 10 Mar 2024 15:07:24 -0500
Subject: [PATCH 86/87] feat: implement hierarchy loaders, fact loaders, update
deps (#2)
* wip: hierarchy work in progress
* feat: more progress implementing def'd hierarchies and facts
* fix: fix rule loading tests and fact loading handling of functions
* chore: update deps
* feat: linter updates and add defdata macro
---
CHANGELOG.md | 4 +
.../clj-kondo.exports/clara/rules/config.edn | 2 +
deps.edn | 34 ++---
dev/user.clj | 42 +++++-
pom.xml | 34 ++---
src/main/clojure/clara/rules.clj | 129 ++++++++++++++++--
src/main/clojure/clara/rules/compiler.clj | 116 ++++++++++++++--
src/main/clojure/clara/rules/hierarchy.clj | 85 ++++++++++++
...productions.clj => test_clear_ns_vars.clj} | 24 ++--
src/test/clojure/clara/test_rules.clj | 2 +-
10 files changed, 397 insertions(+), 75 deletions(-)
create mode 100644 src/main/clojure/clara/rules/hierarchy.clj
rename src/test/clojure/clara/{test_clear_ns_productions.clj => test_clear_ns_vars.clj} (71%)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2b6ab3af..9861ca0e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,14 @@
This is a history of changes to k13labs/clara-rules.
# 1.4.0-SNAPSHOT
+* include linter fixes for `defrule` and `defquery`, which were not processing docstrings correctly.
* `defrule` now defines rules as functions with two arities: calling with no args returns the rule map, and the two-arg arity is the compiled RHS.
* `clojure.lang.Fn` now implements `clara.rules.compiler/IRuleSource`, returning a single rule by invoking it like `(a-rule)`.
* add built-in support to serialize `clojure.lang.Var` so that a rule handler var can be serialized correctly.
* add function `clara.rules.compiler/load-rules-from-source` to simplify loading rules.
+* add `defhierarchy` macro to define hierarchies of facts, making it easy to establish parent/child relationships.
+* add `defdata` macro to define facts and collections of facts in namespaces, making it easy to embed and insert them during mk-session.
+* rename `clear-ns-productions!` to `clear-ns-vars!` since there are now ns-installed vars that are not productions.
# 1.3.2
* Enhance memory add-activations implementation by replacing get/set with compute!
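
The dev/user.clj hunk further down in this patch shows the new defhierarchy and defdata macros in context; in miniature, assuming the same refer style used there (the food facts are made up):

    (defhierarchy food-groups
      ;; parent/child fact-type relationships, established at session build time
      (derive! :food/apple :food/fruit)
      (derive! :food/fruit :food/edible))

    (defdata pantry
      ;; a single fact or a collection of facts; inserted during mk-session
      [{:type :food/apple :price 1}
       {:type :food/apple :price 2}])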
diff --git a/clj-kondo/clj-kondo.exports/clara/rules/config.edn b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
index f402225d..c6c580c3 100644
--- a/clj-kondo/clj-kondo.exports/clara/rules/config.edn
+++ b/clj-kondo/clj-kondo.exports/clara/rules/config.edn
@@ -1,4 +1,6 @@
{:lint-as {clara.rules/defsession clojure.core/def
+ clara.rules/defhierarchy clojure.core/def
+ clara.rules/defdata clojure.core/def
clara.rules.platform/eager-for clojure.core/for
clara.rules.platform/compute-for clojure.core/for}
:hooks {:analyze-call {clara.rules/defquery hooks.clara-rules/analyze-defquery-macro
diff --git a/deps.edn b/deps.edn
index db0dea05..01b453fc 100644
--- a/deps.edn
+++ b/deps.edn
@@ -2,41 +2,41 @@
:deps/prep-lib {:alias :build
:fn compile-main-java
:ensure "target/main/classes"}
- :deps {org.clojure/clojure {:mvn/version "1.10.3"}
- org.clojure/core.cache {:mvn/version "1.0.225"}
+ :deps {org.clojure/clojure {:mvn/version "1.11.2"}
+ org.clojure/core.cache {:mvn/version "1.1.234"}
org.clj-commons/digest {:mvn/version "1.4.100"}
- com.github.k13labs/futurama {:mvn/version "1.0.2"}
- com.cnuernber/ham-fisted {:mvn/version "2.016"}
+ com.github.k13labs/futurama {:mvn/version "1.0.3"}
+ com.cnuernber/ham-fisted {:mvn/version "2.017"}
prismatic/schema {:mvn/version "1.4.1"}
- org.clojure/data.fressian {:mvn/version "1.0.0"}}
+ org.clojure/data.fressian {:mvn/version "1.1.0"}}
;for more examples of aliases see: https://github.com/seancorfield/dot-clojure
:aliases
{:build {:paths ["tool"]
- :deps {io.github.clojure/tools.build {:mvn/version "0.9.6"}}
+ :deps {io.github.clojure/tools.build {:mvn/version "0.10.0"}}
:ns-default build}
:dev {:extra-paths ["dev"]
:extra-deps {reloaded.repl/reloaded.repl {:mvn/version "0.2.4"}
- org.clojure/math.combinatorics {:mvn/version "0.1.3"}
+ org.clojure/math.combinatorics {:mvn/version "0.3.0"}
criterium/criterium {:mvn/version "0.4.6"}}}
- :clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2023.12.15"}}
+ :clj-kondo {:extra-deps {clj-kondo/clj-kondo {:mvn/version "2024.03.05"}}
:main-opts ["-m" "clj-kondo.main"]}
:test {:extra-paths ["src/test/clojure" "target/test/classes"]
- :extra-deps {lambdaisland/kaocha {:mvn/version "1.80.1274"}
+ :extra-deps {lambdaisland/kaocha {:mvn/version "1.87.1366"}
lambdaisland/kaocha-junit-xml {:mvn/version "1.17.101"}
lambdaisland/kaocha-cloverage {:mvn/version "1.1.89"}
org.clojure/test.check {:mvn/version "1.1.1"}
- pjstadig/humane-test-output {:mvn/version "0.10.0"}}}
+ pjstadig/humane-test-output {:mvn/version "0.11.0"}}}
:runner {:main-opts ["-e" "(println \"warn-on-reflection =\" (set! *warn-on-reflection* true))"
"-m" "kaocha.runner"]
:exec-fn kaocha.runner/exec-fn}
- :repl {:extra-deps {nrepl/nrepl {:mvn/version "1.1.0"}
- cider/cider-nrepl {:mvn/version "0.44.0"}}
+ :repl {:extra-deps {nrepl/nrepl {:mvn/version "1.1.1"}
+ cider/cider-nrepl {:mvn/version "0.46.0"}}
:main-opts ["-e" "(println \"warn-on-reflection =\" (set! *warn-on-reflection* true))"
"-m" "nrepl.cmdline" "--interactive"
"--middleware" "[\"cider.nrepl/cider-middleware\"]"]}
@@ -47,20 +47,16 @@
:format-check {:extra-deps {cljfmt/cljfmt {:mvn/version "0.9.2"}}
:main-opts ["-m" "cljfmt.main" "check" "src" "dev"]}
- :outdated {:extra-deps {olical/depot {:mvn/version "2.3.0"}
- rewrite-clj/rewrite-clj {:mvn/version "0.6.1"}}
- :main-opts ["-m" "depot.outdated.main"]}
-
- :jar {:replace-deps {com.github.seancorfield/depstar {:mvn/version "2.0.216"}}
+ :jar {:replace-deps {com.github.seancorfield/depstar {:mvn/version "2.1.303"}}
:exec-fn hf.depstar/jar
:exec-args {:jar "build/clara-rules.jar"}}
- :install-maven {:extra-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :install-maven {:extra-deps {slipset/deps-deploy {:mvn/version "0.2.2"}}
:exec-fn deps-deploy.deps-deploy/deploy
:exec-args {:installer :local
:artifact "build/clara-rules.jar"}}
- :deploy-maven {:replace-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :deploy-maven {:replace-deps {slipset/deps-deploy {:mvn/version "0.2.2"}}
:exec-fn deps-deploy.deps-deploy/deploy
:exec-args {:installer :remote
:artifact "build/clara-rules.jar"}}}
diff --git a/dev/user.clj b/dev/user.clj
index 404cafb1..965ddfcd 100644
--- a/dev/user.clj
+++ b/dev/user.clj
@@ -1,9 +1,13 @@
(ns user
+ (:refer-clojure :exclude [derive underive])
(:require [criterium.core :refer [report-result
quick-benchmark] :as crit]
[clara.rules.platform :refer [compute-for]]
[clojure.core.async :refer [go timeout "foobar"))
+(defhierarchy foobar
+ (derive! :thing/foo :thing/that)
+ (doseq [x (range 20)]
+ (derive! [:thing/foo (- 20 x)] [:thing/that (- 20 x)]))
+ (derive! :thing/bar :thing/that))
+
+(defrule return-a-thing
+ [:thing/that [{:keys [value]}] (= value ?value)]
+ =>
+ (insert! {:type :thing/result
+ :value ?value}))
+
+(defquery query-a-thing
+ []
+ [?output <- :thing/result])
+
+(defdata foo
+ {:type :thing/foo
+ :value 1})
+
+(defdata bar
+ [{:type :thing/bar
+ :value 2}
+ {:type :thing/bar
+ :value 3}])
+
+(time
+ (-> (mk-session 'user :fact-type-fn :type)
+ (fire-rules)
+ (query query-a-thing)))
+
(def session-cache
(cache/lru-cache-factory {}))
@@ -69,7 +105,7 @@
~@fact-rules))))
(comment
- (clear-ns-productions!)
+ (clear-ns-vars!)
(mk-types 2500)
(def rules
(mk-rules 2500))
@@ -81,7 +117,7 @@
(time
(mk-session 'user [(conj rules {:ns-name (ns-name *ns*)
- :lhs [{:type :foobar16
+ :lhs [{:type :foobar1
:constraints []}]
:rhs `(println ~(str :foobar))})]
:cache session-cache
diff --git a/pom.xml b/pom.xml
index d39acb03..7437a30c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -22,37 +22,37 @@
org.clojure
clojure
- 1.10.3
+ 1.11.2
+
+
+ com.cnuernber
+ ham-fisted
+ 2.017
org.clojure
- core.cache
- 1.0.225
+ data.fressian
+ 1.1.0
- org.clj-commons
- digest
- 1.4.100
+ prismatic
+ schema
+ 1.4.1
com.github.k13labs
futurama
- 1.0.2
+ 1.0.3
- com.cnuernber
- ham-fisted
- 2.016
-
-
- prismatic
- schema
- 1.4.1
+ org.clj-commons
+ digest
+ 1.4.100
org.clojure
- data.fressian
- 1.0.0
+ core.cache
+ 1.1.234
diff --git a/src/main/clojure/clara/rules.clj b/src/main/clojure/clara/rules.clj
index e29c5f9e..18135862 100644
--- a/src/main/clojure/clara/rules.clj
+++ b/src/main/clojure/clara/rules.clj
@@ -1,6 +1,7 @@
(ns clara.rules
"Forward-chaining rules for Clojure. The primary API is in this namespace."
(:require [clara.rules.engine :as eng]
+ [clara.rules.hierarchy :as hierarchy]
[clara.rules.platform :as platform]
[clara.rules.compiler :as com]
[clara.rules.dsl :as dsl]))
@@ -140,6 +141,72 @@
[(afn)]))
(extend-type clojure.lang.Symbol
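+ ;; A symbol can act as a source of facts and hierarchies as well as rules: a qualified
+ ;; symbol points at a single var, while an unqualified symbol is treated as a namespace.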
+ com/IFactSource
+ (load-facts [sym]
+ ;; Find the facts interned in the namespace and load them in definition order.
+ (if (namespace sym)
+ ;; The symbol is qualified, so load facts from the qualified symbol.
+ (let [resolved (resolve sym)]
+ (when (nil? resolved)
+ (throw (ex-info (str "Unable to resolve fact source: " sym) {:sym sym})))
+
+ (cond
+ ;; The symbol references a fact, so just return it
+ (:fact (meta resolved))
+ (com/load-facts-from-source @resolved)
+
+ ;; The symbol references a sequence, so ensure we load all sources.
+ (sequential? @resolved)
+ (mapcat com/load-facts-from-source @resolved)
+
+ :else
+ []))
+
+ ;; The symbol is not qualified, so treat it as a namespace.
+ (->> (ns-interns sym)
+ (vals) ; Get the references in the namespace.
+ (filter var?)
+ (filter (comp (some-fn :fact :fact-seq) meta)) ; Filter down to facts and fact seqs.
+ ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
+ ;; it is possible that they won't have :line metadata, so we have a default of 0.
+ (sort (fn [v1 v2]
+ (compare (or (:line (meta v1)) 0)
+ (or (:line (meta v2)) 0))))
+ (mapcat com/load-facts-from-source))))
+ com/IHierarchySource
+ (load-hierarchies [sym]
+ ;; Find the hierarchies interned in the namespace and load them in definition order.
+ (if (namespace sym)
+ ;; The symbol is qualified, so load hierarchies from the qualified symbol.
+ (let [resolved (resolve sym)]
+ (when (nil? resolved)
+ (throw (ex-info (str "Unable to resolve hierarchy source: " sym) {:sym sym})))
+
+ (cond
+ ;; The symbol references a hierarchy, so just return it
+ (:hierarchy (meta resolved))
+ (com/load-hierarchies-from-source @resolved)
+
+ ;; The symbol references a sequence, so ensure we load all sources.
+ (sequential? @resolved)
+ (mapcat com/load-hierarchies-from-source @resolved)
+
+ :else
+ []))
+
+ ;; The symbol is not qualified, so treat it as a namespace.
+ (->> (ns-interns sym)
+ (vals) ; Get the references in the namespace.
+ (filter var?)
+ (filter (comp (some-fn :hierarchy :hierarchy-seq) meta)) ; Filter down to hierarchies and hierarchy seqs.
+ ;; If definitions are created dynamically (i.e. are not reflected in an actual code file)
+ ;; it is possible that they won't have :line metadata, so we have a default of 0.
+ (sort (fn [v1 v2]
+ (compare (or (:line (meta v1)) 0)
+ (or (:line (meta v2)) 0))))
+ (mapcat com/load-hierarchies-from-source))))
com/IRuleSource
(load-rules [sym]
;; Find the rules and queries in the namespace, shred them,
@@ -151,16 +218,17 @@
(throw (ex-info (str "Unable to resolve rule source: " sym) {:sym sym})))
(cond
- ;; The symbol references a rule or query, so just return it
+ ;; The symbol references a rule or query, so just load it
(or (:query (meta resolved))
- (:rule (meta resolved))) [@resolved]
+ (:rule (meta resolved)))
+ (com/load-rules-from-source @resolved)
;; The symbol references a sequence, so ensure we load all sources.
(sequential? @resolved)
(mapcat com/load-rules-from-source @resolved)
:else
- (throw (ex-info (str "The source referenced by " sym " is not valid.") {:sym sym}))))
+ []))
;; The symbol is not qualified, so treat it as a namespace.
(->> (ns-interns sym)
@@ -267,7 +335,7 @@
(~@(drop 2 rule-handler)))))
(defmacro defquery
- "Defines a query and stored it in the given var. For instance, a simple query that accepts no
+ "Defines a query and stores it in the given var. For instance, a simple query that accepts no
parameters would look like this:
(defquery check-job
@@ -281,16 +349,55 @@
`(def ~(vary-meta name assoc :query true :doc doc)
~(dsl/build-query name body (meta &form)))))
-(defmacro clear-ns-productions!
+(defn derive!
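+ "Establishes a child -> parent relationship in the hierarchy currently being built.
+ Must be called within the body of a `defhierarchy` form, which binds the hierarchy atom."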
+ [child parent]
+ (hierarchy/derive child parent))
+
+(defn underive!
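+ "Removes a child -> parent relationship from the hierarchy currently being built.
+ Must be called within the body of a `defhierarchy` form, which binds the hierarchy atom."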
+ [child parent]
+ (hierarchy/underive child parent))
+
+(defmacro defhierarchy
+ "Defines a hierarchy and stores it in the given var. For instance, a simple hierarchy that adds
+ several child->parent relationships would look like this:
+ (defhierarchy order-types
+ \"Defines several order types\"
+ (derive! :order/hvac :order/service)
+ (derive! :order/plumber :order/service)
+ (underive! :order/cinema :order/service))
+ See the [hierarchy authoring documentation](http://www.clara-rules.org)"
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :hierarchy true :doc doc)
+ (binding [hierarchy/*hierarchy* (atom (hierarchy/make-hierarchy))]
+ ~@body))))
+
+(defmacro defdata
+ "Defines a data fact which is stored in the given var. For instance, the following fact is simply a
+ map which is then inserted into the session when the namespace is loaded.
+
+ (defdata default-temperature
+ (Cold. 32))"
+ [name & body]
+ (let [doc (if (string? (first body)) (first body) nil)]
+ `(def ~(vary-meta name assoc :fact true :doc doc)
+ ~@body)))
+
+(defmacro clear-ns-vars!
"Ensures that any rule/query definitions which have been cached will be cleared from the associated namespace.
Rule and query definitions can be cached such that if their definitions are not explicitly overwritten with the same
name (i.e. deleted or renamed), the stale definitions can be loaded into a session using that namespace on
- reload via the REPL or mechanism such as figwheel. Place (clear-ns-productions!) at the top of any namespace
+ reload via the REPL or mechanisms such as figwheel. Place (clear-ns-vars!) at the top of any namespace
defining rules/queries to ensure the cache is cleared properly."
[]
- (let [production-syms (->> (ns-interns *ns*)
- (filter (comp var? second))
- (filter (comp (some-fn :rule :query :production-seq) meta second)) ; Filter down to rules, queries, and seqs of both.
- (map first))] ; Take the symbols for the rule/query vars
- (doseq [psym production-syms]
+ (let [clara-syms (->> (ns-interns *ns*)
+ (filter (comp var? second))
+ (filter (comp (some-fn :rule
+ :query
+ :hierarchy
+ :fact
+ :fact-seq
+ :production-seq) meta second)) ; Filter down to rules, queries, facts, hierarchies, and seqs of them.
+ (map first))] ; Take the symbols for each var
+ (doseq [psym clara-syms]
(ns-unmap *ns* psym))))
diff --git a/src/main/clojure/clara/rules/compiler.clj b/src/main/clojure/clara/rules/compiler.clj
index 0c18ce4e..e7ae457b 100644
--- a/src/main/clojure/clara/rules/compiler.clj
+++ b/src/main/clojure/clara/rules/compiler.clj
@@ -5,6 +5,7 @@
(:require [clara.rules.engine :as eng]
[clara.rules.schema :as schema]
[clara.rules.platform :refer [jeq-wrap] :as platform]
+ [clara.rules.hierarchy :as hierarchy]
[clojure.core.cache.wrapped :as cache]
[clj-commons.digest :as digest]
[ham-fisted.api :as hf]
@@ -66,6 +67,12 @@
(defprotocol IRuleSource
(load-rules [source]))
+(defprotocol IFactSource
+ (load-facts [source]))
+
+(defprotocol IHierarchySource
+ (load-hierarchies [source]))
+
(sc/defschema BetaNode
"These nodes exist in the beta network."
(sc/pred (comp #{ProductionNode
@@ -109,7 +116,7 @@
(defn- md5-hash
"Returns the md5 digest of the given data after converting it to a string"
[x]
- (digest/md5 ^String (str x)))
+ (digest/md5 ^String (pr-str x)))
(defn- is-variable?
"Returns true if the given expression is a variable (a symbol prefixed by ?)"
@@ -181,6 +188,8 @@
(def ^:dynamic *compile-ctx* nil)
+(def ^:dynamic *hierarchy* nil)
+
(defn try-eval
"Evals the given `expr`. If an exception is thrown, it is caught and an
ex-info exception is thrown with more details added. Uses *compile-ctx*
@@ -1933,6 +1942,15 @@
(recur))))
(hf/persistent! return-list))))))
+(defn create-ancestors-fn
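+ "Builds the ancestors function for the session: when both an :ancestors-fn option and a
+ :hierarchy are provided their results are unioned, otherwise whichever is present is used,
+ falling back to clojure.core/ancestors."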
+ [{:keys [ancestors-fn
+ hierarchy]}]
+ (let [hierarchy-fn (when hierarchy
+ (partial ancestors hierarchy))]
+ (if (and ancestors-fn hierarchy-fn)
+ (comp (partial apply set/union) (juxt ancestors-fn hierarchy-fn))
+ (or ancestors-fn hierarchy-fn ancestors))))
+
(sc/defn build-network
"Constructs the network from compiled beta tree and condition functions."
[id-to-node :- schema/MutableLongHashMap
@@ -2048,6 +2066,7 @@
(sc/defn mk-session*
"Compile the rules into a rete network and return the given session."
[productions :- #{schema/Production}
+ facts :- [sc/Any]
options :- {sc/Keyword sc/Any}]
(validate-names-unique productions)
(let [;; A stateful counter used for unique ids of the nodes of the graph.
@@ -2088,8 +2107,7 @@
type)
;; The ancestors for a logical type uses Clojure's ancestors function unless overridden.
- ancestors-fn (or (get options :ancestors-fn)
- ancestors)
+ ancestors-fn (create-ancestors-fn options)
;; The default is to sort activations in descending order by their salience.
activation-group-sort-fn (eng/options->activation-group-sort-fn options)
@@ -2104,13 +2122,14 @@
get-alphas-fn (:get-alphas-fn rulebase)
- transport (LocalTransport.)]
+ transport (LocalTransport.)
- (eng/assemble {:rulebase rulebase
- :memory (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
- :transport transport
- :listeners (get options :listeners [])
- :get-alphas-fn get-alphas-fn})))
+ session (eng/assemble {:rulebase rulebase
+ :memory (eng/local-memory rulebase transport activation-group-sort-fn activation-group-fn get-alphas-fn)
+ :transport transport
+ :listeners (get options :listeners [])
+ :get-alphas-fn get-alphas-fn})]
+ (eng/insert session facts)))
(defn add-production-load-order
"Adds ::rule-load-order to metadata of productions. Custom DSL's may need to use this if
@@ -2137,8 +2156,70 @@
(var? source)
(load-rules-from-source @source)
+ (:lhs source)
+ [source]
+
+ :else []))
+
+(defn load-facts-from-source
+ "loads the hierarchies from a source if it implements `IRuleSource`, or navigates inside
+ collections to load from vectors, lists, sets, seqs."
+ [source]
+ (cond
+ (u/instance-satisfies? IFactSource source)
+ (load-facts source)
+
+ (or (vector? source)
+ (list? source)
+ (set? source)
+ (seq? source))
+ (mapcat load-facts-from-source source)
+
+ (var? source)
+ (load-facts-from-source @source)
+
+ (fn? source) ;;; source is a rule fn so it can't also be a fact unless explicitly inserted
+ []
+
+ (:hierarchy-data source) ;;; source is a hierarchy so it can't also be a fact unless explicitly inserted
+ []
+
+ (:lhs source) ;;; source is a production so it can't also be a fact unless explicitly inserted
+ []
+
:else [source]))
+(defn load-hierarchies-from-source
+ "loads the hierarchies from a source if it implements `IRuleSource`, or navigates inside
+ collections to load from vectors, lists, sets, seqs."
+ [source]
+ (cond
+ (u/instance-satisfies? IHierarchySource source)
+ (load-hierarchies source)
+
+ (or (vector? source)
+ (list? source)
+ (set? source)
+ (seq? source))
+ (mapcat load-hierarchies-from-source source)
+
+ (var? source)
+ (load-hierarchies-from-source @source)
+
+ (:hierarchy-data source)
+ [source]
+
+ :else []))
+
+(defn- reduce-hierarchy
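+ "Rebuilds a hierarchy by replaying the operations recorded in :hierarchy-data, where each
+ entry is [:d tag parent] for a derive or [:u tag parent] for an underive."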
+ [h {:keys [hierarchy-data]}]
+ (reduce (fn apply-op
+ [h [op tag parent]]
+ (case op
+ :d (hierarchy/derive h tag parent)
+ :u (hierarchy/underive h tag parent)
+ (throw (ex-info "Unsupported operation building hierarchy" {:op op})))) h hierarchy-data))
+
(defn mk-session
"Creates a new session using the given rule source. The resulting session
is immutable, and can be used with insert, retract, fire-rules, and query functions."
@@ -2152,6 +2233,15 @@
(into (sorted-set-by production-load-order-comp) productions-unique)
;; Store the name of the custom comparator for durability.
{:clara.rules.durability/comparator-name `production-load-order-comp})
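+ ;; Hierarchies may come from the sources themselves (defhierarchy vars) or from the
+ ;; :hierarchy option; they are all combined into a single hierarchy for the session.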
+ hierarchies-loaded (cond->> (mapcat load-hierarchies-from-source sources)
+ (:hierarchy options) (cons (:hierarchy options)))
+ hierarchy (when (seq hierarchies-loaded)
+ (reduce reduce-hierarchy (hierarchy/make-hierarchy) hierarchies-loaded))
+ facts (->> (mapcat load-facts-from-source sources)
+ (vec))
+ options (cond-> options
+ (some? hierarchy)
+ (assoc :hierarchy hierarchy))
options-cache (get options :cache)
session-cache (cond
(true? options-cache)
@@ -2160,9 +2250,11 @@
default-session-cache
:else options-cache)
;;; this is simpler than storing all the productions and options in the cache
- session-key (str (md5-hash productions-sorted) (md5-hash (dissoc options :cache :compiler-cache)))]
+ session-key (str (md5-hash productions-sorted)
+ (md5-hash (dissoc options :cache :compiler-cache))
+ (hash facts))]
(if session-cache
(cache/lookup-or-miss session-cache session-key
(fn do-mk-session [_]
- (mk-session* productions-sorted options)))
- (mk-session* productions-sorted options)))))
+ (mk-session* productions-sorted facts options)))
+ (mk-session* productions-sorted facts options)))))
diff --git a/src/main/clojure/clara/rules/hierarchy.clj b/src/main/clojure/clara/rules/hierarchy.clj
new file mode 100644
index 00000000..ef048653
--- /dev/null
+++ b/src/main/clojure/clara/rules/hierarchy.clj
@@ -0,0 +1,85 @@
+(ns clara.rules.hierarchy
+ (:refer-clojure :exclude [derive
+ underive
+ make-hierarchy])
+ (:require [clojure.core :as core]))
+
+(defn make-hierarchy
+ []
+ (-> (core/make-hierarchy)
+ (assoc :hierarchy-data [])))
+
+(def ^:dynamic *hierarchy* nil)
+
+(defn- derive*
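+ "Like clojure.core/derive against an explicit hierarchy, but without restricting tag and
+ parent to namespace-qualified keywords, symbols, or classes."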
+ [h tag parent]
+ (assert (some? h))
+ (assert (some? tag))
+ (assert (some? parent))
+ (assert (not= tag parent))
+ (let [tp (:parents h)
+ td (:descendants h)
+ ta (:ancestors h)
+ tf (fn do-transform
+ [m source sources target targets]
+ (reduce (fn [ret k]
+ (assoc ret k
+ (reduce conj (get targets k #{}) (cons target (targets target)))))
+ m (cons source (sources source))))]
+ (or
+ (when-not (contains? (tp tag) parent)
+ (when (contains? (ta tag) parent)
+ h)
+ (when (contains? (ta parent) tag)
+ (throw (Exception. (print-str "Cyclic derivation:" parent "has" tag "as ancestor"))))
+ (-> (assoc-in h [:parents tag] (conj (get tp tag #{}) parent))
+ (update :ancestors tf tag td parent ta)
+ (update :descendants tf parent ta tag td)))
+ h)))
+
+(defn- underive*
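+ "Like clojure.core/underive against an explicit hierarchy: removes the tag -> parent edge
+ and rebuilds the remaining relationships from an empty hierarchy."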
+ [h tag parent]
+ (assert (some? h))
+ (assert (some? tag))
+ (assert (some? parent))
+ (assert (not= tag parent))
+ (let [parent-map (:parents h)
+ childs-parents (if (parent-map tag)
+ (disj (parent-map tag) parent) #{})
+ new-parents (if (not-empty childs-parents)
+ (assoc parent-map tag childs-parents)
+ (dissoc parent-map tag))
+ deriv-seq (flatten (map #(cons (key %) (interpose (key %) (val %)))
+ (seq new-parents)))]
+ (if (contains? (parent-map tag) parent)
+ (reduce (fn do-derive
+ [h [t p]]
+ (derive* h t p)) (make-hierarchy)
+ (partition 2 deriv-seq))
+ h)))
+
+(defn derive
+ "Establishes a parent/child relationship between parent and
+ tag. Both tag and parent cannot be null, h must be a hierarchy obtained from make-hierarchy.
+ Unlike `clojure.core/underive`, there is no restriction
+ on the type of values that tag and parent can be.
+ When only two tag and parent are passed, this function modifies the *hierarchy* atom."
+ ([tag parent]
+ (assert *hierarchy* "*hierarchy* must be bound")
+ (swap! *hierarchy* derive tag parent))
+ ([h tag parent]
+ (-> (derive* h tag parent)
+ (update :hierarchy-data conj [:d tag parent]))))
+
+(defn underive
+ "Removes a parent/child relationship between parent and
+ tag. h must be a hierarchy obtained from make-hierarchy.
+ Unlike `clojure.core/underive`, there is no restriction
+ on the type of values that tag and parent can be.
+ When only two tag and parent are passed, this function modifies the *hierarchy* atom."
+ ([tag parent]
+ (assert *hierarchy* "*hierarchy* must be bound")
+ (swap! *hierarchy* underive tag parent))
+ ([h tag parent]
+ (-> (underive* h tag parent)
+ (update :hierarchy-data conj [:u tag parent]))))
diff --git a/src/test/clojure/clara/test_clear_ns_productions.clj b/src/test/clojure/clara/test_clear_ns_vars.clj
similarity index 71%
rename from src/test/clojure/clara/test_clear_ns_productions.clj
rename to src/test/clojure/clara/test_clear_ns_vars.clj
index fa970f7a..0847c4f7 100644
--- a/src/test/clojure/clara/test_clear_ns_productions.clj
+++ b/src/test/clojure/clara/test_clear_ns_vars.clj
@@ -1,7 +1,7 @@
-;;; Tests that clear-ns-productions! correction clears all vars marked as productions from the namespace.
-(ns clara.test-clear-ns-productions
+;;; Tests that clear-ns-vars! correctly clears all vars marked as productions from the namespace.
+(ns clara.test-clear-ns-vars
(:require
- [clara.rules :refer [clear-ns-productions! defquery defrule defsession
+ [clara.rules :refer [clear-ns-vars! defquery defrule defsession
fire-rules insert insert! query]]
[clara.tools.testing-utils :as tu]
[clojure.test :refer [deftest is testing use-fixtures]])
@@ -20,14 +20,14 @@
(def ^:production-seq ns-production-seq-to-be-cleared
[{:doc "Before clearing"
- :name "clara.test-clear-ns-productions/production-seq-to-be-cleared"
+ :name "clara.test-clear-ns-vars/production-seq-to-be-cleared"
:lhs '[{:type :a
:constraints []}]
:rhs '(clara.rules/insert! :before-clearing-seq)}])
-(defsession uncleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
+(defsession uncleared-session 'clara.test-clear-ns-vars :fact-type-fn identity)
-(clear-ns-productions!)
+(clear-ns-vars!)
(defrule rule-after-clearing
[:a]
@@ -41,12 +41,12 @@
(def ^:production-seq production-seq-after-clearing
[{:doc "After clearing"
- :name "clara.test-clear-ns-productions/production-seq-after-clearing"
+ :name "clara.test-clear-ns-vars/production-seq-after-clearing"
:lhs '[{:type :a
:constraints []}]
:rhs '(clara.rules/insert! :after-clearing-seq)}])
-(defsession cleared-session 'clara.test-clear-ns-productions :fact-type-fn identity)
+(defsession cleared-session 'clara.test-clear-ns-vars :fact-type-fn identity)
;;; Then tests validating what productions the respective sessions have.
(deftest cleared?
@@ -54,7 +54,7 @@
(is (= :before-clearing @tu/side-effect-holder))
(reset! tu/side-effect-holder nil))
(let [cleared (-> cleared-session (insert :a) (fire-rules))]
- (testing "cleared-session should not contain any productions before (clear-ns-productions!)"
+ (testing "cleared-session should not contain any productions before (clear-ns-vars!)"
(is (= nil @tu/side-effect-holder))
(is (empty? (query cleared query-before-clearing)))
(is (not-empty (query cleared query-after-clearing))))
@@ -64,6 +64,6 @@
(deftest query-cleared?
(let [uncleared (-> uncleared-session (insert :a) (fire-rules))
cleared (-> cleared-session (insert :a) (fire-rules))]
- (is (not-empty (query uncleared "clara.test-clear-ns-productions/query-to-be-cleared")))
- (is (thrown-with-msg? IllegalArgumentException #"clara.test-clear-ns-productions/query-to-be-cleared"
- (query cleared "clara.test-clear-ns-productions/query-to-be-cleared")))))
+ (is (not-empty (query uncleared "clara.test-clear-ns-vars/query-to-be-cleared")))
+ (is (thrown-with-msg? IllegalArgumentException #"clara.test-clear-ns-vars/query-to-be-cleared"
+ (query cleared "clara.test-clear-ns-vars/query-to-be-cleared")))))
diff --git a/src/test/clojure/clara/test_rules.clj b/src/test/clojure/clara/test_rules.clj
index 748cd06f..d4d53a03 100644
--- a/src/test/clojure/clara/test_rules.clj
+++ b/src/test/clojure/clara/test_rules.clj
@@ -2442,7 +2442,7 @@
{:doc "An extra rule to test for duplicate names."
:name :clara.rules.test-rules-data/is-cold-and-windy-data
:lhs []
- :rhs '(println "I have no meaning outside of this test")}))) {})))
+ :rhs '(println "I have no meaning outside of this test")}))) [] {})))
#_{:clj-kondo/ignore [:unresolved-symbol]}
(deftest test-negation-multiple-children-exception
From 364fa0fc3ae6c6ba2cb5971c8158fa3f676b0e55 Mon Sep 17 00:00:00 2001
From: Jose Gomez
Date: Sun, 10 Mar 2024 19:19:37 -0500
Subject: [PATCH 87/87] chore: update deps
---
deps.edn | 14 +++++++-------
pom.xml | 32 ++++++++++++++++----------------
2 files changed, 23 insertions(+), 23 deletions(-)
diff --git a/deps.edn b/deps.edn
index 321bb4a7..0cd1e257 100644
--- a/deps.edn
+++ b/deps.edn
@@ -3,12 +3,12 @@
:fn compile-main-java
:ensure "target/main/classes"}
:deps {org.clojure/clojure {:mvn/version "1.11.2"}
- org.clojure/core.cache {:mvn/version "1.0.225"}
+ org.clojure/core.cache {:mvn/version "1.1.234"}
org.clj-commons/digest {:mvn/version "1.4.100"}
- com.github.k13labs/futurama {:mvn/version "1.0.2"}
- com.cnuernber/ham-fisted {:mvn/version "2.016"}
+ com.github.k13labs/futurama {:mvn/version "1.0.3"}
+ com.cnuernber/ham-fisted {:mvn/version "2.017"}
prismatic/schema {:mvn/version "1.4.1"}
- org.clojure/data.fressian {:mvn/version "1.0.0"}}
+ org.clojure/data.fressian {:mvn/version "1.1.0"}}
;for more examples of aliases see: https://github.com/seancorfield/dot-clojure
:aliases
@@ -49,16 +49,16 @@
rewrite-clj/rewrite-clj {:mvn/version "0.6.1"}}
:main-opts ["-m" "depot.outdated.main"]}
- :jar {:replace-deps {com.github.seancorfield/depstar {:mvn/version "2.0.216"}}
+ :jar {:replace-deps {com.github.seancorfield/depstar {:mvn/version "2.1.303"}}
:exec-fn hf.depstar/jar
:exec-args {:jar "build/clara-rules.jar"}}
- :install-maven {:extra-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :install-maven {:extra-deps {slipset/deps-deploy {:mvn/version "0.2.2"}}
:exec-fn deps-deploy.deps-deploy/deploy
:exec-args {:installer :local
:artifact "build/clara-rules.jar"}}
- :deploy-maven {:replace-deps {slipset/deps-deploy {:mvn/version "0.1.5"}}
+ :deploy-maven {:replace-deps {slipset/deps-deploy {:mvn/version "0.2.2"}}
:exec-fn deps-deploy.deps-deploy/deploy
:exec-args {:installer :remote
:artifact "build/clara-rules.jar"}}}
diff --git a/pom.xml b/pom.xml
index 3333c7f4..b39ff62b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -24,35 +24,35 @@
 <artifactId>clojure</artifactId>
 <version>1.11.2</version>
 </dependency>
+ <dependency>
+ <groupId>com.cnuernber</groupId>
+ <artifactId>ham-fisted</artifactId>
+ <version>2.017</version>
+ </dependency>
 <dependency>
 <groupId>org.clojure</groupId>
- <artifactId>core.cache</artifactId>
- <version>1.0.225</version>
+ <artifactId>data.fressian</artifactId>
+ <version>1.1.0</version>
 </dependency>
 <dependency>
- <groupId>org.clj-commons</groupId>
- <artifactId>digest</artifactId>
- <version>1.4.100</version>
+ <groupId>prismatic</groupId>
+ <artifactId>schema</artifactId>
+ <version>1.4.1</version>
 </dependency>
 <dependency>
 <groupId>com.github.k13labs</groupId>
 <artifactId>futurama</artifactId>
- <version>1.0.2</version>
+ <version>1.0.3</version>
 </dependency>
 <dependency>
- <groupId>com.cnuernber</groupId>
- <artifactId>ham-fisted</artifactId>
- <version>2.016</version>
- </dependency>
- <dependency>
- <groupId>prismatic</groupId>
- <artifactId>schema</artifactId>
- <version>1.4.1</version>
+ <groupId>org.clj-commons</groupId>
+ <artifactId>digest</artifactId>
+ <version>1.4.100</version>
 </dependency>
 <dependency>
 <groupId>org.clojure</groupId>
- <artifactId>data.fressian</artifactId>
- <version>1.0.0</version>
+ <artifactId>core.cache</artifactId>
+ <version>1.1.234</version>