From 74027575039aff675de21ae1c22e3c82d9e72d00 Mon Sep 17 00:00:00 2001 From: simon-at-fugu Date: Thu, 10 Sep 2020 12:19:02 +0200 Subject: [PATCH 1/5] Try to enable deeptaylor analyzer. Please review :) --- innvestigate/analyzer/__init__.py | 8 ++++---- innvestigate/analyzer/base.py | 3 +++ innvestigate/analyzer/deeptaylor.py | 18 +++++++++--------- .../relevance_based/relevance_rule_base.py | 4 ++-- 4 files changed, 18 insertions(+), 15 deletions(-) diff --git a/innvestigate/analyzer/__init__.py b/innvestigate/analyzer/__init__.py index 2619921c..19991243 100644 --- a/innvestigate/analyzer/__init__.py +++ b/innvestigate/analyzer/__init__.py @@ -44,8 +44,8 @@ from .relevance_based.relevance_analyzer import LRPSequentialCompositeB from .relevance_based.relevance_analyzer import LRPSequentialCompositeAFlat from .relevance_based.relevance_analyzer import LRPSequentialCompositeBFlat -# from .deeptaylor import DeepTaylor -# from .deeptaylor import BoundedDeepTaylor +from .deeptaylor import DeepTaylor +from .deeptaylor import BoundedDeepTaylor # from .wrapper import WrapperBase # from .wrapper import AugmentReduceBase # from .wrapper import GaussianSmoother @@ -106,8 +106,8 @@ "lrp.sequential_preset_b_flat": LRPSequentialPresetBFlat, # Deep Taylor - #"deep_taylor": DeepTaylor, - #"deep_taylor.bounded": BoundedDeepTaylor, + "deep_taylor": DeepTaylor, + "deep_taylor.bounded": BoundedDeepTaylor, # # DeepLIFT # "deep_lift.wrapper": DeepLIFTWrapper, diff --git a/innvestigate/analyzer/base.py b/innvestigate/analyzer/base.py index cf8c9309..6571b870 100644 --- a/innvestigate/analyzer/base.py +++ b/innvestigate/analyzer/base.py @@ -306,6 +306,9 @@ def analyze(self, X, neuron_selection="max_activation", layer_names=None): ret = reverse_map.apply_reverse_map(X, inp, all, neuron_selection=neuron_selection, layer_names=layer_names) ret = self._postprocess_analysis(ret) + if isinstance(ret, list) and len(ret) == 1: + ret = ret[0] + return ret def _postprocess_analysis(self, hm): diff --git a/innvestigate/analyzer/deeptaylor.py b/innvestigate/analyzer/deeptaylor.py index e8164ecf..720566fe 100644 --- a/innvestigate/analyzer/deeptaylor.py +++ b/innvestigate/analyzer/deeptaylor.py @@ -13,7 +13,7 @@ from tensorflow.python.keras.engine.input_layer import InputLayer from . import base -from .relevance_based import relevance_rule as lrp_rules +from .relevance_based import relevance_rule_base as lrp_rules from ..utils.keras import checks as kchecks from ..utils.keras import graph as kgraph @@ -71,7 +71,7 @@ def do_nothing(Xs, Ys, As, reverse_state): self._add_conditional_reverse_mapping( lambda l: (not kchecks.contains_kernel(l) and kchecks.contains_activation(l)), - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_relu", ) @@ -88,18 +88,18 @@ def do_nothing(Xs, Ys, As, reverse_state): # Special layers. 
self._add_conditional_reverse_mapping( kchecks.is_max_pooling, - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_max_pooling", ) self._add_conditional_reverse_mapping( kchecks.is_average_pooling, - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_average_pooling", ) self._add_conditional_reverse_mapping( lambda l: isinstance(l, keras_layers.Add), # Ignore scaling with 0.5 - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_add", ) self._add_conditional_reverse_mapping( @@ -112,7 +112,7 @@ def do_nothing(Xs, Ys, As, reverse_state): keras_layers.SpatialDropout2D, keras_layers.SpatialDropout3D, )), - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_special_layers", ) @@ -133,19 +133,19 @@ def do_nothing(Xs, Ys, As, reverse_state): keras_layers.RepeatVector, keras_layers.Reshape, )), - self._gradient_reverse_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_no_transform", ) return super(DeepTaylor, self)._create_analysis( *args, **kwargs) - def _default_reverse_mapping(self, Xs, Ys, reversed_Ys, reverse_state): + def _default_reverse_mapping(self, layer): """ Block all default mappings. """ raise NotImplementedError( - "Layer %s not supported." % reverse_state["layer"]) + "Layer %s not supported." % layer) def _prepare_model(self, model): """ diff --git a/innvestigate/analyzer/relevance_based/relevance_rule_base.py b/innvestigate/analyzer/relevance_based/relevance_rule_base.py index ba55899d..c771e1a9 100644 --- a/innvestigate/analyzer/relevance_based/relevance_rule_base.py +++ b/innvestigate/analyzer/relevance_based/relevance_rule_base.py @@ -691,8 +691,8 @@ def apply(self, ins, neuron_selection): def wrap_hook(self, ins, neuron_selection): to_low = keras_layers.Lambda(lambda x: x * 0 + self._low) to_high = keras_layers.Lambda(lambda x: x * 0 + self._high) - low = [to_low(x) for x in ins] - high = [to_high(x) for x in ins] + low = tf.convert_to_tensor([to_low(x) for x in ins]) + high = tf.convert_to_tensor([to_high(x) for x in ins]) with tf.GradientTape(persistent=True) as tape: tape.watch(ins) tape.watch(low) From 62867dc4f6ef8bdb737e16ccebbcf3f533a7b1d0 Mon Sep 17 00:00:00 2001 From: simon-at-fugu Date: Sun, 4 Oct 2020 20:00:54 +0200 Subject: [PATCH 2/5] change req. to tensorflow==2.3 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index c07581f0..1a0dbcb8 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ "numpy", "pillow", "scipy", - "tensorflow==2.1", + "tensorflow==2.3", ] setup_requirements = [ From e88edbf3922151e666fcd667a39bc3af2ff0b9cc Mon Sep 17 00:00:00 2001 From: simon-at-fugu Date: Sun, 4 Oct 2020 20:03:04 +0200 Subject: [PATCH 3/5] change req. 
to tensorflow==2.3 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 1a0dbcb8..c3062f4a 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ "numpy", "pillow", "scipy", - "tensorflow==2.3", + "tensorflow>=2.3", ] setup_requirements = [ From 8a48f017a97189b3ff65c6462fa8302d1a62574e Mon Sep 17 00:00:00 2001 From: simon-at-fugu Date: Wed, 30 Dec 2020 16:21:03 +0100 Subject: [PATCH 4/5] replace List comprehensions with tf.map_fn --- innvestigate/utils/keras/functional.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/innvestigate/utils/keras/functional.py b/innvestigate/utils/keras/functional.py index b905d122..b4f6bb11 100644 --- a/innvestigate/utils/keras/functional.py +++ b/innvestigate/utils/keras/functional.py @@ -615,8 +615,8 @@ def boundedrule_explanation(ins, layer_func, layer_func_pos, layer_func_neg, out #print("TRACING bound") to_low = keras_layers.Lambda(lambda x: x * 0 + low_param) to_high = keras_layers.Lambda(lambda x: x * 0 + high_param) - low = [to_low(x) for x in ins] - high = [to_high(x) for x in ins] + low = tf.map_fn(to_low, ins) + high = tf.map_fn(to_high, ins) A = out_func(ins, layer_func) B = out_func(low, layer_func_pos) From 248172c43e26d6de016f5e6ca728ff61c5514989 Mon Sep 17 00:00:00 2001 From: simon-at-fugu Date: Wed, 30 Dec 2020 16:22:12 +0100 Subject: [PATCH 5/5] for testing, replace bn_mapping with gradient_reverse_mapping --- innvestigate/analyzer/deeptaylor.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/innvestigate/analyzer/deeptaylor.py b/innvestigate/analyzer/deeptaylor.py index 04cfef72..9835f1f7 100644 --- a/innvestigate/analyzer/deeptaylor.py +++ b/innvestigate/analyzer/deeptaylor.py @@ -76,13 +76,18 @@ def do_nothing(Xs, Ys, As, reverse_state): ) # Assume conv layer beforehand -> unbounded - bn_mapping = kgraph.apply_mapping_to_fused_bn_layer( - lrp_rules.WSquareRule, - fuse_mode="one_linear", - ) + # bn_mapping = kgraph.apply_mapping_to_fused_bn_layer( + # lrp_rules.WSquareRule, + # fuse_mode="one_linear", + # ) + # self._add_conditional_reverse_mapping( + # kchecks.is_batch_normalization_layer, + # bn_mapping, + # name="deep_taylor_batch_norm", + # ) self._add_conditional_reverse_mapping( kchecks.is_batch_normalization_layer, - bn_mapping, + self._gradient_reverse_mapping(), name="deep_taylor_batch_norm", ) # Special layers.
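
For reference, a minimal usage sketch of the analyzers these patches re-enable. It assumes the usual iNNvestigate entry point innvestigate.create_analyzer() dispatches through the "analyzers" dictionary patched in [PATCH 1/5], and that BoundedDeepTaylor still accepts low/high bounds as in the upstream release; the toy model, input data, and bounds below are placeholders, not part of the patches.

# Sketch only: exercises the re-enabled Deep Taylor analyzers end to end.
# create_analyzer(), the low/high kwargs, and the toy model are assumptions
# based on the upstream iNNvestigate API, not something these patches add.
import numpy as np
import tensorflow as tf
import innvestigate

model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation="relu", input_shape=(10,)),
    tf.keras.layers.Dense(5),
])

x = np.random.rand(2, 10).astype(np.float32)

# Plain Deep Taylor; analyze() follows the signature shown in analyzer/base.py.
analyzer = innvestigate.create_analyzer("deep_taylor", model)
heatmap = analyzer.analyze(x, neuron_selection="max_activation")
print(heatmap.shape)  # expected: (2, 10), one relevance value per input feature

# Bounded variant for inputs known to lie in [low, high], e.g. scaled images;
# this path exercises the BoundedRule changes in relevance_rule_base.py.
bounded = innvestigate.create_analyzer("deep_taylor.bounded", model,
                                       low=0.0, high=1.0)
heatmap_bounded = bounded.analyze(x)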