Describe the bug

The error message in the title is returned when trying to analyze a tf.keras.Model with analyzer.analyze under TensorFlow 2.

Steps to reproduce the bug
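A minimal example along the lines of the cell shown in the traceback below triggers the error. The model definition here is only a placeholder; any compiled tf.keras.Model behaves the same:

```python
import numpy as np
import tensorflow as tf
import innvestigate

# Placeholder model; the exact architecture does not matter for the error.
model = tf.keras.Sequential([tf.keras.layers.Dense(10, input_shape=(10,))])
model.compile(optimizer="adam", loss="mse")

# These are the calls visible in the traceback below.
model.fit(x=np.arange(10).reshape(1, 10), y=np.arange(10).reshape(1, 10))
analyzer = innvestigate.create_analyzer("gradient", model)
analyzer.analyze(np.arange(10).reshape(1, 10))  # raises the RuntimeError under eager execution
```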
Expected behavior

The analyzer shouldn't throw an error. It should work, since v2.0.0 is supposed to support TensorFlow >= 2, which executes eagerly by default. Therefore, the analyzer should make use of tf.GradientTape.
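For a plain gradient analysis, the eager-mode computation I have in mind looks roughly like this (an illustration only, not iNNvestigate's internal API):

```python
import tensorflow as tf

def gradient_analysis(model, x):
    """Gradient of the model output w.r.t. the input, computed eagerly."""
    x = tf.convert_to_tensor(x, dtype=tf.float32)
    with tf.GradientTape() as tape:
        tape.watch(x)
        y = model(x)
    return tape.gradient(y, x)
```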
Error Output
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
Input In [10], in <cell line: 8>()
      6 model.fit(x=np.arange(10).reshape(1, 10), y=np.arange(10).reshape(1, 10))
      7 analyzer = innvestigate.create_analyzer("gradient", model)
----> 8 analyzer.analyze(np.arange(10).reshape(1, 10))

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/analyzer/network_base.py:250, in AnalyzerNetworkBase.analyze(self, X, neuron_selection)
    247 # TODO: what does should mean in docstring?
    249 if self._analyzer_model_done is False:
--> 250     self.create_analyzer_model()
    252 if neuron_selection is not None and self._neuron_selection_mode != "index":
    253     raise ValueError(
    254         f"neuron_selection_mode {self._neuron_selection_mode} doesn't support ",
    255         "'neuron_selection' parameter.",
    256     )

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/analyzer/network_base.py:164, in AnalyzerNetworkBase.create_analyzer_model(self)
    161 self._analysis_inputs = analysis_inputs
    162 self._prepared_model = model
--> 164 tmp = self._create_analysis(
    165     model, stop_analysis_at_tensors=stop_analysis_at_tensors
    166 )
    167 if isinstance(tmp, tuple):
    168     if len(tmp) == 3:

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/analyzer/reverse_base.py:269, in ReverseAnalyzerBase._create_analysis(self, model, stop_analysis_at_tensors)
    261 return_all_reversed_tensors = (
    262     self._reverse_check_min_max_values
    263     or self._reverse_check_finite
    264     or self._reverse_keep_tensors
    265 )
    267 # if return_all_reversed_tensors is False,
    268 # reversed_tensors will be None
--> 269 reversed_input_tensors, reversed_tensors = self._reverse_model(
    270     model,
    271     stop_analysis_at_tensors=stop_analysis_at_tensors,
    272     return_all_reversed_tensors=return_all_reversed_tensors,
    273 )
    274 ret = self._postprocess_analysis(reversed_input_tensors)
    276 if return_all_reversed_tensors:

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/analyzer/reverse_base.py:242, in ReverseAnalyzerBase._reverse_model(self, model, stop_analysis_at_tensors, return_all_reversed_tensors)
    239 if stop_analysis_at_tensors is None:
    240     stop_analysis_at_tensors = []
--> 242 return igraph.reverse_model(
    243     model,
    244     reverse_mappings=self._reverse_mapping,
    245     default_reverse_mapping=self._default_reverse_mapping,
    246     head_mapping=self._head_mapping,
    247     stop_mapping_at_tensors=stop_analysis_at_tensors,
    248     verbose=self._reverse_verbose,
    249     clip_all_reversed_tensors=self._reverse_clip_values,
    250     project_bottleneck_tensors=self._reverse_project_bottleneck_layers,
    251     return_all_reversed_tensors=return_all_reversed_tensors,
    252 )

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/backend/graph.py:1237, in reverse_model(model, reverse_mappings, default_reverse_mapping, head_mapping, stop_mapping_at_tensors, verbose, return_all_reversed_tensors, clip_all_reversed_tensors, project_bottleneck_tensors, execution_trace, reapply_on_copied_layers)
   1235 _print(f"[NID: {nid}] Reverse layer-node {layer}")
   1236 reverse_mapping = initialized_reverse_mappings[layer]
-> 1237 reversed_Xs = reverse_mapping(
   1238     Xs,
   1239     Ys,
   1240     reversed_Ys,
   1241     {
   1242         "nid": nid,
   1243         "model": model,
   1244         "layer": layer,
   1245         "stop_mapping_at_ids": local_stop_mapping_at_ids,
   1246     },
   1247 )
   1248 reversed_Xs = ibackend.to_list(reversed_Xs)
   1249 add_reversed_tensors(nid, Xs, reversed_Xs)

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/analyzer/reverse_base.py:122, in ReverseAnalyzerBase._gradient_reverse_mapping(self, Xs, Ys, reversed_Ys, reverse_state)
    120 """Returns masked gradient."""
    121 mask = [id(X) not in reverse_state["stop_mapping_at_ids"] for X in Xs]
--> 122 grad = ibackend.gradients(Xs, Ys, reversed_Ys)
    123 return ibackend.apply_mask(grad, mask)

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/innvestigate/backend/__init__.py:82, in gradients(Xs, Ys, known_Ys)
     77 if len(Ys) != len(known_Ys):
     78     raise ValueError(
     79         "Gradient computation failed, Ys and known_Ys not of same length"
     80     )
---> 82 grad = tf.gradients(Ys, Xs, grad_ys=known_Ys, stop_gradients=Xs)
     83 if grad is None:
     84     raise TypeError("Gradient computation failed, returned None.")

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/tensorflow/python/ops/gradients_impl.py:311, in gradients_v2(ys, xs, grad_ys, name, gate_gradients, aggregation_method, stop_gradients, unconnected_gradients)
    306 # Creating the gradient graph for control flow mutates Operations.
    307 # _mutation_lock ensures a Session.run call cannot occur between creating and
    308 # mutating new ops.
    309 # pylint: disable=protected-access
    310 with ops.get_default_graph()._mutation_lock():
--> 311     return gradients_util._GradientsHelper(
    312         ys, xs, grad_ys, name, True, gate_gradients,
    313         aggregation_method, stop_gradients,
    314         unconnected_gradients)

File ~/miniconda3/envs/snap/lib/python3.9/site-packages/tensorflow/python/ops/gradients_util.py:479, in _GradientsHelper(ys, xs, grad_ys, name, colocate_gradients_with_ops, gate_gradients, aggregation_method, stop_gradients, unconnected_gradients, src_graph)
    477 """Implementation of gradients()."""
    478 if context.executing_eagerly():
--> 479     raise RuntimeError("tf.gradients is not supported when eager execution "
    480                        "is enabled. Use tf.GradientTape instead.")
    481 ys = _AsList(ys)
    482 xs = _AsList(xs)

RuntimeError: tf.gradients is not supported when eager execution is enabled. Use tf.GradientTape instead.
Platform information
OS: Debian 11
Python version: 3.9
iNNvestigate version: 2.0.0
TensorFlow version: 2.9.1
Thanks for your interest in the package and the thorough issue.
The original iNNvestigate 1.0 was written on top of TF1 and Keras (among other backends) and works by inverting the computational tree of the model to create an analyzer. To ensure compatibility with existing code and to have identical outputs between iNNvestigate 1 and 2, we've kept this graph-based approach.
I will open a separate issue with your feature request to support GradientTape. Contributions are more than welcome and this issue might just take a little tweak to this function.
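For reference, the failing call is grad = tf.gradients(Ys, Xs, grad_ys=known_Ys, stop_gradients=Xs) in innvestigate/backend/__init__.py. An eager-mode counterpart would look roughly like the sketch below; it only shows the API correspondence (output_gradients takes the role of grad_ys), since the graph-reversal code would additionally have to re-run the forward pass under the tape so that Ys are recorded:

```python
import tensorflow as tf

def gradients_eager(forward_fn, Xs, known_Ys):
    """Sketch only: eager-mode analogue of tf.gradients(Ys, Xs, grad_ys=known_Ys)."""
    with tf.GradientTape() as tape:
        tape.watch(Xs)          # Xs: tensor or list of tensors
        Ys = forward_fn(Xs)     # the forward pass must be recorded on the tape
    return tape.gradient(Ys, Xs, output_gradients=known_Ys)
```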
In the meantime, I'll make the current requirement of using tf.compat.v1.disable_eager_execution() more obvious in the readme.
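For anyone hitting this in the meantime, the workaround looks like this; tf.compat.v1.disable_eager_execution() has to be called at the very start of the script, before any tensors or models are created:

```python
import tensorflow as tf

tf.compat.v1.disable_eager_execution()  # must run before any model is built

import innvestigate
# build, train, and analyze the model as usual afterwards
```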
Read the docs
Done. It isn't documented behaviour.