Question
I am using TensorFlow for a problem where a function works correctly the first time it is called, but on the second call it fails with the error "weak object has gone away". I don't understand what this error means or where the problem might be.
The full traceback is as follows:
----------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _hash_fix(self, elem)
108 try:
--> 109 hash(elem)
110 except TypeError:
TypeError: weak object has gone away
During handling of the above exception, another exception occurred:
AttributeError Traceback (most recent call last)
<ipython-input-23-f1a95ac20255> in <module>
----> 1 default_settings['sur_model'].predict_with_grad(np.atleast_2d(xx))
<ipython-input-5-7e554cb74b1d> in predict_with_grad(self, x)
127
128 with tf.GradientTape() as t:
--> 129 m, v = self.predict(x)
130 dmdx = t.gradient(m, x)
131 with tf.GradientTape() as t:
<ipython-input-5-7e554cb74b1d> in predict(self, X)
116 """
117
--> 118 X_embed = self.embedding_model(X)
119 #print('X_embed',X_embed)
120 m, v = self.Pr_model.predict_y(X_embed)
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/keras/engine/base_layer.py in __call__(self, inputs, *args, **kwargs)
889 with base_layer_utils.autocast_context_manager(
890 self._compute_dtype):
--> 891 outputs = self.call(cast_inputs, *args, **kwargs)
892 self._handle_activity_regularization(inputs, outputs)
893 self._set_mask_metadata(inputs, outputs, input_masks)
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py in __call__(self, *args, **kwds)
455
456 tracing_count = self._get_tracing_count()
--> 457 result = self._call(*args, **kwds)
458 if tracing_count == self._get_tracing_count():
459 self._call_counter.called_without_tracing()
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py in _call(self, *args, **kwds)
492 # In this case we have not created variables on the first call. So we can
493 # run the first trace but we should fail if variables are created.
--> 494 results = self._stateful_fn(*args, **kwds)
495 if self._created_variables:
496 raise ValueError("Creating variables on a non-first call to a function"
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in __call__(self, *args, **kwargs)
1820 def __call__(self, *args, **kwargs):
1821 """Calls a graph function specialized to the inputs."""
-> 1822 graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
1823 return graph_function._filtered_call(args, kwargs) # pylint: disable=protected-access
1824
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _maybe_define_function(self, args, kwargs)
2117
2118 with self._lock:
-> 2119 graph_function = self._function_cache.primary.get(cache_key, None)
2120 if graph_function is not None:
2121 return graph_function, args, kwargs
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in __eq__(self, other)
115
116 def __eq__(self, other):
--> 117 return self._fields_safe == other._fields_safe # pylint: disable=protected-access
118
119
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _fields_safe(self)
91 def _fields_safe(self):
92 """Hash & equality-safe version of all the namedtuple fields."""
---> 93 return (self._hash_fix(self.input_signature), self.parent_graph,
94 self.device_functions, self.colocation_stack,
95 self.in_cross_replica_context)
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _hash_fix(self, elem)
99 # Descend into tuples
100 if isinstance(elem, tuple):
--> 101 return tuple(self._hash_fix(i) for i in elem)
102
103 if isinstance(elem, set):
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in <genexpr>(.0)
99 # Descend into tuples
100 if isinstance(elem, tuple):
--> 101 return tuple(self._hash_fix(i) for i in elem)
102
103 if isinstance(elem, set):
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _hash_fix(self, elem)
99 # Descend into tuples
100 if isinstance(elem, tuple):
--> 101 return tuple(self._hash_fix(i) for i in elem)
102
103 if isinstance(elem, set):
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in <genexpr>(.0)
99 # Descend into tuples
100 if isinstance(elem, tuple):
--> 101 return tuple(self._hash_fix(i) for i in elem)
102
103 if isinstance(elem, set):
~/.local/share/virtualenvs/tf-tRAPLeXL/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py in _hash_fix(self, elem)
110 except TypeError:
111 v = elem()
--> 112 return (v.__class__, tensor_spec.TensorSpec(v.shape, v.dtype))
113
114 return elem
AttributeError: 'NoneType' object has no attribute 'shape'
I find this rather strange and have found no clue as to where I should start fixing it.
Answer 1:
I suppose you're using tf.function / @tf.function somewhere in your code, and you may be redefining an object, which results in @tf.function not retracing your graph. This is because the two objects share the same trace, so even calling with different parameters has no effect. The following snippet reproduces the behavior described above:
import tensorflow as tf

@tf.function
def square(x):
    return x**2

a = square(tf.Variable(2))
print(a)
a = square(tf.Variable(3))
print(a)
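The two exceptions at the bottom of the traceback show the mechanism: TensorFlow's trace-cache key holds weak references to the variables a function was traced with. In _hash_fix, hashing a dead weak reference raises TypeError("weak object has gone away"), and calling the dead reference (v = elem()) returns None, which is why v.shape then raises the AttributeError. A minimal sketch of that weakref behavior in plain Python (no TensorFlow involved):

import weakref

class Obj:
    pass

o = Obj()
r = weakref.ref(o)  # the cache key stores weak references like this

del o               # the referent is garbage collected, e.g. when a
                    # temporary tf.Variable goes out of scope
print(r())          # None -> explains the AttributeError on v.shape
hash(r)             # TypeError: weak object has gone away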
However, if you want different objects to have different traces, i.e. not share traces, you can use separate @tf.function objects, as below:
@tf.function
def square1(x):
    return x**2

@tf.function
def square2(x):
    return x**2

print(square1(tf.Variable(2)))
print(square2(tf.Variable(3)))
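Another way to avoid the problem, assuming your use case allows reusing the same variable across calls, is to keep one tf.Variable alive and update it in place, so the cached trace never ends up holding a reference to a garbage-collected object (a sketch reusing square1 from above):

v = tf.Variable(2)
print(square1(v))  # traces against the live variable v
v.assign(3)        # update in place instead of creating a new Variable
print(square1(v))  # same live variable, same trace, no dead weak reference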
For more details, check out the official documentation. Hope this helps.
Answer 2:
Just to expand on the answer above: I was very unsatisfied with having to copy/paste the Python function to fix this problem. Fortunately, it seems that simply ditching the decorator and calling tf.function as a plain function also solves the problem:
def square(x):
    return x**2

traced1 = tf.function(square)
traced2 = tf.function(square)

print(traced1(tf.Variable(2)))
print(traced2(tf.Variable(3)))
This is much more programmatic and means you don't have to do any silly copy/pasting of code just to re-run something.
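Applied to the situation in the question, where the failing function is a method on a model object, one hypothetical way to use this idea is to create the traced function per instance in __init__, so a rebuilt object gets a fresh trace cache instead of colliding with a stale one (SquareModel and _square_impl are illustrative names, not from the question):

import tensorflow as tf

class SquareModel:
    def __init__(self):
        # each instance wraps its own tf.function, so rebuilding the
        # object cannot hit a dead weak reference from a previous trace
        self.square = tf.function(self._square_impl)

    def _square_impl(self, x):
        return x ** 2

m = SquareModel()
print(m.square(tf.Variable(2)))

m = SquareModel()  # rebuilt object gets its own fresh trace cache
print(m.square(tf.Variable(3)))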
Source: https://stackoverflow.com/questions/58644906/weak-object-has-gone-away-what-does-it-mean