AttributeError: 'Tensor' object has no attribute '_keras_history'
IS5882 opened this issue · 4 comments
@lzfelix I am using Google Colab. It's a fairly simple network that uses an LSTM, a BiLSTM, and a CRF, but I get the error "AttributeError: 'Tensor' object has no attribute '_keras_history'" when model.fit() is called.
I understand that I shouldn't use raw + operations or numpy.add() on Keras tensors and should replace them with the Add() layer, but that is not my case. I also tried wrapping it in a Lambda layer, but that didn't work either (I don't think I was doing it right).
Any help would be highly appreciated.
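For context, my understanding of that fix is to replace raw tensor arithmetic with Keras layers, roughly like the sketch below (the branch names are placeholders, not from my model):

from keras.layers import Input, Dense, Add, Lambda

inp = Input(shape=(140,))
branch_a = Dense(64)(inp)
branch_b = Dense(64)(inp)
# merged = branch_a + branch_b              # raw "+" loses the _keras_history metadata
merged = Add()([branch_a, branch_b])        # the layer-based op keeps the Keras graph intact
scaled = Lambda(lambda t: t * 0.5)(merged)  # arbitrary tensor ops go through Lambda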
This is my code:
input = Input(shape=(140,))
word_embedding_size = 300

model = Embedding(input_dim=n_words, output_dim=word_embedding_size, input_length=140)(input)
model = Bidirectional(LSTM(units=word_embedding_size,
                           return_sequences=True,
                           dropout=0.5,
                           recurrent_dropout=0.5,
                           kernel_initializer=k.initializers.he_normal()))(model)
model = LSTM(units=word_embedding_size * 2,
             return_sequences=True,
             dropout=0.5,
             recurrent_dropout=0.5,
             kernel_initializer=k.initializers.he_normal())(model)
model = TimeDistributed(Dense(n_tags, activation="relu"))(model)  # previously softmax output layer

crf = CRF(n_tags)  # CRF layer
out = crf(model)   # output

model = Model(input, out)
adam = k.optimizers.Adam(lr=0.0005, beta_1=0.9, beta_2=0.999)
#model.compile(optimizer=adam, loss="categorical_crossentropy", metrics=["accuracy"])
model.compile(optimizer=adam, loss=crf.loss_function, metrics=[crf.accuracy, 'accuracy'])
model.summary()

# Checkpoint callback, defined before fit() so that callbacks_list exists when it is used
filepath = "ner-bi-lstm-td-model-{val_acc:.2f}.hdf5"
checkpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]

history = model.fit(X_train, np.array(y_train), batch_size=256, epochs=20, validation_split=0.2, verbose=1, callbacks=callbacks_list)
This is my attempt at wrapping it in a Lambda layer:
from keras.layers import Lambda
input = Input(shape=(140,))
word_embedding_size = 300
modelEmb = Embedding(input_dim=n_words, output_dim=word_embedding_size, input_length=140)(input)
modelBI = Bidirectional(LSTM(units=word_embedding_size,
                             return_sequences=True,
                             dropout=0.5,
                             recurrent_dropout=0.5,
                             kernel_initializer=k.initializers.he_normal()))(modelEmb)
modelLSTM = LSTM(units=word_embedding_size * 2,
                 return_sequences=True,
                 dropout=0.5,
                 recurrent_dropout=0.5,
                 kernel_initializer=k.initializers.he_normal())(modelBI)
model = TimeDistributed(Dense(n_tags, activation="relu"))(modelLSTM)  # previously softmax output layer
crf = tf.keras.layers.Lambda(lambda x: CRF(n_tags))
out = tf.keras.layers.Lambda(lambda x: crf(model))
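From re-reading the docs, I think Lambda is meant to wrap a plain tensor-to-tensor function rather than construct a layer like CRF, so I suspect the last two lines above never actually attach the CRF to the graph. If I understand correctly, proper Lambda usage looks more like this sketch (the halving op is just an arbitrary placeholder, not something my model needs):

from tensorflow.keras.layers import Input, Lambda

x = Input(shape=(140, 300))            # dummy input with the same rank as modelLSTM above
halved = Lambda(lambda t: t * 0.5)(x)  # Lambda wraps a tensor -> tensor function
# A stateful layer like CRF is not wrapped this way; it is instantiated and called directly:
# crf = CRF(n_tags)
# out = crf(model)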
This is the error and traceback:
Epoch 1/20
---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-45-160f633a590f> in <module>()
----> 1 history = model.fit(X_train, np.array(y_train), batch_size=256, epochs=20, validation_split=0.2, verbose=1, callbacks=callbacks_list)

9 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)
   1098               _r=1):
   1099             callbacks.on_train_batch_begin(step)
-> 1100             tmp_logs = self.train_function(iterator)
   1101             if data_handler.should_sync:
   1102               context.async_wait()

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    826     tracing_count = self.experimental_get_tracing_count()
    827     with trace.Trace(self._name) as tm:
--> 828       result = self._call(*args, **kwds)
    829       compiler = "xla" if self._experimental_compile else "nonXla"
    830       new_tracing_count = self.experimental_get_tracing_count()

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
    869       # This is the first call of __call__, so we have to initialize.
    870       initializers = []
--> 871       self._initialize(args, kwds, add_initializers_to=initializers)
    872     finally:
    873       # At this point we know that the initialization is complete (or less

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    724     self._concrete_stateful_fn = (
    725         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 726             *args, **kwds))
    727
    728   def invalid_creator_scope(*unused_args, **unused_kwds):

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   2967       args, kwargs = None, None
   2968     with self._lock:
-> 2969       graph_function, _ = self._maybe_define_function(args, kwargs)
   2970     return graph_function
   2971

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   3359
   3360           self._function_cache.missed.add(call_context_key)
-> 3361           graph_function = self._create_graph_function(args, kwargs)
   3362           self._function_cache.primary[cache_key] = graph_function
   3363

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   3204             arg_names=arg_names,
   3205             override_flat_arg_shapes=override_flat_arg_shapes,
-> 3206             capture_by_value=self._capture_by_value),
   3207         self._function_attributes,
   3208         function_spec=self.function_spec,

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    988       _, original_func = tf_decorator.unwrap(python_func)
    989
--> 990       func_outputs = python_func(*func_args, **func_kwargs)
    991
    992       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    632             xla_context.Exit()
    633         else:
--> 634           out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    635         return out
    636

/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    975           except Exception as e:  # pylint:disable=broad-except
    976             if hasattr(e, "ag_error_metadata"):
--> 977               raise e.ag_error_metadata.to_exception(e)
    978             else:
    979               raise

AttributeError: in user code:

    /usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:805 train_function  *
        return step_function(self, iterator)
    /usr/local/lib/python3.6/dist-packages/keras_contrib/losses/crf_losses.py:54 crf_loss  *
        crf, idx = y_pred._keras_history[:2]

    **AttributeError: 'Tensor' object has no attribute '_keras_history'**
Hello, I have the same problem with BiLSTM-CRF for aspect term extraction. Have you found a solution?
Thank you in advance!
Did you switch Colab to TensorFlow 1? I don't think the CRF from keras-contrib works in TF2: as the last frame of your traceback shows, its crf_loss reads y_pred._keras_history, an internal attribute that TF2's Keras tensors no longer expose, so the loss fails as soon as training starts.
%tensorflow_version 1.x
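If it helps, this is roughly how I set it up; the magic has to run before tensorflow or keras is imported, and the exact 1.x version printed depends on Colab:

# Colab magic: select the TF1 runtime; must run before tensorflow/keras is imported
%tensorflow_version 1.x
import tensorflow as tf
print(tf.__version__)                 # verify that you are actually on 1.x
from keras_contrib.layers import CRF  # keras-contrib's CRF expects the TF1 / standalone-Keras stack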
Here's the bug fix:
keras-team/keras#14464 (comment)
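If you would rather stay on TF2, one alternative I'm aware of (I have not checked whether it is what the linked comment does) is to drop keras-contrib and use the CRF ops from tensorflow-addons instead. A minimal sketch on dummy data, with made-up shapes:

import tensorflow as tf
import tensorflow_addons as tfa

batch, timesteps, n_tags = 2, 140, 10
logits = tf.random.normal([batch, timesteps, n_tags])                    # per-token tag scores
tags = tf.random.uniform([batch, timesteps], 0, n_tags, dtype=tf.int32)  # gold label ids
seq_lens = tf.fill([batch], timesteps)

# Negative log-likelihood of the gold tag sequence under a linear-chain CRF
log_likelihood, transition_params = tfa.text.crf_log_likelihood(logits, tags, seq_lens)
loss = -tf.reduce_mean(log_likelihood)

# Viterbi decoding for predictions
decoded_tags, _ = tfa.text.crf_decode(logits, transition_params, seq_lens)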
I ran this code, but my 'acc' became greater than 1. Why?