AttributeError: 'TFOptimizer' object has no attribute 'learning_rate'
Delicious-Bitter-Melon opened this issue · 7 comments
Python 3.6.0, Keras 2.3.1, TensorFlow 2.1.0
AttributeError: 'TFOptimizer' object has no attribute 'learning_rate'
AttributeError Traceback (most recent call last)
<ipython-input> in <module>
/opt/conda/lib/python3.6/site-packages/keras/legacy/interfaces.py in wrapper(*args, **kwargs)
89 warnings.warn('Update your `' + object_name + '` call to the ' +
90 'Keras 2 API: ' + signature, stacklevel=2)
---> 91 return func(*args, **kwargs)
92 wrapper._original_function = func
93 return wrapper
/opt/conda/lib/python3.6/site-packages/keras/engine/training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
1730 use_multiprocessing=use_multiprocessing,
1731 shuffle=shuffle,
---> 1732 initial_epoch=initial_epoch)
1733
1734 @interfaces.legacy_generator_methods_support
/opt/conda/lib/python3.6/site-packages/keras/engine/training_generator.py in fit_generator(model, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
258 break
259
---> 260 callbacks.on_epoch_end(epoch, epoch_logs)
261 epoch += 1
262 if callbacks.model.stop_training:
/opt/conda/lib/python3.6/site-packages/keras/callbacks/callbacks.py in on_epoch_end(self, epoch, logs)
150 logs = logs or {}
151 for callback in self.callbacks:
---> 152 callback.on_epoch_end(epoch, logs)
153
154 def on_train_batch_begin(self, batch, logs=None):
/opt/conda/lib/python3.6/site-packages/keras/callbacks/callbacks.py in on_epoch_end(self, epoch, logs)
1034 def on_epoch_end(self, epoch, logs=None):
1035 logs = logs or {}
---> 1036 logs['lr'] = K.get_value(self.model.optimizer.lr)
1037 current = logs.get(self.monitor)
1038 if current is None:
/opt/conda/lib/python3.6/site-packages/keras/optimizers.py in lr(self)
159 def lr(self):
160 # Legacy support.
---> 161 return self.learning_rate
162
163
AttributeError: 'TFOptimizer' object has no attribute 'learning_rate'
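Reading the last two frames: ReduceLROnPlateau logs the learning rate via K.get_value(self.model.optimizer.lr), and in keras 2.3.1 the lr property is only legacy sugar that returns self.learning_rate. The TFOptimizer wrapper that standalone keras puts around a native TensorFlow optimizer never sets that attribute, hence the crash. Below is a small, hedged pre-flight check (not part of the original report; the helper name and error text are mine) that surfaces the problem before training starts:

from keras import optimizers as keras_optimizers

def check_lr_visible(model):
    """Fail early if keras LR-based callbacks would crash on this optimizer."""
    opt = model.optimizer
    # TFOptimizer wraps a native TF optimizer and never defines `learning_rate`,
    # so ReduceLROnPlateau's read of optimizer.lr raises AttributeError at epoch end.
    if isinstance(opt, keras_optimizers.TFOptimizer) or not hasattr(opt, 'learning_rate'):
        raise ValueError('optimizer does not expose learning_rate; use a native '
                         'keras/tf.keras optimizer or drop LR-based callbacks')

# usage, after model.compile(...) and before fit / fit_generator:
# check_lr_visible(model)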
When I use tf.keras instead of keras, this problem doesn't happen.
You have to import everything consistently, i.e. take the model, the optimizer, and the callbacks all from the same package (all tf.keras or all plain keras) instead of mixing the two, like the sketch below.
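For example, a minimal sketch that stays inside tf.keras for the model, the optimizer, and the callback (the model layout, data, and hyperparameters are placeholders, not taken from this issue):

import numpy as np
import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(64, activation='relu', input_shape=(10,)),
    tf.keras.layers.Dense(1),
])
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=1e-3), loss='mse')

# tf.keras optimizers expose both `lr` and `learning_rate`, so ReduceLROnPlateau
# can read and update the learning rate without the AttributeError above.
reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.5, patience=2)

x, y = np.random.rand(128, 10), np.random.rand(128, 1)
model.fit(x, y, epochs=5, callbacks=[reduce_lr])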
If anybody still has this problem, I can explain it to them.
Same problem
@Mehmet5353 I'm facing the same issue with this setup:
# Horovod: scale the base learning rate by the number of workers, then wrap the optimizer
optimizer = tf.keras.optimizers.Adam(learning_rate * hvd.size())
opt = hvd.DistributedOptimizer(optimizer)
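In case it helps, a hedged sketch of how that setup can stay entirely in tf.keras with the horovod.tensorflow.keras binding, so the distributed wrapper should still expose its learning rate to tf.keras callbacks (the base learning rate, model, and callback choices are illustrative, not from this thread):

import tensorflow as tf
import horovod.tensorflow.keras as hvd

hvd.init()
base_lr = 1e-3

# Scale the learning rate by the number of workers, as above, then wrap the optimizer.
optimizer = tf.keras.optimizers.Adam(base_lr * hvd.size())
opt = hvd.DistributedOptimizer(optimizer)

model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
# Horovod's TF2 Keras examples also pass experimental_run_tf_function=False here on TF 2.1
model.compile(optimizer=opt, loss='mse', experimental_run_tf_function=False)

callbacks = [
    hvd.callbacks.BroadcastGlobalVariablesCallback(0),               # keep workers in sync
    tf.keras.callbacks.ReduceLROnPlateau(monitor='loss', patience=2),
]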
I have the same issue too!