ValueError: Operands could not be broadcast together with shapes (1, Dimension(810)) (10, 81)
Importing the module with from DARNN import dual_attention raises the following ValueError:
ValueError Traceback (most recent call last)
in <module>
----> 1 from DARNN import dual_attention
~/repos/A-Dual-Stage-Attention-Based-Recurrent-Neural-Network-for-Time-Series-Prediction/DARNN/dual_attention.py in <module>
193 s_de0 = Input(shape=(n_sde0,))
194 Y = Input(shape=(T-1,1))
--> 195 X_ = encoder_attention(T,X,s0,h0)
196 print('X_:',X_)
197 X_ = Reshape((T,n))(X_)
~/repos/A-Dual-Stage-Attention-Based-Recurrent-Neural-Network-for-Time-Series-Prediction/DARNN/dual_attention.py in encoder_attention(T, X, s0, h0)
141 # break
142
--> 143 X_ = Multiply()([attention_weight_t,X])
144 print('return X:',X_)
145 return X_
~/anaconda3/envs/Dual-Stage-Attention-for-TS/lib/python3.6/site-packages/keras/engine/topology.py in call(self, inputs, **kwargs)
592 self.build(input_shapes[0])
593 else:
--> 594 self.build(input_shapes)
595 self.built = True
596
~/anaconda3/envs/Dual-Stage-Attention-for-TS/lib/python3.6/site-packages/keras/layers/merge.py in build(self, input_shape)
88 else:
89 shape = input_shape[i][1:]
---> 90 output_shape = self._compute_elemwise_op_output_shape(output_shape, shape)
91 # If the inputs have different ranks, we have to reshape them
92 # to make them broadcastable.
~/anaconda3/envs/Dual-Stage-Attention-for-TS/lib/python3.6/site-packages/keras/layers/merge.py in _compute_elemwise_op_output_shape(self, shape1, shape2)
59 raise ValueError('Operands could not be broadcast '
60 'together with shapes ' +
---> 61 str(shape1) + ' ' + str(shape2))
62 output_shape.append(i)
63 return tuple(output_shape)
ValueError: Operands could not be broadcast together with shapes (1, Dimension(810)) (10, 81)
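Not a confirmed fix, but the shapes in the message look telling: 810 = 10 × 81, so the attention weights apparently reach Multiply as a flat (batch, 810) tensor while X is (batch, 10, 81). Below is a minimal, self-contained sketch of the mismatch and one way to reconcile it. T = 10 and n = 81 are just the values implied by the error, and attn_flat / X_weighted are hypothetical names, not identifiers from the repository:

import numpy as np
from keras.layers import Input, Multiply, Reshape
from keras.models import Model

T, n = 10, 81                      # timesteps, driving series (from the error message)

X = Input(shape=(T, n))            # encoder input, shape (batch, 10, 81)
attn_flat = Input(shape=(T * n,))  # flattened attention weights, shape (batch, 810)

# Multiply()([attn_flat, X]) reproduces the ValueError above, because
# (1, 810) and (10, 81) are not broadcast-compatible. Restoring the
# (T, n) layout first lets Multiply apply the weights element-wise:
attn = Reshape((T, n))(attn_flat)  # (batch, 810) -> (batch, 10, 81)
X_weighted = Multiply()([attn, X])

model = Model(inputs=[X, attn_flat], outputs=X_weighted)
print(model.output_shape)          # (None, 10, 81)

Since line 197 of dual_attention.py already applies Reshape((T, n)) to the result after the call, moving that reshape ahead of the Multiply inside encoder_attention may be all that is needed, but I have not verified this against the rest of the model.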