RNN: Dimensions must be equal, but are 600 and 320 for 'rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/lstm_cell/MatMul_1' (op: 'MatMul') with input shapes: [?,600], [320,1200].
HoussamRazouk opened this issue · 3 comments
ValueError Traceback (most recent call last)
in <module>()
----> 1 prediction = RNN(x, weight, bias)
2
3 # Define loss and optimizer
4 loss_f = -tf.reduce_sum(y * tf.log(prediction))
5 optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate).minimize(loss_f)
in RNN(x, weight, bias)
2 cell = rnn_cell.LSTMCell(n_hidden,state_is_tuple = True)
3 cell = rnn_cell.MultiRNNCell([cell] * 2)
----> 4 output, state = tf.nn.dynamic_rnn(cell, x, dtype = tf.float32)
5 output = tf.transpose(output, [1, 0, 2])
6 last = tf.gather(output, int(output.get_shape()[0]) - 1)
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc in dynamic_rnn(cell, inputs, sequence_length, initial_state, dtype, parallel_iterations, swap_memory, time_major, scope)
612 swap_memory=swap_memory,
613 sequence_length=sequence_length,
--> 614 dtype=dtype)
615
616 # Outputs of _dynamic_rnn_loop are always shaped [time, batch, depth].
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc in _dynamic_rnn_loop(cell, inputs, initial_state, parallel_iterations, swap_memory, sequence_length, dtype)
775 loop_vars=(time, output_ta, state),
776 parallel_iterations=parallel_iterations,
--> 777 swap_memory=swap_memory)
778
779 # Unpack final output if not using output tuples.
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc in while_loop(cond, body, loop_vars, shape_invariants, parallel_iterations, back_prop, swap_memory, name)
2814 loop_context = WhileContext(parallel_iterations, back_prop, swap_memory) # pylint: disable=redefined-outer-name
2815 ops.add_to_collection(ops.GraphKeys.WHILE_CONTEXT, loop_context)
-> 2816 result = loop_context.BuildLoop(cond, body, loop_vars, shape_invariants)
2817 return result
2818
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc in BuildLoop(self, pred, body, loop_vars, shape_invariants)
2638 self.Enter()
2639 original_body_result, exit_vars = self._BuildLoop(
-> 2640 pred, body, original_loop_vars, loop_vars, shape_invariants)
2641 finally:
2642 self.Exit()
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/control_flow_ops.pyc in _BuildLoop(self, pred, body, original_loop_vars, loop_vars, shape_invariants)
2588 structure=original_loop_vars,
2589 flat_sequence=vars_for_body_with_tensor_arrays)
-> 2590 body_result = body(*packed_vars_for_body)
2591 if not nest.is_sequence(body_result):
2592 body_result = [body_result]
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc in _time_step(time, output_ta_t, state)
760 skip_conditionals=True)
761 else:
--> 762 (output, new_state) = call_cell()
763
764 # Pack state if using state tuples
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn.pyc in <lambda>()
746
747 input_t = nest.pack_sequence_as(structure=inputs, flat_sequence=input_t)
--> 748 call_cell = lambda: cell(input_t, state)
749
750 if sequence_length is not None:
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn_cell_impl.pyc in __call__(self, inputs, state, scope)
181 with vs.variable_scope(vs.get_variable_scope(),
182 custom_getter=self._rnn_get_variable):
--> 183 return super(RNNCell, self).__call__(inputs, state)
184
185 def _rnn_get_variable(self, getter, *args, **kwargs):
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/layers/base.pyc in __call__(self, inputs, *args, **kwargs)
573 if in_graph_mode:
574 self._assert_input_compatibility(inputs)
--> 575 outputs = self.call(inputs, *args, **kwargs)
576
577 if outputs is None:
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn_cell_impl.pyc in call(self, inputs, state)
1064 [-1, cell.state_size])
1065 cur_state_pos += cell.state_size
-> 1066 cur_inp, new_state = cell(cur_inp, cur_state)
1067 new_states.append(new_state)
1068
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn_cell_impl.pyc in __call__(self, inputs, state, scope)
181 with vs.variable_scope(vs.get_variable_scope(),
182 custom_getter=self._rnn_get_variable):
--> 183 return super(RNNCell, self).__call__(inputs, state)
184
185 def _rnn_get_variable(self, getter, *args, **kwargs):
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/layers/base.pyc in __call__(self, inputs, *args, **kwargs)
573 if in_graph_mode:
574 self._assert_input_compatibility(inputs)
--> 575 outputs = self.call(inputs, *args, **kwargs)
576
577 if outputs is None:
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn_cell_impl.pyc in call(self, inputs, state)
609
610 # i = input_gate, j = new_input, f = forget_gate, o = output_gate
--> 611 lstm_matrix = self._linear1([inputs, m_prev])
612 i, j, f, o = array_ops.split(
613 value=lstm_matrix, num_or_size_splits=4, axis=1)
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/rnn_cell_impl.pyc in __call__(self, args)
1187 res = math_ops.matmul(args[0], self._weights)
1188 else:
-> 1189 res = math_ops.matmul(array_ops.concat(args, 1), self._weights)
1190 if self._build_bias:
1191 res = nn_ops.bias_add(res, self._biases)
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/math_ops.pyc in matmul(a, b, transpose_a, transpose_b, adjoint_a, adjoint_b, a_is_sparse, b_is_sparse, name)
1889 else:
1890 return gen_math_ops._mat_mul(
-> 1891 a, b, transpose_a=transpose_a, transpose_b=transpose_b, name=name)
1892
1893
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/ops/gen_math_ops.pyc in _mat_mul(a, b, transpose_a, transpose_b, name)
2435 _, _, _op = _op_def_lib._apply_op_helper(
2436 "MatMul", a=a, b=b, transpose_a=transpose_a, transpose_b=transpose_b,
-> 2437 name=name)
2438 _result = _op.outputs[:]
2439 _inputs_flat = _op.inputs
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in _apply_op_helper(self, op_type_name, name, **keywords)
785 op = g.create_op(op_type_name, inputs, output_types, name=scope,
786 input_types=input_types, attrs=attr_protos,
--> 787 op_def=op_def)
788 return output_structure, op_def.is_stateful, op
789
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in create_op(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_shapes, compute_device)
2956 op_def=op_def)
2957 if compute_shapes:
-> 2958 set_shapes_for_outputs(ret)
2959 self._add_op(ret)
2960 self._record_op_seen_by_control_dependencies(ret)
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in set_shapes_for_outputs(op)
2207 shape_func = _call_cpp_shape_fn_and_require_op
2208
-> 2209 shapes = shape_func(op)
2210 if shapes is None:
2211 raise RuntimeError(
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in call_with_requiring(op)
2157
2158 def call_with_requiring(op):
-> 2159 return call_cpp_shape_fn(op, require_shape_fn=True)
2160
2161 _call_cpp_shape_fn_and_require_op = call_with_requiring
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/common_shapes.pyc in call_cpp_shape_fn(op, require_shape_fn)
625 res = _call_cpp_shape_fn_impl(op, input_tensors_needed,
626 input_tensors_as_shapes_needed,
--> 627 require_shape_fn)
628 if not isinstance(res, dict):
629 # Handles the case where _call_cpp_shape_fn_impl calls unknown_shape(op).
/home/houssam/anaconda2/lib/python2.7/site-packages/tensorflow/python/framework/common_shapes.pyc in _call_cpp_shape_fn_impl(op, input_tensors_needed, input_tensors_as_shapes_needed, require_shape_fn)
689 missing_shape_fn = True
690 else:
--> 691 raise ValueError(err.message)
692
693 if missing_shape_fn:
ValueError: Dimensions must be equal, but are 600 and 320 for 'rnn/while/rnn/multi_rnn_cell/cell_0/cell_0/lstm_cell/MatMul_1' (op: 'MatMul') with input shapes: [?,600], [320,1200].
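One way to read the shapes in the error (an inference from the numbers, not stated anywhere in the trace): the LSTM kernel [320, 1200] matches [input_depth + n_hidden, 4 * n_hidden] with n_hidden = 300 and an input depth of 20, while the failing concat [?, 600] is a 300-wide layer output joined with a 300-wide hidden state. That is what the second layer would feed into a kernel built for the first layer's input size, which is consistent with both layers sharing the single cell object created by [cell] * 2. A quick sanity check of that arithmetic:

```python
# Hypothetical numbers inferred from the error message, not taken from the
# original notebook: 1200 = 4 * n_hidden and 320 = input_depth + n_hidden.
n_hidden = 300      # LSTM units per layer
input_depth = 20    # feature dimension of x at each time step

kernel_shape = (input_depth + n_hidden, 4 * n_hidden)  # (320, 1200), built for layer 0
layer1_input_width = n_hidden + n_hidden               # 600: layer-0 output + layer-1 hidden state

# MatMul needs the inner dimensions to agree; 600 != 320, hence the ValueError.
print(kernel_shape, layer1_input_width)
```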
Hello! I'm also facing the same problem. May I ask how you solved this ValueError? I would appreciate it very much if you could help me!
I marked the line cell = rnn_cell.MultiRNNCell([cell] * 2) in def RNN, and it works.
@jenweilee I'm facing the same issue. What do you mean by "marking it"?
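For anyone still stuck, here is a minimal sketch of the usual fix, assuming TF 1.x and n_hidden = 300 (inferred from the error); the final softmax projection is a guess, since the original RNN body is truncated in the traceback. The key change is to create a separate LSTMCell per layer instead of reusing one object via [cell] * 2, so each layer builds its own kernel with the correct input size.

```python
import tensorflow as tf

def RNN(x, weight, bias, n_hidden=300, n_layers=2):
    # One fresh cell per layer; [cell] * 2 puts the *same* object in the list
    # twice, so both layers try to share one kernel and the shapes clash.
    cells = [tf.nn.rnn_cell.LSTMCell(n_hidden, state_is_tuple=True)
             for _ in range(n_layers)]
    cell = tf.nn.rnn_cell.MultiRNNCell(cells, state_is_tuple=True)

    output, state = tf.nn.dynamic_rnn(cell, x, dtype=tf.float32)
    output = tf.transpose(output, [1, 0, 2])                   # [time, batch, n_hidden]
    last = tf.gather(output, int(output.get_shape()[0]) - 1)   # last time step
    return tf.nn.softmax(tf.matmul(last, weight) + bias)       # guessed output projection
```

If "marking" the line means commenting it out, that removes the error only because the model falls back to a single LSTM layer; the per-layer cells above keep the two-layer architecture.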