InvalidArgumentError: Graph execution error: Input to reshape is a tensor with 1204224 values, but the requested shape has 4816896
innat opened this issue · 0 comments
innat commented
import tensorflow as tf  # 2.13
import numpy as np  # required by input_fn below — missing in the original repro (NameError)

# Convert a Swin Transformer SavedModel to TF-TRT with FP16 precision.
# use_dynamic_shape + 'Range+Optimal' is intended to build engines that
# cover the varying frame dimension exercised by input_fn (8/16/32 frames).
params = tf.experimental.tensorrt.ConversionParams(
    precision_mode="FP16",
    maximum_cached_engines=3,
)
converter = tf.experimental.tensorrt.Converter(
    input_saved_model_dir='/kaggle/working/swin_t',  # TF SavedModel
    conversion_params=params,
    use_dynamic_shape=True,
    dynamic_shape_profile_strategy='Range+Optimal',
)
converter.convert()


def input_fn():
    """Yield representative inputs spanning the dynamic (frame) dimension.

    Each yielded value is a list of arrays, one per model input; here the
    model has a single (batch, frames, H, W, C) input with frames in {8, 16, 32}.
    """
    input_shapes = [
        [(1, 8, 224, 224, 3)],
        [(1, 16, 224, 224, 3)],
        [(1, 32, 224, 224, 3)],
    ]
    for shapes in input_shapes:
        yield [
            np.zeros(x, dtype=np.float32) for x in shapes
        ]


# build() pre-builds TRT engines for the profiles produced by input_fn;
# this is where the reported reshape error is raised.
converter.build(input_fn=input_fn)
converter.save("tensorrt_model")
---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
Cell In[8], line 1
----> 1 converter.build(input_fn=input_fn)
2 converter.save("tensorrt_model")
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/compiler/tensorrt/trt_convert.py:1532, in TrtGraphConverterV2.build(self, input_fn)
1530 first_input = inp
1531 args, kwargs = _convert_to_tensor(inp)
-> 1532 func(*args, **kwargs)
1534 if self._need_trt_profiles():
1535 # Disable profile generation.
1536 self._for_each_trt_node(self._converted_graph_def,
1537 partial(_set_profile_generation_mode, False))
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/polymorphic_function/monomorphic_function.py:1184, in ConcreteFunction.__call__(self, *args, **kwargs)
1134 def __call__(self, *args, **kwargs):
1135 """Executes the wrapped function.
1136
1137 ConcreteFunctions have two signatures:
(...)
1182 TypeError: If the arguments do not match the function's signature.
1183 """
-> 1184 return self._call_impl(args, kwargs)
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/wrap_function.py:243, in WrappedFunction._call_impl(self, args, kwargs)
241 return self._call_flat(args, self.captured_inputs)
242 else:
--> 243 return super(WrappedFunction, self)._call_impl(args, kwargs)
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/polymorphic_function/monomorphic_function.py:1200, in ConcreteFunction._call_impl(self, args, kwargs)
1197 except TypeError:
1198 raise structured_err
-> 1200 return self._call_with_flat_signature(args, kwargs)
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/polymorphic_function/monomorphic_function.py:1252, in ConcreteFunction._call_with_flat_signature(self, args, kwargs)
1247 if not isinstance(
1248 arg, (ops.Tensor, resource_variable_ops.BaseResourceVariable)):
1249 raise TypeError(f"{self._flat_signature_summary()}: expected argument "
1250 f"#{i}(zero-based) to be a Tensor; "
1251 f"got {type(arg).__name__} ({arg}).")
-> 1252 return self._call_flat(args, self.captured_inputs)
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/polymorphic_function/monomorphic_function.py:1349, in ConcreteFunction._call_flat(self, args, captured_inputs)
1345 possible_gradient_type = gradients_util.PossibleTapeGradientTypes(args)
1346 if (possible_gradient_type == gradients_util.POSSIBLE_GRADIENT_TYPES_NONE
1347 and executing_eagerly):
1348 # No tape is watching; skip to running the function.
-> 1349 return self._build_call_outputs(self._inference_function(*args))
1350 forward_backward = self._select_forward_and_backward_functions(
1351 args,
1352 possible_gradient_type,
1353 executing_eagerly)
1354 forward_function, args_with_tangents = forward_backward.forward()
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/polymorphic_function/atomic_function.py:196, in AtomicFunction.__call__(self, *args)
194 with record.stop_recording():
195 if self._bound_context.executing_eagerly():
--> 196 outputs = self._bound_context.call_function(
197 self.name,
198 list(args),
199 len(self.function_type.flat_outputs),
200 )
201 else:
202 outputs = make_call_op_in_graph(self, list(args))
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/context.py:1457, in Context.call_function(self, name, tensor_inputs, num_outputs)
1455 cancellation_context = cancellation.context()
1456 if cancellation_context is None:
-> 1457 outputs = execute.execute(
1458 name.decode("utf-8"),
1459 num_outputs=num_outputs,
1460 inputs=tensor_inputs,
1461 attrs=attrs,
1462 ctx=self,
1463 )
1464 else:
1465 outputs = execute.execute_with_cancellation(
1466 name.decode("utf-8"),
1467 num_outputs=num_outputs,
(...)
1471 cancellation_manager=cancellation_context,
1472 )
File /opt/conda/lib/python3.10/site-packages/tensorflow/python/eager/execute.py:53, in quick_execute(op_name, num_outputs, inputs, attrs, ctx, name)
51 try:
52 ctx.ensure_initialized()
---> 53 tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,
54 inputs, attrs, num_outputs)
55 except core._NotOkStatusException as e:
56 if name is not None:
InvalidArgumentError: Graph execution error:
Input to reshape is a tensor with 1204224 values, but the requested shape has 4816896
[[{{node StatefulPartitionedCall/swin_transformer3d_1/BasicLayer1_1/swin_transformer_block3d_1/Reshape}}]]
[[TRTEngineOp_000_002]] [Op:__inference_pruned_36162]