Hi!
I am using the global_avgpool_reduction option of the DFC to address the issue outlined in this discussion. The error below does not occur if I optimize the model without this option. What am I doing wrong?
The model script command I use is:
pre_quantization_optimization(global_avgpool_reduction, layers=avgpool1, division_factors=[4, 4])
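For context, the flow around that command looks roughly like this — a minimal sketch, where the HAR path, calibration shape, and dataset size are placeholders rather than my real values:

import numpy as np
from hailo_sdk_client import ClientRunner

# Load the parsed model (placeholder HAR path)
runner = ClientRunner(har="model_parsed.har")

# Attach the model script containing the global_avgpool_reduction command
runner.load_model_script(
    "pre_quantization_optimization(global_avgpool_reduction, layers=avgpool1, division_factors=[4, 4])\n"
)

# Calibration data in NHWC layout (placeholder shape)
calib_dataset = np.random.rand(64, 416, 416, 3).astype(np.float32)

# Optimize/quantize the model with the script applied
runner.optimize(calib_dataset)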
When I try to run inference on the optimized model, I get a runtime error:
with runner.infer_context(InferenceContext.SDK_QUANTIZED) as ctx:
    outputs = runner.infer(ctx, img)
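For completeness, the surrounding snippet looks roughly like this — a minimal sketch, assuming img is a preprocessed NHWC batch (the shape below is a placeholder, not my real input size):

import numpy as np
from hailo_sdk_client import InferenceContext

# Preprocessed input batch in NHWC layout (placeholder shape)
img = np.random.rand(1, 416, 416, 3).astype(np.float32)

# Emulated inference on the quantized model
with runner.infer_context(InferenceContext.SDK_QUANTIZED) as ctx:
    outputs = runner.infer(ctx, img)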
The traceback is:
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/engine/training.py", line 2169, in predict_function *
return step_function(self, iterator)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/engine/training.py", line 2155, in step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/engine/training.py", line 2143, in run_step **
outputs = model.predict_step(data)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/engine/training.py", line 2111, in predict_step
return self(x, training=False)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 70, in error_handler
raise e.with_traceback(filtered_tb) from None
File "/tmp/__autograph_generated_file8zz54eu5.py", line 12, in tf__call
retval_ = ag__.converted_call(ag__.ld(self)._model, (ag__.ld(inputs),), dict(**ag__.ld(kwargs)), fscope)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/utils/distributed_utils.py", line 122, in wrapper
res = func(self, *args, **kwargs)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1069, in build
self.compute_output_shape(input_shape)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1013, in compute_output_shape
return self.compute_and_verify_output_shape(input_shape, verify_layer_inputs_shape=False)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1047, in compute_and_verify_output_shape
layer_output_shape = layer.compute_output_shape(layer_input_shapes)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/hailo_layers/base_hailo_layer.py", line 1502, in compute_output_shape
op_output_shape = op.compute_output_shape(op_input_shapes)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/atomic_ops/base_atomic_op.py", line 710, in compute_output_shape
shapes = self._compute_output_shape(input_shape)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/atomic_ops/conv_stripped_op.py", line 1136, in _compute_output_shape
h_out, w_out = self._spatial_output_shape(input_shape[1:3])
ValueError: Exception encountered when calling layer 'simulation_inference_model_17' (type SimulationInferenceModel).
in user code:
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/flows/inference_flow.py", line 135, in call *
return self._model(inputs, **kwargs)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/keras/utils/traceback_utils.py", line 70, in error_handler **
raise e.with_traceback(filtered_tb) from None
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/utils/distributed_utils.py", line 122, in wrapper
res = func(self, *args, **kwargs)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1069, in build
self.compute_output_shape(input_shape)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1013, in compute_output_shape
return self.compute_and_verify_output_shape(input_shape, verify_layer_inputs_shape=False)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/model/hailo_model/hailo_model.py", line 1047, in compute_and_verify_output_shape
layer_output_shape = layer.compute_output_shape(layer_input_shapes)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/hailo_layers/base_hailo_layer.py", line 1502, in compute_output_shape
op_output_shape = op.compute_output_shape(op_input_shapes)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/atomic_ops/base_atomic_op.py", line 710, in compute_output_shape
shapes = self._compute_output_shape(input_shape)
File "/local/workspace/hailo_virtualenv/lib/python3.8/site-packages/hailo_model_optimization/acceleras/atomic_ops/conv_stripped_op.py", line 1136, in _compute_output_shape
h_out, w_out = self._spatial_output_shape(input_shape[1:3])
ValueError: not enough values to unpack (expected 2, got 1)
Call arguments received by layer 'simulation_inference_model_17' (type SimulationInferenceModel):
• inputs=tf.Tensor(shape=(None, 416), dtype=float32)
• kwargs={'training': 'False'}