Compiling a .onnx to .hef with DFC v3.28.0 following official documentation

Good morning,
Hi, I wanted to learn the process of compiling a pre-trained model with DFC, so I downloaded DFC v3.28.0 and successfully installed it. I then downloaded a pre-trained model from the Hailo Model Zoo, unzipped it, located the .onnx file, translated it to a .har file, added some layers with a model script, and optimized it with a calibration dataset. When I proceeded to the next step — compiling it to a .hef file — I ended up with this error:

[error] Failed to produce compiled graph

---------------------------------------------------------------------------

TypeError                                 Traceback (most recent call last)

<ipython-input-68-b7c37a0edc83> in <cell line: 1>()
----> 1 hef = runner.compile()

14 frames

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/runner/client_runner.py in compile(self)
    713             >>> compiled_model = runner.compile()
    714         """
--> 715         return self._compile()
    716 
    717     @contextmanager

/usr/local/lib/python3.10/dist-packages/hailo_sdk_common/states/states.py in wrapped_func(self, *args, **kwargs)
     14                 raise InvalidStateException("The execution of {} is not available under the state: "
     15                                             "{}".format(func.__name__, self._state.value))
---> 16             return func(self, *args, **kwargs)
     17 
     18         return wrapped_func

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/runner/client_runner.py in _compile(self, fps, mapping_timeout, allocator_script_filename)
    830             self.load_model_script(allocator_script_filename)
    831 
--> 832         serialized_hef = self._sdk_backend.compile(fps, self.model_script, mapping_timeout)
    833 
    834         self._auto_model_script = self._sdk_backend.get_auto_alls()

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/sdk_backend/sdk_backend.py in compile(self, fps, allocator_script, mapping_timeout)
   1438     def compile(self, fps, allocator_script=None, mapping_timeout=None):
   1439         self._model.fill_default_quantization_params(logger=self._logger)
-> 1440         hef, mapped_graph_file = self._compile(fps, allocator_script, mapping_timeout)
   1441         # TODO: https://hailotech.atlassian.net/browse/SDK-31038
   1442         if not SDKPaths().is_internal:

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/sdk_backend/sdk_backend.py in _compile(self, fps, allocator_script, mapping_timeout)
   1432                 'Did you forget to quantize?')
   1433 
-> 1434         hef, mapped_graph_file, auto_alls = self.hef_full_build(fps, mapping_timeout, model_params, allocator_script)
   1435         self._auto_alls = auto_alls
   1436         return hef, mapped_graph_file

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/sdk_backend/sdk_backend.py in hef_full_build(self, fps, mapping_timeout, params, allocator_script)
   1406         config_paths = ConfigPaths(self._hw_arch, self._model.name)
   1407         config_paths.set_stage('inference')
-> 1408         auto_alls, self._mapped_graph, self._integrated_graph = allocator.create_mapping_and_full_build_hef(
   1409             config_paths.get_path('network_graph'),
   1410             config_paths.get_path('mapped_graph'),

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in create_mapping_and_full_build_hef(self, network_graph_path, output_path, compilation_output_proto, agent, strategy, auto_mapping, params, expected_output_tensor, expected_pre_acts, allocator_script, allocator_script_mode, compiler_statistics_path, nms_metadata, har, alls_ignore_invalid_cmds)
    596                 "Number of clusters in layer placements is larger than allowed number of clusters")
    597 
--> 598         self.call_builder(network_graph_path, output_path, compilation_output_proto=compilation_output_proto,
    599                           agent=agent, strategy=strategy, exit_point=BuilderExitPoint.POST_CAT, params=params,
    600                           expected_output_tensor=expected_output_tensor, expected_pre_acts=expected_pre_acts,

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in call_builder(self, network_graph_path, output_path, blind_deserialize, **kwargs)
    554         sys.excepthook = _hailo_tools_exception_hook
    555         try:
--> 556             self.run_builder(network_graph_path, output_path, **kwargs)
    557         except BackendInternalException:
    558             try:

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in run_builder(self, network_graph_filename, output_filename, compilation_output_proto, agent, strategy, exit_point, params, expected_output_tensor, expected_pre_acts, allocator_script, allocator_script_mode, compiler_statistics_path, is_debug, nms_metadata, har, alls_ignore_invalid_cmds)
    413 
    414         try:
--> 415             self._run_hailo_tools(paths)
    416         except HailoToolsException as e:
    417             print("\033[?25h") # Bring back the cursur if it's still hidden

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in _run_hailo_tools(self, tool_args)
    289 
    290     def _run_hailo_tools(self, tool_args):
--> 291         self._output_integrated_pb_map, self._output_integrated_pb_graph = run_hailo_tools(tool_args, 'compiler',
    292                                                                                            client=self._client,
    293                                                                                            server=self._server,

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in run_hailo_tools(tool_args, exe_name, client, server, builder_pb_input)
    207 def run_hailo_tools(tool_args, exe_name, client=None, server=None, builder_pb_input=None):
    208     hailo_tools_path = SDKPaths().join_hailo_tools_path('build/' + exe_name)
--> 209     return run_tool_from_binary(hailo_tools_path, tool_args,
    210                                 client=client, server=server, builder_pb_input=builder_pb_input)
    211 

/usr/local/lib/python3.10/dist-packages/hailo_sdk_client/allocator/hailo_tools_runner.py in run_tool_from_binary(binary_path, tool_args, client, server, builder_pb_input)
    171     # Add the libraries for the or tools shared objects
    172     env = create_env()
--> 173     process = subprocess.Popen(cmd_args, env=env, pass_fds=(client.fileno(),))
    174     client.close()
    175 

/usr/lib/python3.10/subprocess.py in __init__(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags, restore_signals, start_new_session, pass_fds, user, group, extra_groups, encoding, errors, text, umask, pipesize)
    969                             encoding=encoding, errors=errors)
    970 
--> 971             self._execute_child(args, executable, preexec_fn, close_fds,
    972                                 pass_fds, cwd, env,
    973                                 startupinfo, creationflags, shell,

/usr/lib/python3.10/subprocess.py in _execute_child(self, args, executable, preexec_fn, close_fds, pass_fds, cwd, env, startupinfo, creationflags, shell, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite, restore_signals, gid, gids, uid, umask, start_new_session)
   1736 
   1737             if (_USE_POSIX_SPAWN
-> 1738                     and os.path.dirname(executable)
   1739                     and preexec_fn is None
   1740                     and not close_fds

/usr/lib/python3.10/posixpath.py in dirname(p)
    150 def dirname(p):
    151     """Returns the directory component of a pathname"""
--> 152     p = os.fspath(p)
    153     sep = _get_sep(p)
    154     i = p.rfind(sep) + 1

TypeError: expected str, bytes or os.PathLike object, not NoneType

If anybody encountered the same did you find out how to get around this issue?

Hi @shivatejasirimalla,
Did you use the end nodes described here (for the corresponding model)?

Hi @Omer, the first time I tried to translate the .onnx file, I extracted the input and output names with model.graph.input and used those names. However, during translation to a .har file, the translate API recommended using different output nodes, and when I looked it up, the recommendation was correct. So I changed the output nodes, the translation worked, and I successfully obtained the .har file.

1 Like