from pathlib import Path
import numpy as np
from hailo_sdk_client import ClientRunner
from hailo_sdk_client.exposed_definitions import States, JoinAction
def merge_three_runners(har1_path, har2_path, har3_path, output_path):
    """Join three Hailo HAR models into one runner, then quantize and compile
    the merged graph into a single HEF file.

    Parameters
    ----------
    har1_path, har2_path, har3_path : str | Path
        Paths to the three input HAR archives (one per translated model).
    output_path : str | Path
        Destination path for the merged HAR; the compiled HEF is written to
        the same path with a ``.hef`` suffix.
    """
    # Normalize the inputs (original comment: convert to Path / str).
    har1_path = str(har1_path)
    har2_path = str(har2_path)
    har3_path = str(har3_path)
    output_path = Path(output_path)
    # Load the first two models into separate runners.
    runner1 = ClientRunner()
    runner1.load_har(har1_path)
    runner2 = ClientRunner()
    runner2.load_har(har2_path)
    print(runner1.state, runner2.state, "="*40)
    # Merge runner2 into runner1. The scope maps rename each model's default
    # 'model' scope so layer names do not collide after the join.
    runner1.join(
        runner2,
        scope1_name={'model': 'yolov5_model1'},
        scope2_name={'model': 'yolov5_model2'},
        join_action=JoinAction.NONE
    )
    # Load the third model and merge it into the already-joined runner1.
    runner3 = ClientRunner()
    runner3.load_har(har3_path)
    print(runner1.state, runner2.state, runner3.state, "="*40)
    runner1.join(
        runner3,
        # runner1 already holds two named scopes; map them onto themselves.
        scope1_name={'yolov5_model1': 'yolov5_model1', 'yolov5_model2': 'yolov5_model2'},
        # scope2_name={'model': 'general_model_512'},
        scope2_name={'model': 'yolov5_model3'},
        join_action=JoinAction.NONE
    )
    # Collect the input-layer names of the merged graph.
    # NOTE(review): input_layers is computed but never used below — either use
    # it to build calib_set_dict dynamically or delete this lookup.
    hn_dict = runner1.get_hn_dict()
    input_layers = [
        layer for layer in hn_dict.get('layers', {}).keys()
        if hn_dict['layers'][layer].get('type') == 'input_layer'
    ]
    # Model script: max compiler optimization plus a [0..255] -> [0..1]
    # input normalization for each of the three joined models.
    model_script_commands = [
        "performance_param(compiler_optimization_level=max)\n",
        "yolov5_model1/normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0])\n",
        "yolov5_model2/normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0])\n",
        "yolov5_model3/normalization1 = normalization([0.0, 0.0, 0.0], [255.0, 255.0, 255.0])\n",
    ]
    runner1.load_model_script("".join(model_script_commands))
    runner1.optimize_full_precision()
    # Prepare the calibration sets, one array per model input.
    # NOTE(review): each source HAR was already created via
    # optimize_full_precision() with its own normalization — confirm the
    # normalization is not applied twice after the join.
    calib_set_plate = np.load('./weights/detect/yolov5_model1/calib_set.npy')
    calib_set_char = np.load('./weights/detect/yolov5_model2/calib_set.npy')
    calib_set_general = np.load('./weights/detect/yolov5_model3/calib_set.npy')
    # Map each joined model's input layer to its calibration data
    # (capped at 5000 samples per model).
    calib_set_dict = {
        'yolov5_model1/input_layer1': calib_set_plate[:5000],
        'yolov5_model2/input_layer1': calib_set_char[:5000],
        'yolov5_model3/input_layer1': calib_set_general[:5000],
    }
    # NOTE(review): the HAR is saved *before* optimize(), so the archive on
    # disk holds the merged full-precision model, not the quantized one.
    runner1.save_har(str(output_path))
    print(f"合併後 HAR 已儲存到 {output_path}")  # "Merged HAR saved to ..."
    # Quantize with the calibration data, then compile to a HEF binary.
    runner1.optimize(calib_set_dict)
    hef = runner1.compile()
    hef_path = output_path.with_suffix('.hef')
    with open(hef_path, 'wb') as f:
        f.write(hef)
    print(f"合併完成!")  # "Merge complete!"
    # print(f"HAR 檔案: {har_output_path}")
    print(f"HEF 檔案: {hef_path}")  # "HEF file: ..."
# Usage example
if __name__ == "__main__":
    weights_root = './weights/detect'
    # Input HARs: two 224x224x3 models and one 512x512x3 model.
    har1 = f'{weights_root}/yolov5_model1/yolov5_model1.har'  # (224, 224, 3)
    har2 = f'{weights_root}/yolov5_model2/yolov5_model2.har'  # (224, 224, 3)
    har3 = f'{weights_root}/yolov5_model3/yolov5_model3.har'  # (512, 512, 3)
    merged_har = Path(
        f'{weights_root}/yolov5_model1_model2_model3/yolov5_model1_model2_model3.har'
    )
    merge_three_runners(har1, har2, har3, merged_har)
The code above merges three models into a single .hef, and each .har is converted from an .onnx file using the code below.
import os

from hailo_sdk_client import ClientRunner, InferenceContext
from utils import make_quant_dataset

# Translate one YOLOv5 ONNX model to a full-precision HAR archive.
# Change model_name to convert a different model.
model_name = "yolov5_model1"
onnx_path = f"./weights/detect/{model_name}/{model_name}.onnx"

# BUG FIX: the original script asserted on onnx_path *before* defining it
# (NameError) and never imported `os`. Validation now runs after the path is
# built, and raises explicitly instead of using `assert` (which is stripped
# under `python -O`).
if not os.path.isfile(onnx_path):
    raise FileNotFoundError("Please provide valid path for ONNX file")

runner = ClientRunner(hw_arch="hailo8l")
# End nodes: the three YOLOv5s detection-head convolutions (cut before
# post-processing so the Hailo compiler handles only the backbone/neck/head).
runner.translate_onnx_model(
    onnx_path,
    end_node_names=[
        "/model/model.24/m.0/Conv",
        "/model/model.24/m.1/Conv",
        "/model/model.24/m.2/Conv",
    ],
)  # yolov5s
runner.optimize_full_precision()
runner.save_har(f"./weights/detect/{model_name}/{model_name}.har")
The problem is that I can run the merge code successfully, but the inference results are really poor when merging three models. However, when I merge only two models, the results are good. The bounding boxes detected by my model are as follows:
Thank you for your time and patience in reviewing my question!
