Using Python with HailoRT without TAPPAS

I just installed HailoRT 4.18, and I want to run face detection inference. Here is my inference code:

# General imports used throughout the tutorial
from multiprocessing import Process

import numpy as np

from hailo_platform import (
    HEF,
    ConfigureParams,
    FormatType,
    HailoSchedulingAlgorithm,
    HailoStreamInterface,
    InferVStreams,
    InputVStreamParams,
    InputVStreams,
    OutputVStreamParams,
    OutputVStreams,
    VDevice,
)

import cv2

# Read the image
image_path = 'image/Aaron_Eckhart_0001.jpg'  # replace with your image path
image = cv2.imread(image_path)

# Resize the image to 640x640
resized_image = cv2.resize(image, (640, 640))

# Convert to a numpy array (cv2.imread already returns one)
numpy_array = np.array(resized_image)

# Print the result
print(numpy_array.shape)  # should print (640, 640, 3) or (640, 640) depending on the image type

# Setting VDevice params to disable the HailoRT service feature
params = VDevice.create_params()
params.scheduling_algorithm = HailoSchedulingAlgorithm.NONE

# The target can be used as a context manager ("with" statement) to ensure it's released on time.
# Here it's avoided for the sake of simplicity
target = VDevice(params=params)

# Loading compiled HEFs to device:
model_name = "scrfd_500m"
hef_path = f"{model_name}.hef"
hef = HEF(hef_path)

# Get the "network groups" (connectivity groups, aka. "different networks") information from the .hef
configure_params = ConfigureParams.create_from_hef(hef=hef, interface=HailoStreamInterface.PCIe)
network_groups = target.configure(hef, configure_params)
network_group = network_groups[0]
network_group_params = network_group.create_params()

# Define dataset params
input_vstream_info = hef.get_input_vstream_infos()[0]
# print(f"input_info{input_vstream_info}")
output_vstream_info = hef.get_output_vstream_infos()[0]
# print(f"outpu_info{output_vstream_info}")
image_height, image_width, channels = input_vstream_info.shape
# print(image_height, image_width, channels)


def send(configured_network, num_frames):
    vstreams_params = InputVStreamParams.make(configured_network)
    with InputVStreams(configured_network, vstreams_params) as vstreams:
        configured_network.wait_for_activation(1000)
        vstream_to_buffer = {
            vstream: np.ndarray([1] + list(vstream.shape), dtype=vstream.dtype) for vstream in vstreams
        }
        for _ in range(num_frames):
            for vstream, buff in vstream_to_buffer.items():
                buff = numpy_array
                print(f"buff type {type(buff)}, shape {buff.shape}")
                vstream.send(buff)

def recv(configured_network, num_frames):
    vstreams_params = OutputVStreamParams.make(configured_network)
    configured_network.wait_for_activation(1000)
    with OutputVStreams(configured_network, vstreams_params) as vstreams:
        for _ in range(num_frames):
            for vstream in vstreams:
                _data = vstream.recv()

# Define the amount of frames to stream
num_of_frames = 2

# Start the streaming inference
send_process = Process(target=send, args=(network_group, num_of_frames))
recv_process = Process(target=recv, args=(network_group, num_of_frames))
send_process.start()
recv_process.start()

print(f"Starting streaming (hef='{model_name}', num_of_frames={num_of_frames})")
with network_group.activate(network_group_params):
    send_process.join()
    recv_process.join()

# Clean pcie target
target.release()
print("Done")

This code is based on the official inference tutorial, and I get the following error:

(hailo_env) ain@raspberrypi5:~/face_hailo $ python run.py 
(640, 640, 3)
Starting streaming (hef='scrfd_500m', num_of_frames=2)
buff type <class 'numpy.ndarray'>, shape (640, 640, 3)
[HailoRT] [error] CHECK failed - write size 0 must be 1228800
[HailoRT] [error] CHECK_SUCCESS failed with status=HAILO_INVALID_ARGUMENT(2) - HwWriteEl7scrfd_500m/input_layer1 (H2D) failed with status=HAILO_INVALID_ARGUMENT(2)
Process Process-1:
Traceback (most recent call last):
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3787, in send
    self._send_object.send(data)
hailo_platform.pyhailort._pyhailort.HailoRTStatusException: 2

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/usr/lib/python3.11/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/usr/lib/python3.11/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/home/ain/face_hailo/run.py", line 81, in send
    vstream.send(buff)
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3786, in send
    with ExceptionWrapper():
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 110, in __exit__
    self._raise_indicative_status_exception(value)
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 155, in _raise_indicative_status_exception
    raise self.create_exception_from_status(error_code) from libhailort_exception
hailo_platform.pyhailort.pyhailort.HailoRTInvalidArgumentException: Invalid argument. See hailort.log for more information
[HailoRT] [error] CHECK failed - UserBuffQEl9scrfd_500m/conv40 (D2H) failed with status=HAILO_TIMEOUT(4) (timeout=10000ms)
Process Process-2:
Traceback (most recent call last):
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3962, in recv
    result_array = self._recv_object.recv()
                   ^^^^^^^^^^^^^^^^^^^^^^^^
hailo_platform.pyhailort._pyhailort.HailoRTStatusException: 4

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/usr/lib/python3.11/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/usr/lib/python3.11/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/home/ain/face_hailo/run.py", line 89, in recv
    _data = vstream.recv()
            ^^^^^^^^^^^^^^
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3961, in recv
    with ExceptionWrapper():
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 110, in __exit__
    self._raise_indicative_status_exception(value)
  File "/home/ain/hailo_env/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 155, in _raise_indicative_status_exception
    raise self.create_exception_from_status(error_code) from libhailort_exception
hailo_platform.pyhailort.pyhailort.HailoRTTimeout: Received a timeout - hailort has failed because a timeout had occurred
Done

Hey @jiahao.li,

The error message “CHECK failed - write size 0 must be 1228800” suggests a mismatch between the expected input buffer size and the data you’re providing during inference.

To resolve this:

  1. Verify Input Shape:
    Use hef.get_input_vstream_infos() to confirm the correct input dimensions.

  2. Check Buffer Size:
    Call hailo_get_input_stream_frame_size() to ensure your buffer matches the expected size.

  3. Confirm Data Format:
    Ensure your buffer’s data type and format (e.g., uint8 for RGB, NHWC for images) align with the Hailo input stream requirements.

  4. Review Image Processing:
    Double-check that any preprocessing steps (like resizing) result in the correct shape and size (see the sketch right after this list).
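
For steps 1-3, a quick sanity check along these lines can help. This is only a minimal sketch: it reuses the scrfd_500m.hef path from your code and assumes a uint8 input layer; the name attribute on the vstream info is an assumption, so drop that line if it differs in your HailoRT version.

import numpy as np
from hailo_platform import HEF

hef = HEF("scrfd_500m.hef")
input_info = hef.get_input_vstream_infos()[0]
print("Input name:", input_info.name)    # assumed attribute; check your vstream info object
print("Input shape:", input_info.shape)  # e.g. (640, 640, 3)

# Expected frame size in bytes for a uint8 input: 640 * 640 * 3 = 1228800,
# which matches the size in your error message.
expected_bytes = int(np.prod(input_info.shape)) * np.dtype(np.uint8).itemsize
print("Expected frame size:", expected_bytes)

# Your preprocessed frame goes here; zeros are used only to illustrate the check.
buff = np.zeros(input_info.shape, dtype=np.uint8)
assert buff.nbytes == expected_bytes, f"buffer is {buff.nbytes} bytes, expected {expected_bytes}"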

Let me know if you need further clarification or assistance!

Best regards

Hi @jiahao.li,

As @omria said, the issue is a mismatch between expected input size and the actual size being sent.

I can see that you are sending each image to the device in your code, but please note that the device expects an extra dimension for the batch size. For that you can use, for example, numpy's expand_dims; a minimal sketch follows below.
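
Here is a rough sketch of how the send loop could look with the batch dimension added. It reuses the names from your code (numpy_array, vstream_to_buffer, num_frames) and assumes the input layer takes uint8 data; treat it as a starting point rather than a drop-in replacement.

for _ in range(num_frames):
    for vstream, buff in vstream_to_buffer.items():
        # add the leading batch axis: (640, 640, 3) -> (1, 640, 640, 3)
        frame = np.expand_dims(numpy_array, axis=0).astype(vstream.dtype)
        # copy into the pre-allocated buffer instead of rebinding the loop variable
        buff[:] = frame
        vstream.send(buff)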