I have been trying to perform inference using the Hailo-8 on a Pi 5.
I have been following the Python API tutorial.
I have been able to perform classification using images: I convert my CSV to image.jpg and then use the JPG for classification (a simplified sketch of that conversion step is below).
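For reference, the conversion step I currently run before classification looks roughly like this (a simplified sketch; the helper name, the file handling, and the assumption that each CSV holds the flattened pixel values of one image are just for illustration):

import csv
import numpy as np
from PIL import Image

def csv_to_jpg(csv_path, jpg_path, height, width, channels):
    # Read all values from the CSV and flatten them into one list
    with open(csv_path, newline='') as f:
        values = [float(v) for row in csv.reader(f) for v in row if v != '']
    # Reshape to the model's input shape and save as a JPEG
    arr = np.array(values, dtype=np.float32).reshape(height, width, channels)
    arr = np.clip(arr, 0, 255).astype(np.uint8)  # assumes values are already scaled to 0-255
    Image.fromarray(arr.squeeze()).save(jpg_path)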
However, when I try to use the CSV file directly for classification, I get an error (full traceback below). This is the script I am using:
#!/usr/bin/env python3
import numpy as np
from hailo_platform import __version__
from multiprocessing import Process, Queue, Manager
from hailo_platform import (HEF, Device, VDevice, HailoStreamInterface, ConfigureParams,
InputVStreamParams, OutputVStreamParams, InputVStreams, OutputVStreams, FormatType)
from zenlog import log
import time
import argparse
import csv
parser = argparse.ArgumentParser(description='Running a Hailo inference using CSV input')
parser.add_argument('hef', help="HEF file path")
parser.add_argument('--input-csv', help="CSV file path containing input data for classification.")
parser.add_argument('--output-csv', default="inference_results.csv", help="Path to save the results as a CSV file.")
args = parser.parse_args()
# ---------------- Post-processing function ----------------- #
def post_processing(inference_output, sample_name, results_list):
    class_probabilities = inference_output[0]  # Assuming a single output with class probabilities
    predicted_class = np.argmax(class_probabilities)  # Get the index of the highest probability
    confidence = class_probabilities[predicted_class]
    print(f'Sample {sample_name}: Predicted class is {predicted_class} with confidence {confidence:.4f}')
    # Append the result to the results list
    results_list.append([sample_name, predicted_class, confidence])
# ------------------------------------------------------------ #
# ---------------- Inference threads functions -------------- #
def send(configured_network, csv_data, num_samples):
    vstreams_params = InputVStreamParams.make_from_network_group(configured_network, quantized=False, format_type=FormatType.FLOAT32)
    configured_network.wait_for_activation(100)
    print('Performing classification on CSV input...\n')
    with InputVStreams(configured_network, vstreams_params) as vstreams:
        for i in range(num_samples):
            for vstream in vstreams:
                data = np.expand_dims(csv_data[i], axis=0).astype(np.float32)
                vstream.send(data)
def recv(configured_network, write_q, num_samples):
    vstreams_params = OutputVStreamParams.make_from_network_group(configured_network, quantized=False, format_type=FormatType.FLOAT32)
    configured_network.wait_for_activation(100)
    with OutputVStreams(configured_network, vstreams_params) as vstreams:
        for _ in range(num_samples):
            curr_vstream_data_dict = {}
            for vstream in vstreams:
                data = vstream.recv()
                if data.ndim == 1:
                    curr_vstream_data_dict[vstream.name] = data
                else:
                    raise ValueError(f"Unexpected data shape: {data.shape}")
            write_q.put(curr_vstream_data_dict)
def inference(read_q, sample_names, num_samples, results_list):
    i = 0
    while i < num_samples:
        if not read_q.empty():
            inference_dict = read_q.get(0)
            inference_output = list(inference_dict.values())
            post_processing(inference_output, sample_names[i], results_list)
            i += 1
# ------------------------------------------------------------ #
# ---------------- Pre-processing CSV data ------------------ #
def preprocess_csv_data(csv_file_path):
    csv_data = []
    sample_names = []
    with open(csv_file_path, newline='') as csvfile:
        csvreader = csv.reader(csvfile)
        for i, row in enumerate(csvreader):
            sample_data = np.array(row, dtype=np.float32)  # No reshaping needed
            csv_data.append(sample_data)
            sample_names.append(f'Sample_{i+1}')  # Create unique sample names
    return np.array(csv_data), sample_names
# ------------------------------------------------------------ #
# ---------------- Save results to CSV file ----------------- #
def save_results_to_csv(results_list, output_csv_path="inference_results.csv"):
    if results_list:
        with open(output_csv_path, mode='w', newline='') as file:
            writer = csv.writer(file)
            writer.writerow(["Sample Name", "Predicted Class", "Confidence"])
            writer.writerows(results_list)
        print(f'Results saved to {output_csv_path}')
    else:
        print("No results to save.")
# ------------------------------------------------------------ #
# ---------------- Start of the script --------------------- #
hef = HEF(args.hef)
height, width, channels = hef.get_input_vstream_infos()[0].shape
num_features = height * width * channels # Define the number of features based on model input shape
csv_file_path = args.input_csv
if not csv_file_path:
    raise ValueError("CSV file path is required for input data.")
# Load and preprocess CSV data
csv_data, sample_names = preprocess_csv_data(csv_file_path)
num_samples = len(csv_data)
devices = Device.scan()
with VDevice(device_ids=devices) as target:
    configure_params = ConfigureParams.create_from_hef(hef, interface=HailoStreamInterface.PCIe)
    network_group = target.configure(hef, configure_params)[0]
    queue = Queue()
    # Use a Manager to create a shared list for results
    with Manager() as manager:
        results_list = manager.list()  # Use a managed list
        send_process = Process(target=send, args=(network_group, csv_data, num_samples))
        recv_process = Process(target=recv, args=(network_group, queue, num_samples))
        inference_process = Process(target=inference, args=(queue, sample_names, num_samples, results_list))
        start_time = time.time()
        recv_process.start()
        send_process.start()
        inference_process.start()
        with network_group.activate():
            recv_process.join()
            send_process.join()
            inference_process.join()
        end_time = time.time()
        print('Classification successful!\n')
        log.info('-------------------------------------')
        log.info(f' Infer Time: {end_time - start_time:.3f} sec')
        log.info(f' Average FPS: {num_samples / (end_time - start_time):.3f}')
        log.info('-------------------------------------')
        # Save the results to a CSV file after inference
        save_results_to_csv(list(results_list), args.output_csv)  # Convert to a regular list before saving
I run the script with the following command:
./hailo_onnxruntime_inference_1D.py TrainedModel1D.hef --input-csv /home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/Reversed_Data_0611/F0000_1.csv --output-csv /home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/inference_results.csv
and I get the following error:
[HailoRT] [error] CHECK failed - src size must be 800. passed size - 4
[HailoRT] [error] CHECK_SUCCESS failed with status=HAILO_INVALID_ARGUMENT(2)
[HailoRT] [error] CHECK_SUCCESS failed with status=HAILO_INVALID_ARGUMENT(2)
Process Process-2:
Traceback (most recent call last):
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3815, in send
self._send_object.send(data)
hailo_platform.pyhailort._pyhailort.HailoRTStatusException: 2
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/python3.11/multiprocessing/process.py", line 314, in _bootstrap
self.run()
File "/usr/lib/python3.11/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/./hailo_onnxruntime_inference_1D.py", line 39, in send
vstream.send(data)
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3814, in send
with ExceptionWrapper():
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 111, in __exit__
self._raise_indicative_status_exception(value)
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 156, in _raise_indicative_status_exception
raise self.create_exception_from_status(error_code) from libhailort_exception
hailo_platform.pyhailort.pyhailort.HailoRTInvalidArgumentException: Invalid argument. See hailort.log for more information
[HailoRT] [error] CHECK failed - UserBuffQEl0TrainedModel1D/softmax1 (D2H) failed with status=HAILO_TIMEOUT(4) (timeout=10000ms)
Process Process-3:
Traceback (most recent call last):
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3990, in recv
result_array = self._recv_object.recv()
^^^^^^^^^^^^^^^^^^^^^^^^
hailo_platform.pyhailort._pyhailort.HailoRTStatusException: 4
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/python3.11/multiprocessing/process.py", line 314, in _bootstrap
self.run()
File "/usr/lib/python3.11/multiprocessing/process.py", line 108, in run
self._target(*self._args, **self._kwargs)
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/./hailo_onnxruntime_inference_1D.py", line 48, in recv
data = vstream.recv()
^^^^^^^^^^^^^^
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 3989, in recv
with ExceptionWrapper():
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 111, in __exit__
self._raise_indicative_status_exception(value)
File "/home/Pi5/Desktop/MALI_BIJEN/PYTHON/Inference/venv/lib/python3.11/site-packages/hailo_platform/pyhailort/pyhailort.py", line 156, in _raise_indicative_status_exception
raise self.create_exception_from_status(error_code) from libhailort_exception
hailo_platform.pyhailort.pyhailort.HailoRTTimeout: Received a timeout - hailort has failed because a timeout had occurred
Could someone help me with this error?
If not, is there any other way for me to use a CSV file as input?
P.S. The reason I want to use a CSV instead of an image file is to avoid the time taken to convert the CSV file to an image.