Error Building a Custom Pipeline

Hello! I am attempting to run the instance_segmentation pipeline and depth pipeline but I am receiving a few errors:

1: .env file not found: /home/sdp/hailo-rpi5-examples/hailo-apps-infra/.env

2: Trying to link elements cropper_wrapper_cropper and inference_scale_q that don’t share a common ancestor: inference_scale_q hasn’t been added to a bin or pipeline, and cropper_wrapper_cropper is in pipeline0

3: gst_element_link_pads_filtered: assertion ‘GST_IS_BIN (parent)’ failed

4: Trying to link elements inference_output_q and cropper_wrapper_agg that don’t share a common ancestor: inference_output_q hasn’t been added to a bin or pipeline, but cropper_wrapper_agg is in pipeline0

Pipeline Code:

# region imports

# Standard library imports

import gi
import os
import setproctitle
from pathlib import Path
import sys

# Local application-specific imports

from hailo_apps.hailo_app_python.core.common.installation_utils import detect_hailo_arch
from hailo_apps.hailo_app_python.core.common.core import get_default_parser, get_resource_path
from hailo_apps.hailo_app_python.core.common.defines import RESOURCES_JSON_DIR_NAME, HAILO_ARCH_KEY, INSTANCE_SEGMENTATION_APP_TITLE, INSTANCE_SEGMENTATION_PIPELINE, RESOURCES_MODELS_DIR_NAME, RESOURCES_SO_DIR_NAME, INSTANCE_SEGMENTATION_MODEL_NAME_H8, INSTANCE_SEGMENTATION_MODEL_NAME_H8L, INSTANCE_SEGMENTATION_POSTPROCESS_SO_FILENAME, INSTANCE_SEGMENTATION_POSTPROCESS_FUNCTION, DEFAULT_LOCAL_RESOURCES_PATH, JSON_FILE_EXTENSION, DEPTH_PIPELINE, DEPTH_POSTPROCESS_SO_FILENAME, DEPTH_POSTPROCESS_FUNCTION, DEPTH_MODEL_NAME
from hailo_apps.hailo_app_python.core.gstreamer.gstreamer_helper_pipelines import SOURCE_PIPELINE, INFERENCE_PIPELINE, INFERENCE_PIPELINE_WRAPPER, TRACKER_PIPELINE, USER_CALLBACK_PIPELINE, CROPPER_PIPELINE, DISPLAY_PIPELINE
from hailo_apps.hailo_app_python.core.gstreamer.gstreamer_app import GStreamerApp, app_callback_class, dummy_callback

# endregion imports

#-----------------------------------------------------------------------------------------------

# User GStreamer Application: Instance Segmentation + Depth Estimation

#-----------------------------------------------------------------------------------------------

class GStreamerApproachDepthApp(GStreamerApp):
def **init**(self, app_callback, user_data, app_path, parser=None):

    if parser is None:
        parser = get_default_parser()
    super().__init__(parser, user_data)

    # Hailo parameters
    self.batch_size = 2
    self.video_width = 800
    self.video_height = 480

    # Detect architecture if not provided
    if self.options_menu.arch is None:
        detected_arch = os.getenv(HAILO_ARCH_KEY, detect_hailo_arch())
        if detected_arch is None:
            raise ValueError("Could not auto-detect Hailo architecture. Please specify --arch manually.")
        self.arch = detected_arch
        print(f"Auto-detected Hailo architecture: {self.arch}")
    else:
        self.arch = self.options_menu.arch

    # Set HEF path (string) for segmentation models
    if self.options_menu.hef_path:
        self.hef_path = str(self.options_menu.hef_path)
    else:
        # get_resource_path will use RESOURCE_PATH from env
        self.hef_path = str(get_resource_path(
            pipeline_name=INSTANCE_SEGMENTATION_PIPELINE,
            resource_type=RESOURCES_MODELS_DIR_NAME,
        ))
        self.depth_hef_path = str(get_resource_path(
		    pipeline_name=DEPTH_PIPELINE,
		    resource_type=RESOURCES_MODELS_DIR_NAME,
		))

    # Determine which JSON config to use based on HEF filename
    hef_name = Path(self.hef_path).name
    if INSTANCE_SEGMENTATION_MODEL_NAME_H8 in hef_name:
        self.config_file = get_resource_path(INSTANCE_SEGMENTATION_PIPELINE, RESOURCES_JSON_DIR_NAME , (INSTANCE_SEGMENTATION_MODEL_NAME_H8 + JSON_FILE_EXTENSION))
        print(f"Using config file: {self.config_file}")
    elif INSTANCE_SEGMENTATION_MODEL_NAME_H8L in hef_name:
        self.config_file = get_resource_path(INSTANCE_SEGMENTATION_PIPELINE, RESOURCES_JSON_DIR_NAME , (INSTANCE_SEGMENTATION_MODEL_NAME_H8L + JSON_FILE_EXTENSION))
    else:
        raise ValueError("HEF version not supported; please provide a compatible segmentation HEF or config file.")

    # Post-process shared object
    self.post_process_so = get_resource_path(INSTANCE_SEGMENTATION_PIPELINE, RESOURCES_SO_DIR_NAME, INSTANCE_SEGMENTATION_POSTPROCESS_SO_FILENAME)
    self.post_function_name = INSTANCE_SEGMENTATION_POSTPROCESS_FUNCTION
	
    self.depth_post_process_so = get_resource_path(DEPTH_PIPELINE, RESOURCES_SO_DIR_NAME, DEPTH_POSTPROCESS_SO_FILENAME)
    self.depth_post_function_name = DEPTH_POSTPROCESS_FUNCTION
	
    self.post_process_so_cropper = os.path.join(app_path, 'resources/libdetections_cropper.so')
    self.cropper_post_function_name = "crop_detections"
    
    # Callback
    self.app_callback = app_callback

    # Set process title for easy identification
    setproctitle.setproctitle("Approach Depth App")

    # Build the GStreamer pipeline
    self.create_pipeline()

def get_pipeline_string(self):
    source_pipeline = SOURCE_PIPELINE(video_source=self.video_source,
                                      video_width=self.video_width, video_height=self.video_height,
                                      frame_rate=self.frame_rate, sync=self.sync
)

    infer_pipeline = INFERENCE_PIPELINE(
        hef_path=self.hef_path,
        post_process_so=self.post_process_so,
        post_function_name=self.post_function_name,
        batch_size=self.batch_size,
        config_json=self.config_file,
    )
    infer_pipeline_wrapper = INFERENCE_PIPELINE_WRAPPER(infer_pipeline)
    tracker_pipeline = TRACKER_PIPELINE(class_id=1)
    depth_pipeline = INFERENCE_PIPELINE(
		hef_path=self.depth_hef_path,
		post_process_so=self.depth_post_process_so,
		post_function_name=self.depth_post_function_name,
		#batch_size=self.batch_size,
	)
    cropper_pipeline = CROPPER_PIPELINE(
        inner_pipeline=(f'{depth_pipeline}'),
        so_path=self.post_process_so_cropper,
        function_name=self.cropper_post_function_name,
        internal_offset=True
    )
    user_callback_pipeline = USER_CALLBACK_PIPELINE()
    display_pipeline = DISPLAY_PIPELINE(
        video_sink=self.video_sink,
        sync=self.sync,
        show_fps=self.show_fps,
    )

    pipeline_string = (
        f"{source_pipeline} ! "
        f"{infer_pipeline_wrapper} ! "
        f"{tracker_pipeline} ! "
        f"{cropper_pipeline} ! "
        f"{user_callback_pipeline} ! "
        f"{display_pipeline}"
    )
    print(pipeline_string)
    return pipeline_string

def main():
user_data = app_callback_class()
app = GStreamerApproachDepthApp(dummy_callback, user_data)
app.run()

if **name** == “**main**”:
print(“Starting Approach Depth App…”)
main()

Thank you for your help!

Hey @fg793,

  1. About the .env file: Yeah, that should’ve been created during installation. If it’s missing, just go ahead and create it with user permissions. You’ll need it to configure your Hailo architecture settings and other parameters. Alternatively, you can configure those settings manually if you prefer!

Regarding Errors 2, 3, & 4: GStreamer Linking Issues

Your pipeline elements aren’t connecting the way they should. GStreamer is essentially saying “these pieces you’re trying to snap together don’t belong to the same structure.”

Why this happens: The pipeline components aren’t living in the same “container” (what we call a bin in GStreamer). Think of it like trying to connect LEGO pieces from different sets that just weren’t designed to fit together.

Here’s How to Fix It

You need to follow the right pattern. Here’s what that looks like:

# First, import the helper functions
from hailo_apps.hailo_gstreamer.gstreamer_helper_pipelines import (
    SOURCE_PIPELINE, INFERENCE_PIPELINE, CROPPER_PIPELINE, DISPLAY_PIPELINE
)

# Build each component
first_detector = INFERENCE_PIPELINE(hef_path='detector.hef', post_process_so='detector_post.so')
second_classifier = INFERENCE_PIPELINE(hef_path='classifier.hef', post_process_so='classifier_post.so')
cropper = CROPPER_PIPELINE(second_classifier, so_path='cropper.so', function_name='crop_func')

# Now connect everything together in ONE single string
pipeline_string = (
    f"{SOURCE_PIPELINE(video_source='input.mp4')} ! "
    f"{first_detector} ! {cropper} ! {DISPLAY_PIPELINE()}"
)

Hope this clears things up!