|
| 1 | +#!/bin/bash |
| 2 | +# ============================================================================== |
| 3 | +# Copyright (C) 2026 Intel Corporation |
| 4 | +# |
| 5 | +# SPDX-License-Identifier: MIT |
| 6 | +# ============================================================================== |
| 7 | + |
| 8 | +declare -A DLSTREAMER_PIPELINES |
| 9 | +declare -A DEEPSTREAM_PIPELINES |
| 10 | + |
| 11 | +# Get arguments |
| 12 | +INPUT="$1" |
| 13 | +PIPELINE="$2" |
| 14 | +OUTPUT="$3" |
| 15 | + |
| 16 | +if [[ ${OUTPUT} =~ \.mp4 ]]; then |
| 17 | + OUTPUT="${OUTPUT%.*}" |
| 18 | +fi |
| 19 | + |
| 20 | +# Check if input is rtsp or uri or file |
| 21 | +if [[ ${INPUT} =~ 'rtsp://' ]]; then |
| 22 | + SOURCE="rtspsrc location=${INPUT}" |
| 23 | +elif [[ ${INPUT} =~ 'https://' ]]; then |
| 24 | + SOURCE="urisourcebin buffer-size=4096 uri=${INPUT}" |
| 25 | +else |
| 26 | + SOURCE="filesrc location=/working_dir/${INPUT}" |
| 27 | +fi |
| 28 | + |
| 29 | +# Definition of pipelines |
| 30 | +DLSTREAMER_PIPELINES[LPR]="gst-launch-1.0 ${SOURCE} ! decodebin3 ! vapostproc ! video/x-raw\(memory:VAMemory\) ! queue \ |
| 31 | +! gvadetect model=/working_dir/public/yolov8_license_plate_detector/FP32/yolov8_license_plate_detector.xml \ |
| 32 | +device=GPU pre-process-backend=va ! queue ! videoconvert ! \ |
| 33 | +gvaclassify model=/working_dir/public/ch_PP-OCRv4_rec_infer/FP32/ch_PP-OCRv4_rec_infer.xml device=GPU pre-process-backend=va \ |
| 34 | +! queue ! vapostproc ! gvawatermark ! gvafpscounter ! vah264enc bitrate=2000 ! h264parse ! mp4mux ! filesink location=/working_dir/${OUTPUT}_dls.mp4" |
| 35 | + |
| 36 | + |
| 37 | +DEEPSTREAM_PIPELINES[LPR]="gst-launch-1.0 ${SOURCE} ! qtdemux ! h264parse ! nvv4l2decoder ! m.sink_0 nvstreammux \ |
| 38 | +name=m batch-size=1 width=1920 height=1080 batched-push-timeout=40000 ! queue ! nvvideoconvert \ |
| 39 | +! video/x-raw\(memory:NVMM\),format=RGBA ! nvinfer \ |
| 40 | +config-file-path=/working_dir/deepstream_tao_apps/configs/nvinfer/trafficcamnet_tao/pgie_trafficcamnet_config.txt \ |
| 41 | +unique-id=1 ! queue ! nvinfer \ |
| 42 | +config-file-path=/working_dir/deepstream_tao_apps/configs/nvinfer/LPD_us_tao/sgie_lpd_DetectNet2_us.txt unique-id=2 \ |
| 43 | +! queue ! nvinfer config-file-path=/working_dir/deepstream_tao_apps/configs/nvinfer/lpr_us_tao/sgie_lpr_us_config.txt \ |
| 44 | +unique-id=3 ! queue ! nvdsosd display-text=1 display-bbox=1 display-mask=0 process-mode=1 ! nvvideoconvert \ |
| 45 | +! video/x-raw\(memory:NVMM\),format=NV12 ! nvv4l2h264enc bitrate=2000000 ! h264parse ! qtmux \ |
| 46 | +! filesink location=/working_dir/${OUTPUT}_ds.mp4 sync=false" |
| 47 | + |
| 48 | +# Check if pipeline is valid |
| 49 | +if [[ ! ${DLSTREAMER_PIPELINES[${PIPELINE}]} || ! ${DEEPSTREAM_PIPELINES[${PIPELINE}]} ]]; then |
| 50 | + printf 'Pipeline %s not found.\n' "${PIPELINE}" |
| 51 | + printf 'Available pipelines: ' |
| 52 | + for key in "${!DLSTREAMER_PIPELINES[@]}"; do |
| 53 | + printf '%s ' "$key" |
| 54 | + done |
| 55 | + printf '\n' |
| 56 | + exit 1 |
| 57 | +fi |
| 58 | + |
| 59 | +# Check if there is /dev/dri folder to run on GPU |
| 60 | +if [[ -e "/dev/dri" ]]; then |
| 61 | + DEVICE_DRI="--device /dev/dri --group-add $(stat -c "%g" /dev/dri/render* | head -1)" |
| 62 | +fi |
| 63 | + |
| 64 | +# Check if there is /dev/accel folder to run on NPU |
| 65 | +if [[ -e "/dev/accel" ]]; then |
| 66 | + DEVICE_ACCEL="--device /dev/accel --group-add $(stat -c "%g" /dev/accel/accel* | head -1)" |
| 67 | +fi |
| 68 | + |
| 69 | +# Variable for running commands from DL Streamer Docker |
| 70 | +DLSTREAMER_DOCKER="docker run -i --rm -v ${PWD}:/working_dir ${DEVICE_DRI} ${DEVICE_ACCEL} \ |
| 71 | +-v ~/.Xauthority:/root/.Xauthority -v /tmp/.X11-unix/:/tmp/.X11-unix/ -e DISPLAY=$DISPLAY -v /dev/bus/usb:/dev/bus/usb \ |
| 72 | +--env ZE_ENABLE_ALT_DRIVERS=libze_intel_npu.so \ |
| 73 | +--env MODELS_PATH=/working_dir \ |
| 74 | +intel/dlstreamer:2025.2.0-ubuntu24 /bin/bash -c" |
| 75 | + |
| 76 | +DEEPSTREAM_SETUP_LPR=$(cat <<EOF |
| 77 | +if [[ -e "/working_dir/deepstream_tao_apps" ]]; then |
| 78 | + exit 0 |
| 79 | +fi |
| 80 | +
|
| 81 | +git clone https://github.com/NVIDIA-AI-IOT/deepstream_tao_apps.git |
| 82 | +
|
| 83 | +set -e |
| 84 | +
|
| 85 | +cd /working_dir/deepstream_tao_apps |
| 86 | +mkdir -p ./models/trafficcamnet |
| 87 | +cd ./models/trafficcamnet |
| 88 | +wget --no-check-certificate --content-disposition 'https://api.ngc.nvidia.com/v2/models/org/nvidia/team/tao/trafficcamnet/pruned_onnx_v1.0.4/files?redirect=true&path=resnet18_trafficcamnet_pruned.onnx' -O resnet18_trafficcamnet_pruned.onnx |
| 89 | +wget --no-check-certificate --content-disposition 'https://api.ngc.nvidia.com/v2/models/org/nvidia/team/tao/trafficcamnet/pruned_onnx_v1.0.4/files?redirect=true&path=resnet18_trafficcamnet_pruned_int8.txt' -O resnet18_trafficcamnet_pruned_int8.txt |
| 90 | +
|
| 91 | +cd /working_dir/deepstream_tao_apps |
| 92 | +mkdir -p ./models/LPD_us |
| 93 | +cd ./models/LPD_us |
| 94 | +wget --no-check-certificate --content-disposition 'https://api.ngc.nvidia.com/v2/models/org/nvidia/team/tao/lpdnet/pruned_v2.3.1/files?redirect=true&path=LPDNet_usa_pruned_tao5.onnx' -O LPDNet_usa_pruned_tao5.onnx |
| 95 | +wget --no-check-certificate --content-disposition 'https://api.ngc.nvidia.com/v2/models/org/nvidia/team/tao/lpdnet/pruned_v2.3.1/files?redirect=true&path=usa_cal_10.1.0.bin' -O usa_cal_10.1.0.bin |
| 96 | +wget --no-check-certificate https://api.ngc.nvidia.com/v2/models/nvidia/tao/lpdnet/versions/pruned_v1.0/files/usa_lpd_label.txt |
| 97 | +
|
| 98 | +cd /working_dir/deepstream_tao_apps |
| 99 | +mkdir -p ./models/LPR_us |
| 100 | +cd ./models/LPR_us |
| 101 | +wget --no-check-certificate --content-disposition 'https://api.ngc.nvidia.com/v2/models/org/nvidia/team/tao/lprnet/deployable_onnx_v1.1/files?redirect=true&path=us_lprnet_baseline18_deployable.onnx' -O us_lprnet_baseline18_deployable.onnx |
| 102 | +touch labels_us.txt |
| 103 | +
|
| 104 | +
|
| 105 | +cd /working_dir/deepstream_tao_apps/apps/tao_others/deepstream_lpr_app/nvinfer_custom_lpr_parser/ |
| 106 | +
|
| 107 | +make |
| 108 | +
|
| 109 | +cp /working_dir/deepstream_tao_apps/apps/tao_others/deepstream_lpr_app/dict_us.txt /working_dir/dict.txt |
| 110 | +
|
| 111 | +EOF |
| 112 | +) |
| 113 | + |
| 114 | +DEEPSTREAM_DOCKER="docker run -i --rm --network=host --gpus all -e DISPLAY=$DISPLAY --device /dev/snd -v /tmp/.X11-unix/:/tmp/.X11-unix -v ${PWD}:/working_dir -w /working_dir nvcr.io/nvidia/deepstream:8.0-samples-multiarch /bin/bash -c" |
| 115 | + |
| 116 | +# Check if there are models in current directory and download if necessary |
| 117 | +if [[ ! -e "${PWD}/public/yolov8_license_plate_detector" ]]; then |
| 118 | + printf 'Downloading models....\n' |
| 119 | + eval "${DLSTREAMER_DOCKER}" + '"/opt/intel/dlstreamer/samples/download_public_models.sh yolov8_license_plate_detector,ch_PP-OCRv4_rec_infer"' |
| 120 | +fi |
| 121 | + |
| 122 | +# Check for Intel and Nvidia hardware |
| 123 | +INTEL_GPU=$(lspci -nn | grep -E 'VGA|3D|Display' | grep -i "Intel") |
| 124 | +NVIDIA_GPU=$(lspci -nn | grep -E 'VGA|3D|Display' | grep -i "NVIDIA") |
| 125 | +INTEL_CPU=$(lscpu | grep -i "Intel") |
| 126 | + |
| 127 | +print_intel_detected() { |
| 128 | + local HARDWARE="$1" |
| 129 | + printf -- "---------------------------------------\n Intel %s detected. \ |
| 130 | +Using DL Streamer\n---------------------------------------\n\n" "${HARDWARE}" |
| 131 | +} |
| 132 | + |
| 133 | +print_nvidia_detected() { |
| 134 | + printf -- "----------------------------------------\n NVIDIA GPU detected. \ |
| 135 | +Using DeepStream\n----------------------------------------\n\n" |
| 136 | +} |
| 137 | + |
| 138 | +eval_dlstreamer_pipeline() { |
| 139 | + printf 'PIPELINE:\n%s\n\n' "${DLSTREAMER_PIPELINES[${PIPELINE}]}" |
| 140 | + eval "${DLSTREAMER_DOCKER}" + "\"${DLSTREAMER_PIPELINES[${PIPELINE}]}\"" & |
| 141 | +} |
| 142 | + |
| 143 | +eval_deepstream_pipeline() { |
| 144 | + printf 'PIPELINE:\n%s\n\n' "${DEEPSTREAM_PIPELINES[${PIPELINE}]}" |
| 145 | + eval "${DEEPSTREAM_DOCKER}" + "\"${DEEPSTREAM_SETUP_LPR}\"" |
| 146 | + eval "${DEEPSTREAM_DOCKER}" + "\"${DEEPSTREAM_PIPELINES[${PIPELINE}]}\"" & |
| 147 | +} |
| 148 | + |
| 149 | +replace_in_dlstreamer_pipeline() { |
| 150 | + local FROM="$1" |
| 151 | + local TO="$2" |
| 152 | + DLSTREAMER_PIPELINES[${PIPELINE}]=${DLSTREAMER_PIPELINES[${PIPELINE}]//"${FROM}"/"${TO}"} |
| 153 | +} |
| 154 | + |
| 155 | +# Run pipeline |
| 156 | +if [[ -n "${NVIDIA_GPU}" && -n "${INTEL_GPU}" ]]; then |
| 157 | + print_nvidia_detected |
| 158 | + print_intel_detected "GPU" |
| 159 | + eval_dlstreamer_pipeline |
| 160 | + eval_deepstream_pipeline |
| 161 | +elif [[ -n "${NVIDIA_GPU}" && -e "/dev/accel" ]]; then |
| 162 | + print_nvidia_detected |
| 163 | + print_intel_detected "NPU" |
| 164 | + replace_in_dlstreamer_pipeline "GPU" "NPU" |
| 165 | + eval_dlstreamer_pipeline |
| 166 | + eval_deepstream_pipeline |
| 167 | +elif [[ -n "${NVIDIA_GPU}" && -n "${INTEL_CPU}" ]]; then |
| 168 | + print_nvidia_detected |
| 169 | + print_intel_detected "CPU" |
| 170 | + replace_in_dlstreamer_pipeline "GPU" "CPU" |
| 171 | + replace_in_dlstreamer_pipeline "vapostproc !" "" |
| 172 | + replace_in_dlstreamer_pipeline "pre-process-backend=va" "" |
| 173 | + replace_in_dlstreamer_pipeline "video/x-raw\\(memory:VAMemory\) !" "" |
| 174 | + replace_in_dlstreamer_pipeline "vah264enc bitrate=2000" "openh264enc bitrate=2000000" |
| 175 | + eval_dlstreamer_pipeline |
| 176 | + eval_deepstream_pipeline |
| 177 | +elif [[ -n "${INTEL_GPU}" ]]; then |
| 178 | + print_intel_detected "GPU" |
| 179 | + eval_dlstreamer_pipeline |
| 180 | +elif [[ -n "${NVIDIA_GPU}" ]]; then |
| 181 | + print_nvidia_detected |
| 182 | + eval_deepstream_pipeline |
| 183 | +elif [[ -e "/dev/accel" ]]; then |
| 184 | + print_intel_detected "NPU" |
| 185 | + replace_in_dlstreamer_pipeline "GPU" "NPU" |
| 186 | + eval_dlstreamer_pipeline |
| 187 | +elif [[ -n "${INTEL_CPU}" ]]; then |
| 188 | + print_intel_detected "CPU" |
| 189 | + replace_in_dlstreamer_pipeline "GPU" "CPU" |
| 190 | + replace_in_dlstreamer_pipeline "vapostproc !" "" |
| 191 | + replace_in_dlstreamer_pipeline "pre-process-backend=va" "" |
| 192 | + replace_in_dlstreamer_pipeline "video/x-raw\\(memory:VAMemory\) !" "" |
| 193 | + replace_in_dlstreamer_pipeline "vah264enc bitrate=2000" "openh264enc bitrate=2000000" |
| 194 | + eval_dlstreamer_pipeline |
| 195 | +fi |
| 196 | + |
| 197 | +# wait because of evals with & |
| 198 | +wait |
0 commit comments