Commits (32, all by david-tingdahl-nvidia)
6ed3083  start cleanup (Oct 27, 2025)
bb40736  build and deps (Oct 27, 2025)
2bf6348  printing and enum (Nov 20, 2025)
282f961  user build args (Nov 20, 2025)
f75ca5a  self validate (Nov 20, 2025)
155510f  cpp unittest (Nov 21, 2025)
2eea0e3  modularize (Nov 21, 2025)
2452f85  debug build (Nov 21, 2025)
f84783a  lint test (Nov 21, 2025)
1fec657  arg name (Nov 21, 2025)
8f24d20  pytest (Nov 21, 2025)
d799e94  Add back lint file (Nov 24, 2025)
d5675fc  add workflow action (Nov 24, 2025)
a4899ee  remove build.txt (Nov 24, 2025)
5dc1c59  on pr (Nov 24, 2025)
8c0df9b  yaml -> yml (Nov 24, 2025)
4b7fcb8  cleaner (Nov 25, 2025)
6d6b3c4  comment (Nov 25, 2025)
0e2f12a  renaming (Nov 25, 2025)
0e1ea32  debug (Nov 25, 2025)
970cf3f  test (Nov 25, 2025)
995bbf2  test (Nov 25, 2025)
065431e  checkout code (Nov 25, 2025)
7197b91  path fix (Nov 25, 2025)
6ee0485  move file (Nov 25, 2025)
45eb1b6  ngc login (Nov 25, 2025)
da40057  env -> secrets (Nov 25, 2025)
a6514b7  Refactor to action files (Nov 25, 2025)
7fdb5dd  Further refactoring (Nov 25, 2025)
03d9796  Add a comment (Nov 25, 2025)
c466719  pass ngc api key as input (Nov 25, 2025)
6bf06d5  Trigger workflow with updated action (Nov 25, 2025)
44 changes: 44 additions & 0 deletions .github/actions/build-and-test/action.yml
@@ -0,0 +1,44 @@
name: 'Build and Test'
description: 'Build nvblox Docker image and run tests'
inputs:
  platform:
    description: 'Platform to build for'
    required: true
  cuda-version:
    description: 'CUDA version'
    required: true
  ubuntu-version:
    description: 'Ubuntu version'
    required: true
  ngc-api-key:
    description: 'NGC API Key for authentication'
    required: true
runs:
  using: 'composite'
  steps:
    - name: Checkout Code
      uses: actions/checkout@v4
      with:
        fetch-depth: 0
        lfs: true
    - name: NGC Login
      uses: ./.github/actions/ngc-login
      with:
        ngc-api-key: ${{ inputs.ngc-api-key }}
    - name: Build dependencies image
      shell: bash
      run: |
        ci/premerge.py --platform ${{ inputs.platform }} --cuda-version ${{ inputs.cuda-version }} --ubuntu-version ${{ inputs.ubuntu-version }} --build-image deps
    - name: Build binaries image
      shell: bash
      run: |
        ci/premerge.py --platform ${{ inputs.platform }} --cuda-version ${{ inputs.cuda-version }} --ubuntu-version ${{ inputs.ubuntu-version }} --build-image build
    - name: Run CPP unit tests
      shell: bash
      run: |
        # Note that we could skip the explicit build-image steps, but separating them makes the steps stand out in the UI.
        ci/premerge.py --platform ${{ inputs.platform }} --cuda-version ${{ inputs.cuda-version }} --ubuntu-version ${{ inputs.ubuntu-version }} --build-and-test cpp
    - name: Run Python unit tests
      shell: bash
      run: |
        ci/premerge.py --platform ${{ inputs.platform }} --cuda-version ${{ inputs.cuda-version }} --ubuntu-version ${{ inputs.ubuntu-version }} --build-and-test python
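
ci/premerge.py is invoked above but does not appear in this part of the diff, so the following is only a hedged sketch of how its command-line interface might map the action's flags onto the enums defined later in ci/ci_utils.py; the wiring, defaults, and function name are assumptions.

# Hypothetical sketch only: ci/premerge.py is referenced by the action but not shown
# in this diff hunk, so the argument wiring here is an assumption.
import argparse

from ci_utils import CudaVersion, Platform, UbuntuVersion


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description='nvblox premerge CI entry point (sketch).')
    # Enum-valued flags: argparse converts e.g. "x86_64" into Platform.X86_64 by value.
    parser.add_argument('--platform', type=Platform, choices=list(Platform), required=True)
    parser.add_argument('--cuda-version', type=CudaVersion, choices=list(CudaVersion), required=True)
    parser.add_argument('--ubuntu-version', type=UbuntuVersion, choices=list(UbuntuVersion), required=True)
    # The composite action uses exactly one of these per invocation.
    parser.add_argument('--build-image', choices=['deps', 'build'])
    parser.add_argument('--build-and-test', choices=['cpp', 'python'])
    return parser.parse_args()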
21 changes: 21 additions & 0 deletions .github/actions/ngc-login/action.yml
@@ -0,0 +1,21 @@
name: 'NGC Login'
description: 'Login to NVIDIA NGC Container Registry'
inputs:
  ngc-api-key:
    description: 'NGC API Key for authentication'
    required: false
runs:
  using: 'composite'
  steps:
    - name: NGC Login
      shell: bash
      run: |
        # Only attempt NGC login if API key is available
        if [ -n "${{ inputs.ngc-api-key }}" ]; then
          echo "Logging into NGC registry..."
          docker login -u \$oauthtoken -p ${{ inputs.ngc-api-key }} nvcr.io
          echo "✅ Successfully logged into NGC registry"
        else
          echo "⚠️ NGC_API_KEY not available - skipping NGC login"
          echo "This is normal for PRs from forks or when secrets are not configured"
        fi
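
As a side note, the same conditional login could also be performed from the Python CI layer; the helper below is a hypothetical sketch and not part of this diff. Piping the key via --password-stdin keeps it out of the process list, which may be preferable to passing it with -p on the command line.

# Hypothetical helper: conditional NGC login from Python, mirroring the composite action above.
import os
import subprocess


def ngc_login() -> None:
    api_key = os.environ.get('NGC_API_KEY', '')
    if not api_key:
        # Normal for PRs from forks or when secrets are not configured.
        print('NGC_API_KEY not available - skipping NGC login')
        return
    # $oauthtoken is the literal username NGC expects; the key is piped via stdin.
    subprocess.run(
        ['docker', 'login', '--username', '$oauthtoken', '--password-stdin', 'nvcr.io'],
        input=api_key,
        text=True,
        check=True)
    print('Successfully logged into NGC registry')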
15 changes: 15 additions & 0 deletions .github/workflows/premerge.yml
@@ -0,0 +1,15 @@
name: nvblox premerge
on:
  pull_request:
jobs:
  gpu-job:
    name: Premerge
    runs-on: [self-hosted, gpu]  # GPU jobs will run on AWS
    steps:
      - name: x86_64-Cuda12-Ubuntu22 - Unit tests
        uses: ./.github/actions/build-and-test
        with:
          platform: x86_64
          cuda-version: '12'
          ubuntu-version: '22'
          ngc-api-key: ${{ secrets.NGC_API_KEY }}
244 changes: 244 additions & 0 deletions ci/ci_utils.py
@@ -0,0 +1,244 @@
#!/usr/bin/env python3
"""Base classes and abstractions for Docker image management.

This module provides abstract base classes for building and testing Docker images,
along with common enumerations used across different CI systems.
"""

import argparse
import subprocess
from abc import ABC, abstractmethod
from enum import Enum
from typing import List, Optional


class Platform(Enum):
    X86_64 = 'x86_64'
    JETPACK_5 = 'jetpack5'
    JETPACK_6 = 'jetpack6'


class CudaVersion(Enum):
    CUDA_11 = '11'
    CUDA_12 = '12'
    CUDA_13 = '13'


class UbuntuVersion(Enum):
    UBUNTU_22 = '22'
    UBUNTU_24 = '24'


class CudaSmArchitectures(Enum):
    SM_X86_CI_SUPPORTED = '120;100;90;89;86;80;75'
    SM_JETPACK_ORIN = '87'
    SM_NATIVE = 'native'


class DockerImage(ABC):
    """Abstract base class for Docker images.

    Wraps a dockerfile + build args. Supports a single dependent parent image.
    """

    def __init__(self, args: argparse.Namespace):
        self.args = args

    @abstractmethod
    def image_name_base(self) -> str:
        """Base name for the image (without suffix)."""
        pass

    @abstractmethod
    def dockerfile_path(self) -> str:
        """Path to the Dockerfile."""
        pass

    @abstractmethod
    def parent_image(self) -> Optional['DockerImage']:
        """Image can have a single parent image.

        Returns one of the DockerImage subclasses defined in this project.
        """
        pass

    def do_validate_image(self) -> bool:
        """Whether to validate the image after building. Override to disable validation."""
        return True

    @abstractmethod
    def build_args(self) -> List[str]:
        """Build arguments for the docker build command."""
        return []

    def image_name_suffix(self) -> str:
        """Platform/arch dependent suffix for the image name."""
        return (f'{self.args.platform.value}_cu{self.args.cuda_version.value}'
                f'_u{self.args.ubuntu_version.value}')

    def image_name(self) -> str:
        """Full image name with suffix."""
        return self.image_name_base() + '_' + self.image_name_suffix()

    def build(self) -> None:
        """Build a docker image from a Dockerfile. First builds the parent image if it exists."""

        parent = self.parent_image()
        if parent is not None:
            parent.build()

        image_name = self.image_name()

        # Print build information
        print('=' * 80)
        print(f'BUILDING: {image_name}')
        print('=' * 80)
        print(f'Dockerfile: {self.dockerfile_path()}')
        if parent is not None:
            print(f'Parent image: {parent.image_name()}')
        print(f'Platform: {self.args.platform.value}')
        print(f'CUDA version: {self.args.cuda_version.value}')
        print(f'CUDA architecture: {self.args.cuda_arch.value}')
        print(f'Ubuntu version: {self.args.ubuntu_version.value}')
        print(f'Max number of jobs: {self.args.max_num_jobs}')
        build_args_str = ', '.join(self.build_args() or [])
        print(f'Build arguments: {build_args_str}')
        user_build_args_str = ', '.join(self.args.user_build_args or [])
        print(f'User build arguments: {user_build_args_str}')
        print('=' * 80)

        cmd = [
            'docker', 'build', '-f',
            self.dockerfile_path(), '-t', image_name, '--network=host', '--progress=plain'
        ]

        if parent is not None:
            parent_name = parent.image_name()
            cmd += ['--build-arg', f'BASE_IMAGE={parent_name}']

        if self.build_args() is not None:
            for arg in self.build_args():
                cmd += ['--build-arg', arg]

        # Add extra docker args from args if provided
        if self.args.user_build_args is not None:
            cmd += self.args.user_build_args

        cmd += ['.']

        print(' '.join(cmd))
        subprocess.run(cmd, check=True)

        if self.do_validate_image():
            self._validate()

    def _validate(self) -> None:
        """Validate that the correct CUDA/Ubuntu version was built."""

        # Check ubuntu version
        lsb_release_result = subprocess.run(
            ['docker', 'run', '--rm',
             self.image_name(), 'lsb_release', '-a'],
            check=True,
            capture_output=True,
            text=True)
        expected_ubuntu = f'Ubuntu {self.args.ubuntu_version.value}'
        assert expected_ubuntu in lsb_release_result.stdout, (
            f'Failed to find the correct ubuntu version. '
            f'Stdout: {lsb_release_result.stdout}')

        # Check cuda version
        cuda_version_result = subprocess.run(
            ['docker', 'run', '--rm',
             self.image_name(), 'nvcc', '--version'],
            check=True,
            capture_output=True,
            text=True)
        expected_cuda = f'cuda_{self.args.cuda_version.value}'
        assert expected_cuda in cuda_version_result.stdout, (
            f'Failed to find the correct cuda version. '
            f'Stdout: {cuda_version_result.stdout}')

        print(f'Successfully validated image: {self.image_name()}')


class TestBase(ABC):
    """Base class for unit tests."""

    def __init__(self, args: argparse.Namespace):
        self.args = args

    @abstractmethod
    def image(self) -> DockerImage:
        """Get the image to run the test on."""
        pass

    @abstractmethod
    def get_command(self) -> str:
        """Get the command to run in the test."""
        pass

    @abstractmethod
    def get_cwd(self) -> str:
        """Get the working directory to run the command from inside the container."""
        pass

    def run(self) -> None:
        """Build the image and run the command inside it."""
        self.image().build()
        docker_cmd = ['docker', 'run', '--rm', self.image().image_name()]
        cwd = self.get_cwd()
        cmd = self.get_command()
        full_cmd = docker_cmd + ['bash', '-c'] + [f'cd {cwd} && {cmd}']
        subprocess.run(full_cmd, check=True)


class OsImage(DockerImage):
    """External CUDA or Jetpack OS base image. Used as a parent image for other images."""

    AVAILABLE_OS_IMAGES = {
        Platform.X86_64: {
            CudaVersion.CUDA_11: {
                UbuntuVersion.UBUNTU_22: 'nvcr.io/nvidia/cuda:11.8.0-devel-ubuntu22.04',
            },
            CudaVersion.CUDA_12: {
                UbuntuVersion.UBUNTU_22: 'nvcr.io/nvidia/cuda:12.8.0-devel-ubuntu22.04',
                UbuntuVersion.UBUNTU_24: 'nvcr.io/nvidia/cuda:12.8.0-devel-ubuntu24.04',
            },
            CudaVersion.CUDA_13: {
                UbuntuVersion.UBUNTU_22: 'nvcr.io/nvidia/cuda:13.0.0-devel-ubuntu22.04',
                UbuntuVersion.UBUNTU_24: 'nvcr.io/nvidia/cuda:13.0.0-devel-ubuntu24.04',
            },
        },
        Platform.JETPACK_5: 'nvcr.io/nvidia/l4t-jetpack:r35.4.1',
        Platform.JETPACK_6: 'nvcr.io/nvidia/l4t-jetpack:r36.3.0'
    }

    def get_os_image_name(self) -> str:
        platform_images = self.AVAILABLE_OS_IMAGES.get(self.args.platform, {})
        # Jetpack platforms map directly to a single image name rather than a
        # cuda-version/ubuntu-version table.
        if isinstance(platform_images, str):
            return platform_images
        cuda_images = platform_images.get(self.args.cuda_version, {})
        os_image = cuda_images.get(self.args.ubuntu_version)
        if os_image is None:
            raise ValueError(f'No OS image available for platform {self.args.platform}, '
                             f'cuda version {self.args.cuda_version}, '
                             f'and ubuntu version {self.args.ubuntu_version}')
        return os_image

    def image_name(self) -> str:
        return self.get_os_image_name()

    def image_name_base(self) -> str:
        raise NotImplementedError('OsImage does not have a base name')

    def dockerfile_path(self) -> str:
        raise NotImplementedError('OsImage does not have a dockerfile')

    def parent_image(self) -> Optional['DockerImage']:
        return None

    def build_args(self) -> List[str]:
        return []

    def build(self) -> None:
        """OS images are external and do not need to be built."""
        pass
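
To make the abstractions above concrete, here is a hedged sketch of how a project-specific image and test might subclass DockerImage and TestBase. The class names, Dockerfile path, build argument, and commands below are illustrative assumptions; the real subclasses would live elsewhere (for example in ci/premerge.py, which is not shown in this diff hunk).

# Illustrative only: hypothetical subclasses showing how the base classes compose.
from typing import List, Optional

from ci_utils import DockerImage, OsImage, TestBase


class DepsImage(DockerImage):
    """Dependencies image built on top of the external CUDA/Jetpack OS image (sketch)."""

    def image_name_base(self) -> str:
        return 'nvblox_deps'

    def dockerfile_path(self) -> str:
        return 'docker/Dockerfile.deps'  # assumed path

    def parent_image(self) -> Optional[DockerImage]:
        return OsImage(self.args)

    def build_args(self) -> List[str]:
        return [f'CMAKE_CUDA_ARCHITECTURES={self.args.cuda_arch.value}']  # assumed build arg


class CppUnitTest(TestBase):
    """Runs the C++ unit tests inside the built image (sketch)."""

    def image(self) -> DockerImage:
        return DepsImage(self.args)

    def get_command(self) -> str:
        return 'ctest --output-on-failure'  # assumed test command

    def get_cwd(self) -> str:
        return '/nvblox/build'  # assumed build directory inside the image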
65 changes: 65 additions & 0 deletions ci/lint_nvblox_h.sh
@@ -0,0 +1,65 @@
#!/bin/bash

echo -e "Linting nvblox.h"
echo -e ""

set -exuo pipefail

SUCCESS=1

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m' # No Color

# Get the list of files expected in the nvblox.h header.
# These are files which are in include, but not in an internal subfolder.
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
HEADER_DIR=$SCRIPT_DIR/../nvblox/include
HEADER_FILE_LIST=$(find $HEADER_DIR -type f \( -iname "*.h" ! -iname "*nvblox.h" \) ! -path "*/internal/*" ! -path "*/experimental/*" -printf '%P\n')

# Check that there are no impl files in public folder locations
for HEADER_FILE in $HEADER_FILE_LIST
do
    if [[ $HEADER_FILE == *"/impl/"* ]]
    then
        echo -e "${RED}Implementation file found in public folder: $HEADER_FILE${NC}"
        SUCCESS=0
    fi
done


# Search nvblox.h for each of these files.
NVBLOX_H_PATH=$HEADER_DIR/nvblox/nvblox.h
INCLUDES_STRING=""
AT_LEAST_ONE_HEADER_NOT_FOUND=0
for HEADER_FILE in $HEADER_FILE_LIST
do
    if ! grep -Fq $HEADER_FILE $NVBLOX_H_PATH
    then
        echo -e "${RED}Public header not in nvblox.h: $HEADER_FILE${NC}"
        AT_LEAST_ONE_HEADER_NOT_FOUND=1
    fi
    INCLUDES_STRING+="#include \"$HEADER_FILE\"\n"
done


# If not all headers are present, fail and suggest the headers to add.
if [ $AT_LEAST_ONE_HEADER_NOT_FOUND == 1 ]
then
    echo -e ""
    echo -e "${RED}Lint failing: Not all public headers are found in nvblox.h${NC}"
    echo -e ""
    echo -e "Replace the includes in nvblox.h with the following:"
    echo -e ""
    echo -e $INCLUDES_STRING
    SUCCESS=0
fi

if [ $SUCCESS == 0 ]
then
    echo -e "${RED}Lint of public includes in nvblox.h failed.${NC}"
    exit 1
else
    echo -e "${GREEN}Lint of public includes in nvblox.h passed.${NC}"
fi
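
Because the script resolves the header directory relative to its own location (via BASH_SOURCE), it can be invoked from any working directory. If it were to be wired into the Python CI layer above, a thin TestBase wrapper along these lines could work; the class name and the paths inside the container are assumptions, not part of this diff.

# Hypothetical wrapper: run the lint script through the TestBase abstraction.
from ci_utils import DockerImage, TestBase


class NvbloxHeaderLintTest(TestBase):
    """Runs ci/lint_nvblox_h.sh inside the built image (sketch)."""

    def image(self) -> DockerImage:
        # DepsImage is the hypothetical subclass sketched after ci/ci_utils.py above.
        return DepsImage(self.args)

    def get_command(self) -> str:
        return './ci/lint_nvblox_h.sh'

    def get_cwd(self) -> str:
        return '/nvblox'  # assumed repo checkout location inside the image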