
Commit 4665f43

dans-msft and Lucas Pickup authored
Adds ability to build llgtrt based on TRTLLM built from source (#18)
* Adds ability to build llgtrt based on TRTLLM built from source
* Re-commented a couple of stray uncommented lines

Co-authored-by: Lucas Pickup <[email protected]>
1 parent d47d417 · commit 4665f43

5 files changed: +91 -14 lines

README.md

Lines changed: 11 additions & 0 deletions
````diff
@@ -48,6 +48,17 @@ To build a container, use:
 
 The build script will initialize submodules if they are missing. It takes about 15 minutes on a GitHub runner and should typically be faster on a local machine.
 
+#### Optional: Building TensorRT-LLM from source
+
+The build process above uses prebuilt binaries from a release version of TensorRT-LLM. It is also possible to build your own version of TensorRT-LLM from source and create a build of llgtrt based on that. This can be used to build a version of llgtrt that works with versions of TensorRT-LLM newer than the released versions in NVIDIA's repositories.
+
+To do so, first build TensorRT-LLM from source following the instructions at https://nvidia.github.io/TensorRT-LLM/installation/build-from-source-linux.html
+
+Now, build llgtrt based on the Docker image you built above:
+```bash
+./docker/build.sh --trtllm tensorrt_llm/release
+```
+
 ### Building the TensorRT-LLM Engine
 
 This is based on the [TensorRT-LLM Quick-start](https://nvidia.github.io/TensorRT-LLM/quick-start-guide.html).
````
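For orientation, a sketch of the end-to-end flow this addition describes. The `make -C docker release_build` target and the resulting `tensorrt_llm/release` image tag are taken from the linked TensorRT-LLM build-from-source instructions and may differ between TensorRT-LLM versions; the directory layout is only an example.

```bash
# Sketch: build TensorRT-LLM from source into a local Docker image,
# then point llgtrt's container build at it via the new --trtllm flag.
git clone https://github.com/NVIDIA/TensorRT-LLM.git
cd TensorRT-LLM
# Per the linked docs, this tags the result as tensorrt_llm/release
make -C docker release_build

cd ../llgtrt
./docker/build.sh --trtllm tensorrt_llm/release
```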

docker/Dockerfile

Lines changed: 23 additions & 6 deletions
```diff
@@ -1,4 +1,16 @@
-FROM nvcr.io/nvidia/tensorrt:24.12-py3 AS llgtrt_dev
+# This argument allows you to override the base image with your own custom build of TensorRT or TensorRT-LLM
+ARG BASE_IMAGE=nvcr.io/nvidia/tensorrt:24.12-py3
+
+# Set this to false if your image already includes TensorRT-LLM
+ARG INSTALL_TRTLLM=true
+
+# Set this to 1 to enable the CXX11_ABI during native builds
+ARG USE_CXX11_ABI=0
+
+FROM ${BASE_IMAGE} as llgtrt_dev
+
+ARG INSTALL_TRTLLM
+ARG USE_CXX11_ABI
 
 RUN apt-get update
 RUN DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
@@ -32,10 +44,14 @@ COPY TensorRT-LLM/examples/llama/requirements.txt /tmp/reqs/
 # COPY TensorRT-LLM/docker/common/install_mpi4py.sh /tmp/reqs/
 # COPY TensorRT-LLM/docker/common/install_tensorrt.sh /tmp/reqs/
 
-# RUN bash /tmp/reqs/install_mpi4py.sh
-# RUN bash /tmp/reqs/install_tensorrt.sh
+#RUN bash /tmp/reqs/install_mpi4py.sh
+#RUN bash /tmp/reqs/install_tensorrt.sh
 
-RUN cd /tmp/reqs && pip install --upgrade -r requirements.txt
+# Install tensorrt_llm and its backing c++ libraries from pip unless disabled
+RUN echo "INSTALL_TRTLLM is set to: ${INSTALL_TRTLLM}" && \
+    if [ "${INSTALL_TRTLLM}" = "true" ]; then \
+        cd /tmp/reqs && pip install --upgrade -r requirements.txt; \
+    fi
 
 # more packages for this image
 RUN pip install \
@@ -45,7 +61,7 @@ RUN pip install \
     pandas matplotlib plotly wheel
 RUN pip uninstall -y guidance
 
-# RUN pip install --upgrade transformers
+RUN pip install --upgrade transformers
 
 RUN cd /usr/local/lib/python3.12/dist-packages/tensorrt_llm/libs/ && \
     ln -s libnvinfer_plugin_tensorrt_llm.so libnvinfer_plugin_tensorrt_llm.so.10
@@ -65,7 +81,8 @@ COPY . .
 
 # link stub, so that it builds without nvidia-runtime
 RUN cd /usr/local/cuda/lib64 && ln -s stubs/libnvidia-ml.so libnvidia-ml.so.1
-RUN ./scripts/build.sh --clean
+RUN bash -c 'env'
+RUN ./scripts/build.sh --clean --cxx11abi $USE_CXX11_ABI
 # remove stub just in case
 RUN rm /usr/local/cuda/lib64/libnvidia-ml.so.1
```
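The new `ARG`s are normally supplied by `docker/build.sh` (next file), but the wiring can also be exercised directly. A hedged sketch, where `my-trtllm:latest` is a placeholder tag for your own TensorRT-LLM image, not something this repository provides:

```bash
# Sketch: drive the new build arguments by hand instead of via docker/build.sh.
# BASE_IMAGE=my-trtllm:latest is a placeholder for your own TensorRT-LLM image;
# INSTALL_TRTLLM=false skips the pip install because that image already ships TensorRT-LLM;
# USE_CXX11_ABI=1 matches what docker/build.sh sets for source-built images.
docker build --progress=plain \
  --build-arg BASE_IMAGE=my-trtllm:latest \
  --build-arg INSTALL_TRTLLM=false \
  --build-arg USE_CXX11_ABI=1 \
  --target llgtrt_prod \
  -f docker/Dockerfile .
```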

docker/build.sh

Lines changed: 44 additions & 6 deletions
```diff
@@ -1,11 +1,49 @@
-#!/bin/sh
+#!/bin/bash
 
-cd $(dirname $0)/..
+# Default values
+DEV_MODE=false
+BASE_IMAGE=""
+INSTALL_TRTLLM=true
+USE_CXX11_ABI=0
 
-if [ ! -f TensorRT-LLM/README.md ] ; then git submodule update --init ; fi
+# Parse command-line arguments
+while [[ $# -gt 0 ]]; do
+    case "$1" in
+        --dev)
+            DEV_MODE=true
+            shift
+            ;;
+        --trtllm)
+            if [[ -n "$2" && "$2" != --* ]]; then
+                BASE_IMAGE="$2"
+                INSTALL_TRTLLM=false
+                USE_CXX11_ABI=1
+                shift 2
+            else
+                echo "Error: --trtllm requires an argument (image name)."
+                exit 1
+            fi
+            ;;
+        *)
+            echo "Unknown option: $1"
+            exit 1
+            ;;
+    esac
+done
 
-docker build --progress=plain -t llgtrt/llgtrt:dev --target llgtrt_dev . -f docker/Dockerfile
+# Set the default base image if --trtllm was not provided
+if [[ -z "$BASE_IMAGE" ]]; then
+    BASE_IMAGE="nvcr.io/nvidia/tensorrt:24.12-py3"
+fi
 
-if [ "$1" != "--dev" ] ; then
-    docker build --progress=plain -t llgtrt/llgtrt:latest --target llgtrt_prod . -f docker/Dockerfile
+if $DEV_MODE; then
+    TARGET="--target llgtrt_dev"
+else
+    TARGET="--target llgtrt_prod"
 fi
+
+# Build the Docker image with the appropriate arguments
+DOCKER_BUILD_ARGS="--progress=plain --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg INSTALL_TRTLLM=$INSTALL_TRTLLM --build-arg USE_CXX11_ABI=$USE_CXX11_ABI $TARGET"
+
+echo "Building Docker image $TARGET with arguments: $DOCKER_BUILD_ARGS"
+docker build $DOCKER_BUILD_ARGS . -f docker/Dockerfile
```
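Usage implied by the argument parsing above (`tensorrt_llm/release` stands for whatever tag your local TensorRT-LLM build produced):

```bash
# Default: prod image on the stock NGC TensorRT base, TensorRT-LLM installed from pip
./docker/build.sh

# Dev image only
./docker/build.sh --dev

# Base the build on a locally built TensorRT-LLM image: skips the pip install
# of tensorrt_llm and switches the native build to the CXX11 ABI
./docker/build.sh --trtllm tensorrt_llm/release
```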

scripts/build.sh

Lines changed: 12 additions & 1 deletion
```diff
@@ -1,5 +1,6 @@
 #!/bin/bash
 
+USE_CXX11_ABI=0
 
 while test $# -gt 0; do
     case "$1" in
@@ -8,6 +9,15 @@ while test $# -gt 0; do
             rm -rf target/release/* 2>/dev/null || :
             shift
             ;;
+        --cxx11abi)
+            if [[ -n "$2" && "$2" =~ ^[01]$ ]]; then
+                USE_CXX11_ABI="$2"
+                shift 2
+            else
+                echo "Error: --cxx11abi requires an argument (0 or 1)."
+                exit 1
+            fi
+            ;;
         *)
             echo "Unknown option $1"
             exit 1
@@ -25,7 +35,8 @@ fi
 
 mkdir -p trtllm-c/build
 cd trtllm-c/build
-cmake ..
+cmake -DUSE_CXX11_ABI=$USE_CXX11_ABI ..
 make -j
 cd ../../llgtrt
+export RUSTC_LOG=rustc_codegen_ssa::back::link=info
 cargo build --release
```
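This is the flag the Dockerfile now forwards via `--cxx11abi $USE_CXX11_ABI`; run inside the container it looks like:

```bash
# Clean rebuild of trtllm-c and llgtrt; 1 compiles with _GLIBCXX_USE_CXX11_ABI=1
# (needed against a source-built TensorRT-LLM), 0 keeps the default pre-CXX11 ABI.
./scripts/build.sh --clean --cxx11abi 1
```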

trtllm-c/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
```diff
@@ -7,7 +7,7 @@ set(TRTLLM_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../TensorRT-LLM")
 set(TRTLLM_INCLUDE_DIR "${TRTLLM_DIR}/cpp/include")
 set(TRTLLM_INCLUDE_COMMON_DIR "${TRTLLM_DIR}/cpp")
 
-set(USE_CXX11_ABI 0)
+option(USE_CXX11_ABI "Enable CXX11 ABI" 0)
 message(STATUS "Use CXX11 ABI: ${USE_CXX11_ABI}")
 add_compile_options("-D_GLIBCXX_USE_CXX11_ABI=${USE_CXX11_ABI}")
```
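Switching from `set()` to `option()` matters because a plain `set()` creates a normal variable that shadows any `-DUSE_CXX11_ABI=...` value passed on the command line, while `option()` leaves an existing cache entry untouched. The flag forwarded by `scripts/build.sh` therefore now takes effect:

```bash
# From scripts/build.sh: the value chosen via --cxx11abi reaches the C++ build here
cmake -DUSE_CXX11_ABI=$USE_CXX11_ABI ..
make -j
```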
