forked from Github/frigate
Community Supported Boards Framework (#7114)
* Make main frigate build non rpi specific and build rpi using base image
* Add boards to sidebar
* Fix docker build
* Fix docs build
* Update pr branch for testing
* Remove target from rpi build
* Remove manual build
* Add push build for rpi
* Fix typos, improve wording
* Add arm build for rpi
* Cleanup and add default github ref name
* Cleanup docker build file system
* Setup to use docker bake
* Add ci/cd for bake
* Fix path
* Fix devcontainer
* Set targets
* Fix build
* Fix syntax
* Add wheels target
* Move dev container to trt
* Update key and fix rpi local
* Move requirements files and set intermediate targets
* Add back --load
* Update docs for community board development
* Update installation docs to reflect different builds available
* Update docs with official and community supported headers
* Update codeowners docs
* Update docs
* Assemble main and standard builds
* Change order of pushes
* Remove community board after successful build
* Fix rpi bake file names
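For orientation: with this change the TensorRT image is no longer built from the monolithic main Dockerfile but assembled with docker buildx bake, driven by the board-specific files added below. A minimal local sketch, assuming the top-level Makefile includes the new docker/tensorrt/trt.mk and buildx is available, might look like:

    # build the TensorRT variant locally and load it into the Docker daemon
    # (wraps: docker buildx bake --load --file=docker/tensorrt/trt.hcl ... tensorrt)
    make local-trt

    # build and push a tagged image; IMAGE_REPO, GITHUB_REF_NAME and COMMIT_HASH are
    # assumed to come from the surrounding Makefile / CI environment (example value only)
    make push-trt IMAGE_REPO=ghcr.io/example/frigate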
docker/tensorrt/Dockerfile (Normal file, 49 lines)
@@ -0,0 +1,49 @@
# syntax=docker/dockerfile:1.4

# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

# Make this a separate target so it can be built/cached optionally
FROM wheels as trt-wheels
ARG DEBIAN_FRONTEND
ARG TARGETARCH

# Add TensorRT wheels to another folder
COPY docker/tensorrt/requirements.txt /requirements-tensorrt.txt
RUN mkdir -p /trt-wheels && pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt

# Build TensorRT-specific library
FROM nvcr.io/nvidia/tensorrt:23.03-py3 AS trt-deps

RUN --mount=type=bind,source=docker/tensorrt/detector/tensorrt_libyolo.sh,target=/tensorrt_libyolo.sh \
    /tensorrt_libyolo.sh

# Frigate w/ TensorRT Support as separate image
FROM deps AS frigate-tensorrt

#Disable S6 Global timeout
ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0

ENV TRT_VER=8.5.3
ENV YOLO_MODELS="yolov7-tiny-416"

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY docker/tensorrt/detector/rootfs/ /

RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    pip3 install -U /deps/trt-wheels/*.whl && \
    ldconfig

WORKDIR /opt/frigate/
COPY --from=rootfs / /

# Dev Container w/ TRT
FROM devcontainer AS devcontainer-trt

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY docker/tensorrt/detector/rootfs/ /
COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \
    pip3 install -U /deps/trt-wheels/*.whl
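The wheels, deps, rootfs and devcontainer stages referenced above (via FROM and COPY --from) are not defined in this Dockerfile; the trt.hcl bake file further down supplies them as named build contexts resolved from docker/main/Dockerfile. As a rough sketch of the same wiring without bake, assuming the main-image stages have already been built and tagged locally (the frigate-*:local names are purely illustrative), the contexts could also be passed by hand:

    docker buildx build . \
        --file docker/tensorrt/Dockerfile \
        --target frigate-tensorrt \
        --build-context deps=docker-image://frigate-deps:local \
        --build-context rootfs=docker-image://frigate-rootfs:local \
        --build-context wheels=docker-image://frigate-wheels:local \
        --tag frigate:tensorrt-dev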
@@ -0,0 +1,6 @@
/usr/local/lib
/usr/local/lib/python3.9/dist-packages/nvidia/cudnn/lib
/usr/local/lib/python3.9/dist-packages/nvidia/cuda_runtime/lib
/usr/local/lib/python3.9/dist-packages/nvidia/cublas/lib
/usr/local/lib/python3.9/dist-packages/nvidia/cuda_nvrtc/lib
/usr/local/lib/python3.9/dist-packages/tensorrt
docker/tensorrt/detector/rootfs/etc/s6-overlay/s6-rc.d/trt-model-prepare/run (Executable file, 53 lines)
@@ -0,0 +1,53 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Generate models for the TensorRT detector

set -o errexit -o nounset -o pipefail

MODEL_CACHE_DIR=${MODEL_CACHE_DIR:-"/config/model_cache/tensorrt"}
OUTPUT_FOLDER="${MODEL_CACHE_DIR}/${TRT_VER}"

# Create output folder
mkdir -p ${OUTPUT_FOLDER}

FIRST_MODEL=true
MODEL_CONVERT=""

for model in ${YOLO_MODELS//,/ }
do
    # Remove old link in case path/version changed
    rm -f ${MODEL_CACHE_DIR}/${model}.trt

    if [[ ! -f ${OUTPUT_FOLDER}/${model}.trt ]]; then
        if [[ ${FIRST_MODEL} = true ]]; then
            MODEL_CONVERT="${model}"
            FIRST_MODEL=false;
        else
            MODEL_CONVERT+=",${model}";
        fi
    else
        ln -s ${OUTPUT_FOLDER}/${model}.trt ${MODEL_CACHE_DIR}/${model}.trt
    fi
done

if [[ -z ${MODEL_CONVERT} ]]; then
    echo "No models to convert."
    exit 0
fi

echo "Generating the following TRT Models: ${MODEL_CONVERT}"

# Build trt engine
cd /usr/local/src/tensorrt_demos/yolo

# Download yolo weights
./download_yolo.sh $MODEL_CONVERT > /dev/null

for model in ${MODEL_CONVERT//,/ }
do
    echo "Converting ${model} model"
    python3 yolo_to_onnx.py -m ${model} > /dev/null
    python3 onnx_to_tensorrt.py -m ${model} > /dev/null
    cp ${model}.trt ${OUTPUT_FOLDER}/${model}.trt
    ln -s ${OUTPUT_FOLDER}/${model}.trt ${MODEL_CACHE_DIR}/${model}.trt
done
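Because this script splits YOLO_MODELS on commas and caches the generated engines under /config/model_cache/tensorrt, the set of converted models can be changed at runtime by overriding the variable set in the Dockerfile. A sketch of such an override (the second model name is illustrative; anything passed here must be a model the tensorrt_demos download/convert scripts know about, and GPU flags plus the usual Frigate mounts are omitted for brevity):

    # convert two models on container start instead of the default yolov7-tiny-416
    docker run -e YOLO_MODELS="yolov7-tiny-416,yolov4-tiny-416" \
        -v /path/to/config:/config \
        frigate:latest-tensorrt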
@@ -0,0 +1 @@
oneshot
@@ -0,0 +1 @@
/etc/s6-overlay/s6-rc.d/trt-model-prepare/run
docker/tensorrt/detector/tensorrt_libyolo.sh (Executable file, 18 lines)
@@ -0,0 +1,18 @@
#!/bin/bash

set -euxo pipefail

SCRIPT_DIR="/usr/local/src/tensorrt_demos"

# Clone tensorrt_demos repo
git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b conditional_download

# Build libyolo
cd ./tensorrt_demos/plugins && make all
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so

# Store yolo scripts for later conversion
cd ../
mkdir -p ${SCRIPT_DIR}/plugins
cp plugins/libyolo_layer.so ${SCRIPT_DIR}/plugins/libyolo_layer.so
cp -a yolo ${SCRIPT_DIR}/
docker/tensorrt/requirements.txt (Normal file, 12 lines)
@@ -0,0 +1,12 @@
# NVidia TensorRT Support (amd64 only)
--extra-index-url 'https://pypi.nvidia.com'
numpy < 1.24; platform_machine == 'x86_64'
tensorrt == 8.5.3.*; platform_machine == 'x86_64'
cuda-python == 11.8; platform_machine == 'x86_64'
cython == 0.29.*; platform_machine == 'x86_64'
nvidia-cuda-runtime-cu12 == 12.1.*; platform_machine == 'x86_64'
nvidia-cuda-runtime-cu11 == 11.8.*; platform_machine == 'x86_64'
nvidia-cublas-cu11 == 11.11.3.6; platform_machine == 'x86_64'
nvidia-cudnn-cu11 == 8.6.0.*; platform_machine == 'x86_64'
onnx==1.14.0; platform_machine == 'x86_64'
protobuf==3.20.3; platform_machine == 'x86_64'
docker/tensorrt/trt.hcl (Normal file, 48 lines)
@@ -0,0 +1,48 @@
target deps {
    dockerfile = "docker/main/Dockerfile"
    platforms = ["linux/amd64"]
    target = "deps"
}

target rootfs {
    dockerfile = "docker/main/Dockerfile"
    platforms = ["linux/amd64"]
    target = "rootfs"
}

target wheels {
    dockerfile = "docker/main/Dockerfile"
    platforms = ["linux/amd64"]
    target = "wheels"
}

target devcontainer {
    dockerfile = "docker/main/Dockerfile"
    platforms = ["linux/amd64"]
    target = "devcontainer"
}

target tensorrt {
    dockerfile = "docker/tensorrt/Dockerfile"
    context = "."
    contexts = {
        deps = "target:deps",
        rootfs = "target:rootfs"
        wheels = "target:wheels"
    }
    platforms = ["linux/amd64"]
    target = "frigate-tensorrt"
}

target devcontainer-trt {
    dockerfile = "docker/tensorrt/Dockerfile"
    context = "."
    contexts = {
        deps = "target:deps",
        rootfs = "target:rootfs"
        wheels = "target:wheels"
        devcontainer = "target:devcontainer"
    }
    platforms = ["linux/amd64"]
    target = "devcontainer-trt"
}
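Besides the frigate-tensorrt image, this bake file also exposes the devcontainer-trt target used for TensorRT development containers. As a sketch (the tag name is illustrative), it can be built directly from the file with:

    docker buildx bake --load --file=docker/tensorrt/trt.hcl \
        --set devcontainer-trt.tags=frigate-devcontainer:trt devcontainer-trt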
docker/tensorrt/trt.mk (Normal file, 10 lines)
@@ -0,0 +1,10 @@
BOARDS += trt

local-trt: version
	docker buildx bake --load --file=docker/tensorrt/trt.hcl --set tensorrt.tags=frigate:latest-tensorrt tensorrt

build-trt:
	docker buildx bake --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt tensorrt

push-trt: build-trt
	docker buildx bake --push --file=docker/tensorrt/trt.hcl --set tensorrt.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-tensorrt tensorrt