forked from blakeblackshear/frigate

Compare commits: v0.12.0-be ... v0.12.0-be

48 commits:
- 27a31e731f
- 562e2627c2
- babd976533
- 748815b6ce
- 88252e0ae6
- c0bf69b4bf
- b6b10e753f
- 4a45089b95
- 3b9bcb356b
- e10ddb343c
- e8cd25ddf2
- 624c314335
- b33094207c
- 7083a5c9b6
- db131d4971
- 74d6ab0555
- 66881eb89f
- ad60f4894b
- 8d21c950a3
- ea8ec23cbe
- f06e8b47be
- 7edeaa3407
- 7a7e30e1ff
- 78dc75a4f1
- a5078b8517
- 77876bd663
- a0697b81a1
- 88b903ba39
- 116edce3dc
- 7130d93616
- 11054daebe
- 7926ddd008
- 93f2316711
- fd3688a9c7
- a63dd9bccd
- f54b223adf
- 9ddcf7b1fe
- 07c635d822
- 6ed8977548
- e50cccc78a
- feeb7107d5
- f52de322d3
- 2f3e046130
- edbdbb7f07
- 4bb5785d89
- 36c6ee73fe
- 924f946e46
- d22e25064b
.github/workflows/ci.yml (vendored, 8 changes)
@@ -19,6 +19,10 @@ jobs:
     runs-on: ubuntu-latest
     name: Image Build
     steps:
+      - id: lowercaseRepo
+        uses: ASzc/change-string-case-action@v5
+        with:
+          string: ${{ github.repository }}
       - name: Check out code
         uses: actions/checkout@v3
       - name: Set up QEMU
@@ -43,7 +47,7 @@ jobs:
           platforms: linux/amd64,linux/arm64,linux/arm/v7
           target: frigate
           tags: |
-            ghcr.io/blakeblackshear/frigate:${{ github.ref_name }}-${{ env.SHORT_SHA }}
+            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}
           cache-from: type=gha
           cache-to: type=gha,mode=max
       - name: Build and push TensorRT
@@ -54,5 +58,5 @@ jobs:
           platforms: linux/amd64
           target: frigate-tensorrt
           tags: |
-            ghcr.io/blakeblackshear/frigate:${{ github.ref_name }}-${{ env.SHORT_SHA }}-tensorrt
+            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}-tensorrt
           cache-from: type=gha
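For readers skimming this hunk: container registries such as ghcr.io reject image names with uppercase characters, which is why the workflow lowercases `${{ github.repository }}` before building tags, so forks with mixed-case owner names can push. A minimal Python sketch of the resulting tag construction (the function and sample values are illustrative, not part of the workflow):

```python
def ghcr_tag(repository: str, ref_name: str, short_sha: str, suffix: str = "") -> str:
    """Build a ghcr.io image reference the way the updated workflow does."""
    # ghcr.io rejects uppercase repository names, so a fork owned by
    # "MyUser" must be lowercased before it can be pushed.
    return f"ghcr.io/{repository.lower()}:{ref_name}-{short_sha}{suffix}"

# Illustrative values only:
print(ghcr_tag("MyUser/Frigate", "v0.12.0-be", "27a31e7"))
# -> ghcr.io/myuser/frigate:v0.12.0-be-27a31e7
print(ghcr_tag("MyUser/Frigate", "v0.12.0-be", "27a31e7", "-tensorrt"))
```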

@@ -27,7 +27,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
 FROM wget AS go2rtc
 ARG TARGETARCH
 WORKDIR /rootfs/usr/local/go2rtc/bin
-RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v0.1-rc.9/go2rtc_linux_${TARGETARCH}" \
+RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.1.2/go2rtc_linux_${TARGETARCH}" \
     && chmod +x go2rtc
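The `${TARGETARCH}` build argument selects the matching go2rtc release asset per Docker platform. A small Python mirror of that URL construction, assuming the same per-architecture asset naming as the pinned release (illustrative, not from the Dockerfile):

```python
def go2rtc_release_url(targetarch: str, version: str = "v1.1.2") -> str:
    """Mirror the wget line above: one static go2rtc binary per platform."""
    if targetarch not in {"amd64", "arm64", "arm"}:  # assumed asset names
        raise ValueError(f"unexpected TARGETARCH: {targetarch}")
    return (
        "https://github.com/AlexxIT/go2rtc/releases/download/"
        f"{version}/go2rtc_linux_{targetarch}"
    )

print(go2rtc_release_url("amd64"))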

@@ -64,6 +64,9 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
     apt-get -qq install --no-install-recommends --no-install-suggests -y \
         intel-opencl-icd \
         mesa-va-drivers libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 radeontop intel-gpu-tools
+    # something about this dependency requires it to be installed in a separate call rather than in the line above
+    apt-get -qq install --no-install-recommends --no-install-suggests -y \
+        i965-va-driver-shaders
     rm -f /etc/apt/sources.list.d/debian-testing.list
 fi

@@ -2,7 +2,7 @@

 set -euxo pipefail

-s6_version="3.1.2.1"
+s6_version="3.1.3.0"

 if [[ "${TARGETARCH}" == "amd64" ]]; then
     s6_arch="x86_64"
@@ -10,7 +10,7 @@ s6-svc -O .
 function get_ip_and_port_from_supervisor() {
     local ip_address
     # Example: 192.168.1.10/24
-    local ip_regex='^([0-9]{1,3}\.{3}[0-9]{1,3})/[0-9]{1,2}$'
+    local ip_regex='^([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})/[0-9]{1,2}$'
     if ip_address=$(
         curl -fsSL \
             -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
@@ -32,10 +32,10 @@ function get_ip_and_port_from_supervisor() {
             -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
             -H "Content-Type: application/json" \
             http://supervisor/addons/self/info |
-            jq --exit-status --raw-output '.data.network["22/tcp"]'
+            jq --exit-status --raw-output '.data.network["8555/tcp"]'
     ) && [[ "${webrtc_port}" =~ ${port_regex} ]]; then
         webrtc_port="${BASH_REMATCH[1]}"
-        echo "[INFO] Got WebRTC port from supervisor: ${ip_address}" >&2
+        echo "[INFO] Got WebRTC port from supervisor: ${webrtc_port}" >&2
     else
         echo "[WARN] Failed to get WebRTC port from supervisor" >&2
         return 0
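The regex fix matters because `\.{3}` matches three consecutive literal dots rather than three dot-separated octets, so the old pattern could never match a real address. A quick Python check of both patterns (the sample address comes from the script's own comment):

```python
import re

# Translated from the bash patterns above; `\.{3}` means "three literal dots",
# not three ".NNN" groups, so the old pattern rejects normal CIDR strings.
old_ip_regex = r"^([0-9]{1,3}\.{3}[0-9]{1,3})/[0-9]{1,2}$"
new_ip_regex = r"^([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})/[0-9]{1,2}$"

sample = "192.168.1.10/24"  # the format documented in the script's comment
print(bool(re.match(old_ip_regex, sample)))  # False: broken pattern never matches
print(bool(re.match(new_ip_regex, sample)))  # True: captures 192.168.1.10
```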

@@ -2,6 +2,7 @@

 import json
 import os
+import sys
 import yaml

@@ -40,6 +41,16 @@ if not go2rtc_config.get("webrtc", {}).get("candidates", []):
     default_candidates.append("stun:8555")

     go2rtc_config["webrtc"] = {"candidates": default_candidates}
+else:
+    print("[INFO] Not injecting WebRTC candidates into go2rtc config as it has been set manually", file=sys.stderr)
+
+# sets default RTSP response to be equivalent to ?video=h264,h265&audio=aac
+# this means user does not need to specify audio codec when using restream
+# as source for frigate and the integration supports HLS playback
+if go2rtc_config.get("rtsp") is None:
+    go2rtc_config["rtsp"] = {"default_query": "mp4"}
+elif go2rtc_config["rtsp"].get("default_query") is None:
+    go2rtc_config["rtsp"]["default_query"] = "mp4"

 # need to replace ffmpeg command when using ffmpeg4
 if not os.path.exists(BTBN_PATH):
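To make the new defaulting behavior concrete, here is a minimal standalone Python sketch of the same logic, showing that a user-supplied `default_query` is left alone while a missing one is filled in (the sample configs are illustrative):

```python
def apply_rtsp_default(go2rtc_config: dict) -> dict:
    # Same logic as the added lines above: only fill in default_query
    # when the user has not already set one.
    if go2rtc_config.get("rtsp") is None:
        go2rtc_config["rtsp"] = {"default_query": "mp4"}
    elif go2rtc_config["rtsp"].get("default_query") is None:
        go2rtc_config["rtsp"]["default_query"] = "mp4"
    return go2rtc_config

print(apply_rtsp_default({}))                                   # rtsp section created
print(apply_rtsp_default({"rtsp": {"listen": ":8554"}}))        # default added alongside
print(apply_rtsp_default({"rtsp": {"default_query": "h264"}}))  # user value kept
```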

@@ -107,8 +107,12 @@ According to [this discussion](https://github.com/blakeblackshear/frigate/issues

 ```yaml
 go2rtc:
-  reolink: ffmpeg:http://reolink_ip/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=username&password=password#video=copy#audio=copy#audio=opus
-  reolink_sub: ffmpeg:http://reolink_ip/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=username&password=password
+  streams:
+    reolink:
+      - http://reolink_ip/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=username&password=password
+      - ffmpeg:reolink#audio=opus
+    reolink_sub:
+      - http://reolink_ip/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=username&password=password

 cameras:
   reolink:
@@ -118,7 +122,7 @@ cameras:
         input_args: preset-rtsp-restream
         roles:
           - record
-      - path: rtsp://127.0.0.1:8554/reolink?video=copy
+      - path: rtsp://127.0.0.1:8554/reolink_sub?video=copy
         input_args: preset-rtsp-restream
         roles:
           - detect

@@ -101,7 +101,7 @@ The OpenVINO device to be used is specified using the `"device"` attribute accor

 OpenVINO is supported on 6th Gen Intel platforms (Skylake) and newer. A supported Intel platform is required to use the `GPU` device with OpenVINO. The `MYRIAD` device may be run on any platform, including Arm devices. For detailed system requirements, see [OpenVINO System Requirements](https://www.intel.com/content/www/us/en/developer/tools/openvino-toolkit/system-requirements.html)

-An OpenVINO model is provided in the container at `/openvino-model/ssdlite_mobilenet_v2.xml` and is used by this detector type by default. The model comes from Intel's Open Model Zoo [SSDLite MobileNet V2](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssdlite_mobilenet_v2) and is converted to an FP16 precision IR model. Use the model configuration shown below when using the OpenVINO detector.
+An OpenVINO model is provided in the container at `/openvino-model/ssdlite_mobilenet_v2.xml` and is used by this detector type by default. The model comes from Intel's Open Model Zoo [SSDLite MobileNet V2](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/ssdlite_mobilenet_v2) and is converted to an FP16 precision IR model. Use the model configuration shown below when using the OpenVINO detector with the default model.

 ```yaml
 detectors:
@@ -119,6 +119,25 @@ model:
   labelmap_path: /openvino-model/coco_91cl_bkgr.txt
 ```

+This detector also supports YOLOx models, and has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. Frigate does not come with a `yolox_tiny` model; you will need to follow the [OpenVINO documentation](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) to provide your own model to Frigate. There is currently no support for other types of YOLO models (YOLOv3, YOLOv4, etc...). Below is an example of how `yolox_tiny` and other yolox variants can be used in Frigate:
+
+```yaml
+detectors:
+  ov:
+    type: openvino
+    device: AUTO
+    model:
+      path: /path/to/yolox_tiny.xml
+
+model:
+  width: 416
+  height: 416
+  input_tensor: nchw
+  input_pixel_format: bgr
+  model_type: yolox
+  labelmap_path: /path/to/coco_80cl.txt
+```
+
 ### Intel NCS2 VPU and Myriad X Setup

 Intel produces a neural net inference acceleration chip called Myriad X. This chip was sold in their Neural Compute Stick 2 (NCS2) which has been discontinued. If intending to use the MYRIAD device for acceleration, additional setup is required to pass through the USB device. The host needs a udev rule installed to handle the NCS2 device.
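Returning to the yolox example above: `width`/`height`, `input_tensor: nchw`, and `input_pixel_format: bgr` together describe the tensor handed to the model. A rough Python/OpenCV sketch of what that preprocessing amounts to (an illustration of the config's meaning, not Frigate's actual code):

```python
import cv2
import numpy as np

def to_yolox_input(frame_bgr: np.ndarray, width: int = 416, height: int = 416) -> np.ndarray:
    """Shape a BGR frame into the NCHW tensor described by the config above."""
    resized = cv2.resize(frame_bgr, (width, height))       # width: 416, height: 416
    # input_pixel_format: bgr -> an OpenCV frame needs no channel swap
    chw = resized.transpose(2, 0, 1)                       # HWC -> CHW (input_tensor: nchw)
    return np.expand_dims(chw, axis=0).astype(np.float32)  # add batch dim -> NCHW

tensor = to_yolox_input(np.zeros((720, 1280, 3), dtype=np.uint8))
print(tensor.shape)  # (1, 3, 416, 416)
```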

@@ -28,16 +28,17 @@ Input args presets help make the config more readable and handle use cases for d

 See [the camera specific docs](/configuration/camera_specific.md) for more info on non-standard cameras and recommendations for using them in Frigate.

 | Preset                    | Usage                     | Other Notes                                         |
-| ------------------------- | ------------------------- | --------------------------------------------------- |
+| -------------------------------- | ------------------------- | ------------------------------------------------------------------------------------------------ |
 | preset-http-jpeg-generic  | HTTP Live Jpeg            | Recommend restreaming live jpeg instead             |
 | preset-http-mjpeg-generic | HTTP Mjpeg Stream         | Recommend restreaming mjpeg stream instead          |
 | preset-http-reolink       | Reolink HTTP-FLV Stream   | Only for reolink http, not when restreaming as rtsp |
 | preset-rtmp-generic       | RTMP Stream               |                                                     |
 | preset-rtsp-generic       | RTSP Stream               | This is the default when nothing is specified       |
-| preset-rtsp-restream      | RTSP Stream from restream | Use when using rtsp restream as source              |
+| preset-rtsp-restream             | RTSP Stream from restream | Use for rtsp restream as source for frigate                                                      |
+| preset-rtsp-restream-low-latency | RTSP Stream from restream | Use for rtsp restream as source for frigate to lower latency, may cause issues with some cameras |
 | preset-rtsp-udp           | RTSP Stream via UDP       | Use when camera is UDP only                         |
 | preset-rtsp-blue-iris     | Blue Iris RTSP Stream     | Use when consuming a stream from Blue Iris          |

 :::caution
@@ -46,21 +47,22 @@ It is important to be mindful of input args when using restream because you can

 :::

 ```yaml
+go2rtc:
+  streams:
+    reolink_cam: http://192.168.0.139/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=admin&password=password
+
 cameras:
   reolink_cam:
     ffmpeg:
       inputs:
-        - path: http://192.168.0.139/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=admin&password={FRIGATE_CAM_PASSWORD}
+        - path: http://192.168.0.139/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=admin&password=password
          input_args: preset-http-reolink
          roles:
            - detect
-        - path: rtsp://192.168.0.10:8554/garage
+        - path: rtsp://127.0.0.1:8554/reolink_cam
          input_args: preset-rtsp-generic
          roles:
            - record
-        - path: http://192.168.0.139/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=admin&password={FRIGATE_CAM_PASSWORD}
-          roles:
-            - restream
 ```

 ### Output Args Presets

@@ -15,23 +15,39 @@ ffmpeg:
   hwaccel_args: preset-rpi-64-h264
 ```

-### Intel-based CPUs (<10th Generation) via Quicksync
+### Intel-based CPUs (<10th Generation) via VAAPI
+
+VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams. VAAPI is recommended for all generations of Intel-based CPUs if QSV does not work.

 ```yaml
 ffmpeg:
   hwaccel_args: preset-vaapi
 ```

 **NOTICE**: With some of the processors, like the J4125, the default driver `iHD` doesn't seem to work correctly for hardware acceleration. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the frigate.yml for HA OS users](advanced.md#environment_vars).

 ### Intel-based CPUs (>=10th Generation) via Quicksync

+QSV must be set specifically based on the video encoding of the stream.
+
+#### H.264 streams
+
 ```yaml
 ffmpeg:
   hwaccel_args: preset-intel-qsv-h264
 ```

+#### H.265 streams
+
+```yaml
+ffmpeg:
+  hwaccel_args: preset-intel-qsv-h265
+```
+
 ### AMD/ATI GPUs (Radeon HD 2000 and newer GPUs) via libva-mesa-driver

+VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams.
+
 **Note:** You also need to set `LIBVA_DRIVER_NAME=radeonsi` as an environment variable on the container.

 ```yaml
@@ -39,14 +55,20 @@ ffmpeg:
   hwaccel_args: preset-vaapi
 ```

-### NVIDIA GPU
+### NVIDIA GPUs

-[Supported Nvidia GPUs for Decoding](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new)
+While older GPUs may work, it is recommended to use modern, supported GPUs. NVIDIA provides a [matrix of supported GPUs and features](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new). If your card is on the list and supports CUVID/NVDEC, it will most likely work with Frigate for decoding. However, you must also use [a driver version that will work with FFmpeg](https://github.com/FFmpeg/nv-codec-headers/blob/master/README). Older driver versions may be missing symbols and fail to work, and older cards are not supported by newer driver versions. The only way around this is to [provide your own FFmpeg](/configuration/advanced#custom-ffmpeg-build) that will work with your driver version, but this is unsupported and may not work well if at all.

-These instructions are based on the [jellyfin documentation](https://jellyfin.org/docs/general/administration/hardware-acceleration.html#nvidia-hardware-acceleration-on-docker-linux)
+A more complete list of cards and their compatible drivers is available in the [driver release readme](https://download.nvidia.com/XFree86/Linux-x86_64/525.85.05/README/supportedchips.html).
+
+If your distribution does not offer NVIDIA driver packages, you can [download them here](https://www.nvidia.com/en-us/drivers/unix/).
+
+#### Docker Configuration
+
+Additional configuration is needed for the Docker container to be able to access the NVIDIA GPU. The supported method for this is to install the [NVIDIA Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html#docker) and specify the GPU to Docker. How you do this depends on how Docker is being run:
+
+##### Docker Compose

-Add `--gpus all` to your docker run command or update your compose file.
-
-If you have multiple Nvidia graphic card, you can add them with their ids obtained via `nvidia-smi` command

 ```yaml
 services:
   frigate:
@@ -62,11 +84,23 @@ services:
             capabilities: [gpu]
 ```

+##### Docker Run CLI
+
+```bash
+docker run -d \
+  --name frigate \
+  ...
+  --gpus=all \
+  ghcr.io/blakeblackshear/frigate:stable
+```
+
+#### Setup Decoder
+
 The decoder you need to pass in the `hwaccel_args` will depend on the input video.

-A list of supported codecs (you can use `ffmpeg -decoders | grep cuvid` in the container to get a list)
+A list of supported codecs (you can use `ffmpeg -decoders | grep cuvid` in the container to get the ones your card supports)

-```shell
+```
 V..... h263_cuvid           Nvidia CUVID H263 decoder (codec h263)
 V..... h264_cuvid           Nvidia CUVID H264 decoder (codec h264)
 V..... hevc_cuvid           Nvidia CUVID HEVC decoder (codec hevc)
@@ -87,12 +121,12 @@ ffmpeg:
 ```

 If everything is working correctly, you should see a significant improvement in performance.
-Verify that hardware decoding is working by running `nvidia-smi`, which should show the ffmpeg
+Verify that hardware decoding is working by running `nvidia-smi`, which should show `ffmpeg`
 processes:

 :::note

-nvidia-smi may not show ffmpeg processes when run inside the container [due to docker limitations](https://github.com/NVIDIA/nvidia-docker/issues/179#issuecomment-645579458)
+`nvidia-smi` may not show `ffmpeg` processes when run inside the container [due to docker limitations](https://github.com/NVIDIA/nvidia-docker/issues/179#issuecomment-645579458).

 :::
@@ -123,3 +157,7 @@ nvidia-smi may not show ffmpeg processes when run inside the container [due to d
 | 0 N/A N/A 12827 C ffmpeg 417MiB |
 +-----------------------------------------------------------------------------+
 ```

+If you do not see these processes, check the `docker logs` for the container and look for decoding errors.
+
+These instructions were originally based on the [Jellyfin documentation](https://jellyfin.org/docs/general/administration/hardware-acceleration.html#nvidia-hardware-acceleration-on-docker-linux).

@@ -105,6 +105,9 @@ model:
   # Optional: Object detection model input tensor format
   # Valid values are nhwc or nchw (default: shown below)
   input_tensor: nhwc
+  # Optional: Object detection model type, currently only used with the OpenVINO detector
+  # Valid values are ssd or yolox (default: shown below)
+  model_type: ssd
   # Optional: Label name modifications. These are merged into the standard labelmap.
   labelmap:
     2: vehicle
@@ -271,11 +274,6 @@ record:
   # Optional: Enable recording (default: shown below)
   # WARNING: If recording is disabled in the config, turning it on via
   #          the UI or MQTT later will have no effect.
-  # WARNING: Frigate does not currently support limiting recordings based
-  #          on available disk space automatically. If using recordings,
-  #          you must specify retention settings for a number of days that
-  #          will fit within the available disk space of your drive or Frigate
-  #          will crash.
   enabled: False
   # Optional: Number of minutes to wait between cleanup runs (default: shown below)
   # This can be used to reduce the frequency of deleting recording segments from disk if you want to minimize i/o
@@ -355,7 +353,7 @@ rtmp:
   enabled: False

 # Optional: Restream configuration
-# Uses https://github.com/AlexxIT/go2rtc (v0.1-rc9)
+# Uses https://github.com/AlexxIT/go2rtc (v1.1.2)
 go2rtc:

 # Optional: jsmpeg stream configuration for WebUI
@@ -491,4 +489,10 @@ ui:
   timezone: None
   # Optional: Use an experimental recordings / camera view UI (default: shown below)
   experimental_ui: False
+
+# Optional: Telemetry configuration
+telemetry:
+  # Optional: Enable the latest version outbound check (default: shown below)
+  # NOTE: If you use the HomeAssistant integration, disabling this will prevent it from reporting new versions
+  version_check: True
 ```

@@ -3,17 +3,17 @@ id: live
 title: Live View
 ---

-Frigate has different live view options, some of which require [restream](restream.md) to be enabled.
+Frigate has different live view options, some of which require the bundled `go2rtc` to be configured as shown in the [step by step guide](/guides/configuring_go2rtc).

 ## Live View Options

 Live view options can be selected while viewing the live stream. The options are:

-| Source | Latency | Frame Rate                            | Resolution     | Audio                        | Requires Restream | Other Limitations                            |
-| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | ----------------- | -------------------------------------------- |
+| Source | Latency | Frame Rate                            | Resolution     | Audio                        | Requires go2rtc | Other Limitations                            |
+| ------ | ------- | ------------------------------------- | -------------- | ---------------------------- | --------------- | -------------------------------------------- |
 | jsmpeg | low     | same as `detect -> fps`, capped at 10 | same as detect | no                           | no              | none                                         |
 | mse    | low     | native                                | native         | yes (depends on audio codec) | yes             | not supported on iOS, Firefox is h.264 only  |
 | webrtc | lowest  | native                                | native         | yes (depends on audio codec) | yes             | requires extra config, doesn't support h.265 |

 ### Audio Support
@@ -24,9 +24,10 @@ go2rtc:
   streams:
     rtsp_cam: # <- for RTSP streams
       - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio
-      - ffmpeg:rtsp_cam#audio=opus # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
+      - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
     http_cam: # <- for http streams
-      - "ffmpeg:http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password#video=copy#audio=copy#audio=opus" # <- http streams must use ffmpeg to set all types
+      - http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password # <- stream which supports video & aac audio
+      - "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
 ```

 ### Setting Stream For Live UI
@@ -36,12 +37,12 @@ There may be some cameras that you would prefer to use the sub stream for live v
 ```yaml
 go2rtc:
   streams:
     rtsp_cam:
-      - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
-      - ffmpeg:rtsp_cam#audio=opus # <- copy of the stream which transcodes audio to opus
+      - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio.
+      - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to opus
     rtsp_cam_sub:
-      - rtsp://192.168.1.5:554/substream # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
-      - ffmpeg:rtsp_cam_sub#audio=opus # <- copy of the stream which transcodes audio to opus
+      - rtsp://192.168.1.5:554/substream # <- stream which supports video & aac audio.
+      - "ffmpeg:rtsp_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus

 cameras:
   test_cam:
@@ -49,11 +50,11 @@ cameras:
     output_args:
       record: preset-record-generic-audio-copy
     inputs:
-      - path: rtsp://127.0.0.1:8554/test_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+      - path: rtsp://127.0.0.1:8554/test_cam # <--- the name here must match the name of the camera in restream
        input_args: preset-rtsp-restream
        roles:
          - record
-      - path: rtsp://127.0.0.1:8554/test_cam_sub?video=copy # <--- the name here must match the name of the camera_sub in restream
+      - path: rtsp://127.0.0.1:8554/test_cam_sub # <--- the name here must match the name of the camera_sub in restream
        input_args: preset-rtsp-restream
        roles:
          - detect
@@ -68,15 +69,15 @@ WebRTC works by creating a TCP or UDP connection on port `8555`. However, it req
 - For external access, over the internet, setup your router to forward port `8555` to port `8555` on the Frigate device, for both TCP and UDP.
 - For internal/local access, unless you are running through the add-on, you will also need to set the WebRTC candidates list in the go2rtc config. For example, if `192.168.1.10` is the local IP of the device running Frigate:

 ```yaml title="/config/frigate.yaml"
 go2rtc:
   streams:
     test_cam: ...
   webrtc:
     candidates:
       - 192.168.1.10:8555
       - stun:8555
 ```

 :::tip
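Since WebRTC depends on port `8555` being reachable over both TCP and UDP, a quick reachability probe can save debugging time when candidates are misconfigured. A small Python sketch (host and port are the example values above; UDP is connectionless, so only the TCP side gives a definite answer):

```python
import socket

def webrtc_tcp_reachable(host: str = "192.168.1.10", port: int = 8555, timeout: float = 2.0) -> bool:
    """Return True if go2rtc's WebRTC TCP listener accepts a connection."""
    try:
        with socket.create_connection((host, port), timeout=timeout):
            return True
    except OSError:
        return False

print(webrtc_tcp_reachable())  # False usually means a missing port mapping or firewall rule
```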

@@ -3,17 +3,38 @@ id: restream
 title: Restream
 ---

-### RTSP
+## RTSP

 Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://<frigate_host>:8554/<camera_name>`. Port 8554 must be open. [This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.

 Frigate uses [go2rtc](https://github.com/AlexxIT/go2rtc) to provide its restream and MSE/WebRTC capabilities. The go2rtc config is hosted at the `go2rtc` in the config, see [go2rtc docs](https://github.com/AlexxIT/go2rtc#configuration) for more advanced configurations and features.

-#### Birdseye Restream
+:::note
+
+You can access the go2rtc webUI at `http://frigate_ip:5000/live/webrtc` which can be helpful to debug as well as provide useful information about your camera streams.
+
+:::
+
+### Birdseye Restream

 Birdseye RTSP restream can be enabled at `birdseye -> restream` and accessed at `rtsp://<frigate_host>:8554/birdseye`. Enabling the restream will cause birdseye to run 24/7 which may increase CPU usage somewhat.

-### RTMP (Deprecated)
+### Securing Restream With Authentication
+
+The go2rtc restream can be secured with RTSP based username / password authentication. Ex:
+
+```yaml
+go2rtc:
+  rtsp:
+    username: "admin"
+    password: "pass"
+  streams:
+    ...
+```
+
+**NOTE:** This does not apply to localhost requests, there is no need to provide credentials when using the restream as a source for frigate cameras.
+
+## RTMP (Deprecated)

 In previous Frigate versions RTMP was used for re-streaming. RTMP has disadvantages however including being incompatible with H.265, high bitrates, and certain audio codecs. RTMP is deprecated and it is recommended to move to the new restream role.
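For reference, consuming the credential-protected restream shown above just means embedding the username and password in the RTSP URL. A sketch using OpenCV (host, stream name, and credentials are illustrative values matching the YAML above):

```python
import cv2

# Credentials are embedded in the URL, matching rtsp.username / rtsp.password above.
# Localhost consumers (Frigate itself) can omit them, per the note.
url = "rtsp://admin:pass@frigate_host:8554/back"

cap = cv2.VideoCapture(url)
ok, frame = cap.read()
print("connected:", ok, "frame shape:", frame.shape if ok else None)
cap.release()
```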
@@ -30,9 +51,10 @@ go2rtc:
   streams:
     rtsp_cam: # <- for RTSP streams
       - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio
-      - ffmpeg:rtsp_cam#audio=opus # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
-    http_cam: # <- for http streams
-      - "ffmpeg:http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password#video=copy#audio=copy#audio=opus" # <- http streams must use ffmpeg to set all types
+      - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
+    http_cam: # <- for other streams
+      - http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password # <- stream which supports video & aac audio
+      - "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)

 cameras:
   rtsp_cam:
@@ -40,7 +62,7 @@ cameras:
     output_args:
       record: preset-record-generic-audio-copy
     inputs:
-      - path: rtsp://127.0.0.1:8554/rtsp_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+      - path: rtsp://127.0.0.1:8554/rtsp_cam # <--- the name here must match the name of the camera in restream
        input_args: preset-rtsp-restream
        roles:
          - record
@@ -50,14 +72,14 @@ cameras:
     output_args:
       record: preset-record-generic-audio-copy
     inputs:
-      - path: rtsp://127.0.0.1:8554/http_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+      - path: rtsp://127.0.0.1:8554/http_cam # <--- the name here must match the name of the camera in restream
        input_args: preset-rtsp-restream
        roles:
          - record
          - detect
 ```

-#### With Sub Stream
+### With Sub Stream

 Two connections are made to the camera. One for the sub stream, one for the restream, `record` connects to the restream.
@@ -66,14 +88,16 @@ go2rtc:
   streams:
     rtsp_cam:
       - rtsp://192.168.1.5:554/live0 # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
-      - ffmpeg:rtsp_cam#audio=opus # <- copy of the stream which transcodes audio to opus
+      - "ffmpeg:rtsp_cam#audio=opus" # <- copy of the stream which transcodes audio to opus
     rtsp_cam_sub:
       - rtsp://192.168.1.5:554/substream # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
-      - ffmpeg:rtsp_cam_sub#audio=opus # <- copy of the stream which transcodes audio to opus
+      - "ffmpeg:rtsp_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus
     http_cam:
-      - "ffmpeg:http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password#video=copy#audio=copy#audio=opus" # <- http streams must use ffmpeg to set all types
+      - http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_main.bcs&user=user&password=password # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
+      - "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to opus
     http_cam_sub:
-      - "ffmpeg:http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=user&password=password#video=copy#audio=copy#audio=opus" # <- http streams must use ffmpeg to set all types
+      - http://192.168.50.155/flv?port=1935&app=bcs&stream=channel0_ext.bcs&user=user&password=password # <- stream which supports video & aac audio. This is only supported for rtsp streams, http must use ffmpeg
+      - "ffmpeg:http_cam_sub#audio=opus" # <- copy of the stream which transcodes audio to opus

 cameras:
   rtsp_cam:
@@ -81,11 +105,11 @@ cameras:
     output_args:
       record: preset-record-generic-audio-copy
     inputs:
-      - path: rtsp://127.0.0.1:8554/rtsp_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+      - path: rtsp://127.0.0.1:8554/rtsp_cam # <--- the name here must match the name of the camera in restream
        input_args: preset-rtsp-restream
        roles:
          - record
-      - path: rtsp://127.0.0.1:8554/rtsp_cam_sub?video=copy&audio=aac # <--- the name here must match the name of the camera_sub in restream
+      - path: rtsp://127.0.0.1:8554/rtsp_cam_sub # <--- the name here must match the name of the camera_sub in restream
        input_args: preset-rtsp-restream
        roles:
          - detect
@@ -94,12 +118,24 @@ cameras:
     output_args:
       record: preset-record-generic-audio-copy
     inputs:
-      - path: rtsp://127.0.0.1:8554/http_cam?video=copy&audio=aac # <--- the name here must match the name of the camera in restream
+      - path: rtsp://127.0.0.1:8554/http_cam # <--- the name here must match the name of the camera in restream
        input_args: preset-rtsp-restream
        roles:
          - record
-      - path: rtsp://127.0.0.1:8554/http_cam_sub?video=copy&audio=aac # <--- the name here must match the name of the camera_sub in restream
+      - path: rtsp://127.0.0.1:8554/http_cam_sub # <--- the name here must match the name of the camera_sub in restream
        input_args: preset-rtsp-restream
        roles:
          - detect
 ```

+## Advanced Restream Configurations
+
+The [exec](https://github.com/AlexxIT/go2rtc#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:
+
+NOTE: The output will need to be passed with two curly braces `{{output}}`
+
+```yaml
+go2rtc:
+  streams:
+    stream1: exec:ffmpeg -hide_banner -re -stream_loop -1 -i /media/BigBuckBunny.mp4 -c copy -rtsp_transport tcp -f rtsp {{output}}
+```
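Conceptually, the exec source launches the configured command with `{{output}}` replaced by a publish URL. A rough Python illustration of that substitution (this is not go2rtc's implementation; the template is the example above and the output URL is illustrative):

```python
import shlex
import subprocess

# The stream template from the YAML above.
template = ("exec:ffmpeg -hide_banner -re -stream_loop -1 -i /media/BigBuckBunny.mp4 "
            "-c copy -rtsp_transport tcp -f rtsp {{output}}")

def launch_exec_source(template: str, output_url: str) -> subprocess.Popen:
    # Drop the exec: prefix and substitute the publish URL, as the docs describe.
    cmd = template.removeprefix("exec:").replace("{{output}}", output_url)
    return subprocess.Popen(shlex.split(cmd))

proc = launch_exec_source(template, "rtsp://127.0.0.1:8554/stream1")  # illustrative URL
```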

@@ -3,7 +3,7 @@ id: camera_setup
 title: Camera setup
 ---

-Cameras configured to output H.264 video and AAC audio will offer the most compatibility with all features of Frigate and Home Assistant. H.265 has better compression, but far less compatibility. Safari and Edge are the only browsers able to play H.265. Ideally, cameras should be configured directly for the desired resolutions and frame rates you want to use in Frigate. Reducing frame rates within Frigate will waste CPU resources decoding extra frames that are discarded. There are three different goals that you want to tune your stream configurations around.
+Cameras configured to output H.264 video and AAC audio will offer the most compatibility with all features of Frigate and Home Assistant. H.265 has better compression, but less compatibility. Chrome 108+, Safari and Edge are the only browsers able to play H.265 and only support a limited number of H.265 profiles. Ideally, cameras should be configured directly for the desired resolutions and frame rates you want to use in Frigate. Reducing frame rates within Frigate will waste CPU resources decoding extra frames that are discarded. There are three different goals that you want to tune your stream configurations around.

 - **Detection**: This is the only stream that Frigate will decode for processing. Also, this is the stream where snapshots will be generated from. The resolution for detection should be tuned for the size of the objects you want to detect. See [Choosing a detect resolution](#choosing-a-detect-resolution) for more details. The recommended frame rate is 5fps, but may need to be higher for very fast moving objects. Higher resolutions and frame rates will drive higher CPU usage on your server.
@@ -57,12 +57,21 @@ More information is available [in the detector docs](/configuration/detectors#op

 Inference speeds vary greatly depending on the CPU, GPU, or VPU used, some known examples are below:

-| Name                | Inference Speed | Notes                                                                  |
-| ------------------- | --------------- | ---------------------------------------------------------------------- |
-| Intel Celeron J4105 | ~ 25 ms         | Inference speeds on CPU were ~ 150 ms                                  |
-| Intel Celeron N4020 | 50 - 200 ms     | Inference speeds on CPU were ~ 800 ms, greatly depends on other loads  |
-| Intel NCS2 VPU      | 60 - 65 ms      | May vary based on host device                                          |
-| Intel i5 1135G7     | 10 - 15 ms      |                                                                        |
+| Name                 | Inference Speed | Notes                                                                  |
+| -------------------- | --------------- | ---------------------------------------------------------------------- |
+| Intel NCS2 VPU       | 60 - 65 ms      | May vary based on host device                                          |
+| Intel Celeron J4105  | ~ 25 ms         | Inference speeds on CPU were 150 - 200 ms                              |
+| Intel Celeron N3060  | 130 - 150 ms    | Inference speeds on CPU were ~ 550 ms                                  |
+| Intel Celeron N3205U | ~ 120 ms        | Inference speeds on CPU were ~ 380 ms                                  |
+| Intel Celeron N4020  | 50 - 200 ms     | Inference speeds on CPU were ~ 800 ms, greatly depends on other loads  |
+| Intel i3 6100T       | 15 - 35 ms      | Inference speeds on CPU were 60 - 120 ms                               |
+| Intel i3 8100        | ~ 15 ms         | Inference speeds on CPU were ~ 65 ms                                   |
+| Intel i5 4590        | ~ 20 ms         | Inference speeds on CPU were ~ 230 ms                                  |
+| Intel i5 6500        | ~ 15 ms         | Inference speeds on CPU were ~ 150 ms                                  |
+| Intel i5 7200u       | 15 - 25 ms      | Inference speeds on CPU were ~ 150 ms                                  |
+| Intel i5 7500        | ~ 15 ms         | Inference speeds on CPU were ~ 260 ms                                  |
+| Intel i5 1135G7      | 10 - 15 ms      |                                                                        |
+| Intel i5 12600K      | ~ 15 ms         | Inference speeds on CPU were ~ 35 ms                                   |

 ### TensorRT
docs/docs/guides/configuring_go2rtc.md (new file, 77 lines)

@@ -0,0 +1,77 @@
+---
+id: configuring_go2rtc
+title: Configuring go2rtc
+---
+
+Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect directly to your cameras. However, adding go2rtc to your configuration is required for the following features:
+
+- WebRTC or MSE for live viewing with higher resolutions and frame rates than the jsmpeg stream which is limited to the detect stream
+- RTSP (instead of RTMP) relay for use with Home Assistant or other consumers to reduce the number of connections to your camera streams
+
+# Setup a go2rtc stream
+
+First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc#module-streams), not just rtsp.
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
+```
+
+The easiest live view to get working is MSE. After adding this to the config, restart Frigate and try to watch the live stream by selecting MSE in the dropdown after clicking on the camera.
+
+### What if my video doesn't play?
+
+If you are unable to see your video feed, first check the go2rtc logs in the Frigate UI under Logs in the sidebar. If go2rtc is having difficulty connecting to your camera, you should see some error messages in the log. If you do not see any errors, then the video codec of the stream may not be supported in your browser. If your camera stream is set to H265, try switching to H264. You can see more information about [video codec compatibility](https://github.com/AlexxIT/go2rtc#codecs-madness) in the go2rtc documentation. If you are not able to switch your camera settings from H265 to H264 or your stream is a different format such as MJPEG, you can use go2rtc to re-encode the video using the [FFmpeg parameters](https://github.com/AlexxIT/go2rtc#source-ffmpeg). It supports rotating and resizing video feeds and hardware acceleration. Keep in mind that transcoding video from one format to another is a resource intensive task and you may be better off using the built-in jsmpeg view. Here is an example of a config that will re-encode the stream to H264 without hardware acceleration:
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
+      - "ffmpeg:back#video=h264"
+```
+
+Some camera streams may need to use the ffmpeg module in go2rtc. This has the downside of slower startup times, but has compatibility with more stream types.
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
+```
+
+If you can see the video but do not have audio, this is most likely because your camera's audio stream is not AAC. If possible, update your camera's audio settings to AAC. If your cameras do not support AAC audio, you will need to tell go2rtc to re-encode the audio to AAC on demand if you want audio. This will use additional CPU and add some latency. To add AAC audio on demand, you can update your go2rtc config as follows:
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
+      - "ffmpeg:back#audio=aac"
+```
+
+If you need to convert **both** the audio and video streams, you can use the following:
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
+      - "ffmpeg:back#video=h264#audio=aac"
+```
+
+When using the ffmpeg module, you would add AAC audio like this:
+
+```yaml
+go2rtc:
+  streams:
+    back:
+      - "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac"
+```
+
+## Next steps
+
+1. If the stream you added to go2rtc is also used by Frigate for the `record` or `detect` role, you can migrate your config to pull from the RTSP restream to reduce the number of connections to your camera as shown [here](/configuration/restream#reduce-connections-to-camera).
+1. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats.
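When working through the "What if my video doesn't play?" checklist above, it helps to confirm which codecs the camera actually sends before blaming the browser. A sketch that shells out to `ffprobe` (assumes `ffprobe` is on PATH; the URL is the illustrative one from the examples):

```python
import json
import subprocess

def stream_codecs(url: str) -> list[str]:
    """Ask ffprobe which codecs a stream carries (e.g. hevc vs h264, aac)."""
    out = subprocess.run(
        ["ffprobe", "-v", "quiet", "-print_format", "json", "-show_streams", url],
        capture_output=True, text=True, check=True,
    ).stdout
    return [s.get("codec_name", "?") for s in json.loads(out)["streams"]]

print(stream_codecs("rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2"))
# e.g. ['hevc', 'aac'] -> browser playback needs the re-encode examples above
```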
@@ -1,10 +0,0 @@
----
-id: events_setup
-title: Setting Up Events
----
-
-[Snapshots](../configuration/snapshots.md) and/or [Recordings](../configuration/record.md) must be enabled for events to be created for detected objects.
-
-## Limiting Events to Areas of Interest
-
-The best way to limit events to areas of interest is to use [zones](../configuration/zones.md) along with `required_zones` for events and snapshots to only have events created in areas of interest.
@@ -1,79 +1,32 @@
 ---
 id: getting_started
-title: Creating a config file
+title: Getting started
 ---
 
-This guide walks through the steps to build a configuration file for Frigate. It assumes that you already have an environment setup as described in [Installation](../frigate/installation.md). You should also configure your cameras according to the [camera setup guide](/guides/camera_setup)
+This guide walks through the steps to build a configuration file for Frigate. It assumes that you already have an environment setup as described in [Installation](../frigate/installation.md). You should also configure your cameras according to the [camera setup guide](/frigate/camera_setup). Pay particular attention to the section on choosing a detect resolution.
 
-### Step 1: Configure the MQTT server (Optional)
+### Step 1: Add a detect stream
 
-Use of a functioning MQTT server is optional for Frigate, but required for the home assistant integration. Start by adding the mqtt section at the top level in your config:
+First we will add the detect stream for the camera:
 
-If using mqtt:
-
-```yaml
-mqtt:
-  host: <ip of your mqtt server>
-```
-
-If not using mqtt:
-
 ```yaml
 mqtt:
   enabled: False
-```
-
-If using the Mosquitto Addon in Home Assistant, a username and password is required. For example:
-
-```yaml
-mqtt:
-  host: <ip of your mqtt server>
-  user: <username>
-  password: <password>
-```
-
-Frigate supports many configuration options for mqtt. See the [configuration reference](../configuration/index.md#full-configuration-reference) for more info.
-
-### Step 2: Configure detectors
-
-By default, Frigate will use a single CPU detector. If you have a USB Coral, you will need to add a detectors section to your config.
-
-```yaml
-mqtt:
-  host: <ip of your mqtt server>
-
-detectors:
-  coral:
-    type: edgetpu
-    device: usb
-```
-
-More details on available detectors can be found [here](../configuration/detectors.md).
-
-### Step 3: Add a minimal camera configuration
-
-Now let's add the first camera:
-
-```yaml
-mqtt:
-  host: <ip of your mqtt server>
-
-detectors:
-  coral:
-    type: edgetpu
-    device: usb
 
 cameras:
   camera_1: # <------ Name the camera
     ffmpeg:
       inputs:
-        - path: rtsp://10.0.10.10:554/rtsp # <----- Update for your camera
+        - path: rtsp://10.0.10.10:554/rtsp # <----- The stream you want to use for detection
          roles:
            - detect
     detect:
+      enabled: False # <---- disable detection until you have a working camera feed
       width: 1280 # <---- update for your camera's resolution
       height: 720 # <---- update for your camera's resolution
 ```
 
-### Step 4: Start Frigate
+### Step 2: Start Frigate
 
 At this point you should be able to start Frigate and see the video feed in the UI.
 
@@ -81,41 +34,48 @@ If you get an error image from the camera, this means ffmpeg was not able to get
 
 FFmpeg arguments for other types of cameras can be found [here](../configuration/camera_specific.md).
 
-### Step 5: Configure hardware acceleration (optional)
+### Step 3: Configure hardware acceleration (recommended)
 
 Now that you have a working camera configuration, you want to setup hardware acceleration to minimize the CPU required to decode your video streams. See the [hardware acceleration](../configuration/hardware_acceleration.md) config reference for examples applicable to your hardware.
 
-In order to best evaluate the performance impact of hardware acceleration, it is recommended to temporarily disable detection.
+Here is an example configuration with hardware acceleration configured for Intel processors with an integrated GPU using the [preset](../configuration/ffmpeg_presets.md):
 
 ```yaml
 mqtt: ...
 
-detectors: ...
-
-cameras:
-  camera_1:
-    ffmpeg: ...
-    detect:
-      enabled: False
-      ...
-```
-
-Here is an example configuration with hardware acceleration configured:
-
-```yaml
-mqtt: ...
-
-detectors: ...
-
 cameras:
   camera_1:
     ffmpeg:
       inputs: ...
-      hwaccel_args: -c:v h264_v4l2m2m
+      hwaccel_args: preset-vaapi
     detect: ...
 ```
 
-### Step 6: Setup motion masks
+### Step 4: Configure detectors
 
+By default, Frigate will use a single CPU detector. If you have a USB Coral, you will need to add a detectors section to your config.
+
+```yaml
+mqtt: ...
+
+detectors: # <---- add detectors
+  coral:
+    type: edgetpu
+    device: usb
+
+cameras:
+  camera_1:
+    ffmpeg: ...
+    detect:
+      enabled: True # <---- turn on detection
+      ...
+```
+
+More details on available detectors can be found [here](../configuration/detectors.md).
+
+Restart Frigate and you should start seeing detections for `person`. If you want to track other objects, they will need to be added according to the [configuration file reference](../configuration/index.md#full-configuration-reference).
+
+### Step 5: Setup motion masks
+
 Now that you have optimized your configuration for decoding the video stream, you will want to check to see where to implement motion masks. To do this, navigate to the camera in the UI, select "Debug" at the top, and enable "Motion boxes" in the options below the video feed. Watch for areas that continuously trigger unwanted motion to be detected. Common areas to mask include camera timestamps and trees that frequently blow in the wind. The goal is to avoid wasting object detection cycles looking at these areas.
 
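To illustrate the masking step above, a camera-level motion mask is a list of polygons under `motion.mask`; the coordinates below are a hypothetical box over an on-screen timestamp:

```yaml
cameras:
  camera_1:
    motion:
      mask:
        - 0,0,400,0,400,40,0,40  # hypothetical region covering the timestamp overlay
```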
@@ -131,7 +91,7 @@ Your configuration should look similar to this now.
 
 ```yaml
 mqtt:
-  host: mqtt.local
+  enabled: False
 
 detectors:
   coral:
@@ -153,9 +113,13 @@ cameras:
        - 0,461,3,0,1919,0,1919,843,1699,492,1344,458,1346,336,973,317,869,375,866,432
 ```
 
-### Step 7: Enable recording (optional)
+### Step 6: Enable recording and/or snapshots
 
-To enable recording video, add the `record` role to a stream and enable it in the config.
+In order to see Events in the Frigate UI, either snapshots or record will need to be enabled.
+
+#### Record
+
+To enable recording video, add the `record` role to a stream and enable it in the config. If record is disabled in the config, turning it on via the UI will not have any effect.
 
 ```yaml
 mqtt: ...
@@ -169,7 +133,7 @@ cameras:
        - path: rtsp://10.0.10.10:554/rtsp
          roles:
            - detect
-        - path: rtsp://10.0.10.10:554/high_res_stream # <----- Add high res stream
+        - path: rtsp://10.0.10.10:554/high_res_stream # <----- Add stream you want to record from
          roles:
            - record
     detect: ...
@@ -182,9 +146,9 @@ If you don't have separate streams for detect and record, you would just add the
 
 By default, Frigate will retain video of all events for 10 days. The full set of options for recording can be found [here](../configuration/index.md#full-configuration-reference).
 
-### Step 8: Enable snapshots (optional)
+#### Snapshots
 
-To enable snapshots of your events, just enable it in the config.
+To enable snapshots of your events, just enable it in the config. Snapshots are taken from the detect stream because it is the only stream decoded.
 
 ```yaml
 mqtt: ...
@@ -201,3 +165,10 @@ cameras:
 ```
 
 By default, Frigate will retain snapshots of all events for 10 days. The full set of options for snapshots can be found [here](../configuration/index.md#full-configuration-reference).
+
+### Step 7: Follow up guides
+
+Now that you have a working install, you can use the following guides for additional features:
+
+1. [Configuring go2rtc](configuring_go2rtc) - Additional live view options and RTSP relay
+2. [Home Assistant Integration](../integrations/home-assistant.md) - Integrate with Home Assistant
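Pulling the Step 6 settings together, a compact sketch of a camera with both record and snapshots enabled (the retention values shown are just the documented 10-day defaults):

```yaml
cameras:
  camera_1:
    record:
      enabled: True
      events:
        retain:
          default: 10
    snapshots:
      enabled: True
      retain:
        default: 10
```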
@@ -1,6 +1,6 @@
 ---
 id: reverse_proxy
-title: Setting up a Reverse Proxy
+title: Setting up a reverse proxy
 ---
 
 This guide outlines the basic configuration steps needed to expose your Frigate UI to the internet.
@@ -8,6 +8,7 @@ A common way of accomplishing this is to use a reverse proxy webserver between y
 A reverse proxy accepts HTTP requests from the public internet and redirects them transparently to internal webserver(s) on your network.
 
 The suggested steps are:
 
 - **Configure** a 'proxy' HTTP webserver (such as [Apache2](https://httpd.apache.org/docs/current/) or [NPM](https://github.com/NginxProxyManager/nginx-proxy-manager)) and only expose ports 80/443 from this webserver to the internet
 - **Encrypt** content from the proxy webserver by installing SSL (such as with [Let's Encrypt](https://letsencrypt.org/)). Note that SSL is then not required on your Frigate webserver as the proxy encrypts all requests for you
 - **Restrict** access to your Frigate instance at the proxy using, for example, password authentication
@@ -31,6 +32,7 @@ On Debian Apache2 the configuration file will be named along the lines of `/etc/
 
 Make life easier for yourself by presenting your Frigate interface as a DNS sub-domain rather than as a sub-folder of your main domain.
 Here we access Frigate via https://cctv.mydomain.co.uk
 
 ```xml
 <VirtualHost *:443>
     ServerName cctv.mydomain.co.uk
@@ -38,7 +40,7 @@ Here we access Frigate via https://cctv.mydomain.co.uk
     ProxyPreserveHost On
     ProxyPass "/" "http://frigatepi.local:5000/"
     ProxyPassReverse "/" "http://frigatepi.local:5000/"
 
     ProxyPass /ws ws://frigatepi.local:5000/ws
     ProxyPassReverse /ws ws://frigatepi.local:5000/ws
 
|
|||||||
@@ -4,11 +4,11 @@ module.exports = {
     "frigate/index",
     "frigate/hardware",
     "frigate/installation",
+    "frigate/camera_setup",
   ],
   Guides: [
-    "guides/camera_setup",
     "guides/getting_started",
-    "guides/events_setup",
+    "guides/configuring_go2rtc",
     "guides/false_positives",
     "guides/ha_notifications",
     "guides/stationary_objects",
@@ -3,6 +3,7 @@ import multiprocessing as mp
 from multiprocessing.queues import Queue
 from multiprocessing.synchronize import Event as MpEvent
 import os
+import shutil
 import signal
 import sys
 from typing import Optional
@@ -116,6 +117,9 @@ class FrigateApp:
         if not "werkzeug" in self.config.logger.logs:
             logging.getLogger("werkzeug").setLevel("ERROR")
 
+        if not "ws4py" in self.config.logger.logs:
+            logging.getLogger("ws4py").setLevel("ERROR")
+
     def init_queues(self) -> None:
         # Queues for clip processing
         self.event_queue: Queue = mp.Queue()
@@ -155,7 +159,9 @@ class FrigateApp:
         self.db.bind(models)
 
     def init_stats(self) -> None:
-        self.stats_tracking = stats_init(self.camera_metrics, self.detectors)
+        self.stats_tracking = stats_init(
+            self.config, self.camera_metrics, self.detectors
+        )
 
     def init_web_server(self) -> None:
         self.flask_app = create_app(
@@ -177,8 +183,7 @@ class FrigateApp:
         if self.config.mqtt.enabled:
             comms.append(MqttClient(self.config))
 
-        self.ws_client = WebSocketClient(self.config)
-        comms.append(self.ws_client)
+        comms.append(WebSocketClient(self.config))
         self.dispatcher = Dispatcher(self.config, self.camera_metrics, comms)
 
     def start_detectors(self) -> None:
@@ -327,6 +332,22 @@ class FrigateApp:
         self.frigate_watchdog = FrigateWatchdog(self.detectors, self.stop_event)
         self.frigate_watchdog.start()
 
+    def check_shm(self) -> None:
+        available_shm = round(shutil.disk_usage("/dev/shm").total / 1000000, 1)
+        min_req_shm = 30
+
+        for _, camera in self.config.cameras.items():
+            min_req_shm += round(
+                (camera.detect.width * camera.detect.height * 1.5 * 9 + 270480)
+                / 1048576,
+                1,
+            )
+
+        if available_shm < min_req_shm:
+            logger.warning(
+                f"The current SHM size of {available_shm}MB is too small, recommend increasing it to at least {min_req_shm}MB."
+            )
+
     def start(self) -> None:
         self.init_logger()
         logger.info(f"Starting Frigate ({VERSION})")
@@ -375,6 +396,7 @@ class FrigateApp:
         self.start_recording_cleanup()
         self.start_stats_emitter()
         self.start_watchdog()
+        self.check_shm()
         # self.zeroconf = broadcast_zeroconf(self.config.mqtt.client_id)
 
         def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
@@ -394,7 +416,17 @@ class FrigateApp:
         logger.info(f"Stopping...")
         self.stop_event.set()
 
-        self.ws_client.stop()
+        for detector in self.detectors.values():
+            detector.stop()
+
+        # Empty the detection queue and set the events for all requests
+        while not self.detection_queue.empty():
+            connection_id = self.detection_queue.get(timeout=1)
+            self.detection_out_events[connection_id].set()
+        self.detection_queue.close()
+        self.detection_queue.join_thread()
+
+        self.dispatcher.stop()
         self.detected_frames_processor.join()
         self.event_processor.join()
         self.event_cleanup.join()
@@ -404,10 +436,20 @@ class FrigateApp:
         self.frigate_watchdog.join()
         self.db.stop()
 
-        for detector in self.detectors.values():
-            detector.stop()
-
         while len(self.detection_shms) > 0:
             shm = self.detection_shms.pop()
             shm.close()
             shm.unlink()
+
+        for queue in [
+            self.event_queue,
+            self.event_processed_queue,
+            self.video_output_queue,
+            self.detected_frames_queue,
+            self.recordings_info_queue,
+            self.log_queue,
+        ]:
+            while not queue.empty():
+                queue.get_nowait()
+            queue.close()
+            queue.join_thread()
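To make the `check_shm` arithmetic above concrete: it budgets nine YUV420 frames (1.5 bytes per pixel) at each camera's detect resolution plus a fixed per-camera overhead, on top of a 30 MB base. Evaluating the formula for a single 1280x720 detect stream (a worked example, not taken from the source):

```latex
\text{min\_shm} = 30 + \sum_{\text{cameras}} \frac{w \cdot h \cdot 1.5 \cdot 9 + 270480}{1048576}
                = 30 + \frac{1280 \cdot 720 \cdot 13.5 + 270480}{1048576} \approx 42.1\ \text{MB}
```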
@@ -27,6 +27,11 @@ class Communicator(ABC):
         """Pass receiver so communicators can pass commands."""
         pass
 
+    @abstractmethod
+    def stop(self) -> None:
+        """Stop the communicator."""
+        pass
+
 
 class Dispatcher:
     """Handle communication between Frigate and communicators."""
@@ -72,6 +77,10 @@ class Dispatcher:
         for comm in self.comms:
             comm.publish(topic, payload, retain)
 
+    def stop(self) -> None:
+        for comm in self.comms:
+            comm.stop()
+
     def _on_detect_command(self, camera_name: str, payload: str) -> None:
         """Callback for detect topic."""
         detect_settings = self.config.cameras[camera_name].detect
@@ -35,6 +35,9 @@ class MqttClient(Communicator):  # type: ignore[misc]
             f"{self.mqtt_config.topic_prefix}/{topic}", payload, retain=retain
         )
 
+    def stop(self) -> None:
+        self.client.disconnect()
+
     def _set_initial_topics(self) -> None:
         """Set initial state topics."""
         for camera_name, camera in self.config.cameras.items():
@@ -95,3 +95,4 @@ class WebSocketClient(Communicator):  # type: ignore[misc]
         self.websocket_server.manager.join()
         self.websocket_server.shutdown()
         self.websocket_thread.join()
+        logger.info("Exiting websocket client...")
@@ -74,6 +74,10 @@ class UIConfig(FrigateBaseModel):
     use_experimental: bool = Field(default=False, title="Experimental UI")
 
 
+class TelemetryConfig(FrigateBaseModel):
+    version_check: bool = Field(default=True, title="Enable latest version check.")
+
+
 class MqttConfig(FrigateBaseModel):
     enabled: bool = Field(title="Enable MQTT Communication.", default=True)
     host: str = Field(default="", title="MQTT Host")
@@ -818,6 +822,9 @@ class FrigateConfig(FrigateBaseModel):
         default_factory=dict, title="Frigate environment variables."
     )
     ui: UIConfig = Field(default_factory=UIConfig, title="UI configuration.")
+    telemetry: TelemetryConfig = Field(
+        default_factory=TelemetryConfig, title="Telemetry configuration."
+    )
     model: ModelConfig = Field(
         default_factory=ModelConfig, title="Detection model configuration."
     )
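In the config file, the new `TelemetryConfig` surfaces as a top-level `telemetry` section. A minimal sketch of opting out of the GitHub version check (the stats output then reports the latest version as `disabled`):

```yaml
telemetry:
  version_check: False
```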
@@ -23,6 +23,11 @@ class InputTensorEnum(str, Enum):
     nhwc = "nhwc"
 
 
+class ModelTypeEnum(str, Enum):
+    ssd = "ssd"
+    yolox = "yolox"
+
+
 class ModelConfig(BaseModel):
     path: Optional[str] = Field(title="Custom Object detection model path.")
     labelmap_path: Optional[str] = Field(title="Label map for custom object detector.")
@@ -37,6 +42,9 @@ class ModelConfig(BaseModel):
     input_pixel_format: PixelFormatEnum = Field(
         default=PixelFormatEnum.rgb, title="Model Input Pixel Color Format"
     )
+    model_type: ModelTypeEnum = Field(
+        default=ModelTypeEnum.ssd, title="Object Detection Model Type"
+    )
     _merged_labelmap: Optional[Dict[int, str]] = PrivateAttr()
     _colormap: Dict[int, Tuple[int, int, int]] = PrivateAttr()
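A sketch of how the new `model_type` field is meant to be selected, here with the OpenVINO detector; the model path and input size are hypothetical placeholders, and `ssd` remains the default when the field is omitted:

```yaml
detectors:
  ov:
    type: openvino
    device: AUTO

model:
  model_type: yolox
  path: /config/yolox_tiny.xml  # hypothetical model path
  width: 416  # must match the model input
  height: 416
```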
@@ -3,7 +3,7 @@ import numpy as np
 import openvino.runtime as ov
 
 from frigate.detectors.detection_api import DetectionApi
-from frigate.detectors.detector_config import BaseDetectorConfig
+from frigate.detectors.detector_config import BaseDetectorConfig, ModelTypeEnum
 from typing import Literal
 from pydantic import Extra, Field
@@ -24,12 +24,18 @@ class OvDetector(DetectionApi):
     def __init__(self, detector_config: OvDetectorConfig):
         self.ov_core = ov.Core()
         self.ov_model = self.ov_core.read_model(detector_config.model.path)
+        self.ov_model_type = detector_config.model.model_type
+
+        self.h = detector_config.model.height
+        self.w = detector_config.model.width
+
         self.interpreter = self.ov_core.compile_model(
             model=self.ov_model, device_name=detector_config.device
         )
 
         logger.info(f"Model Input Shape: {self.interpreter.input(0).shape}")
         self.output_indexes = 0
 
         while True:
             try:
                 tensor_shape = self.interpreter.output(self.output_indexes).shape
@@ -38,29 +44,92 @@ class OvDetector(DetectionApi):
             except:
                 logger.info(f"Model has {self.output_indexes} Output Tensors")
                 break
+        if self.ov_model_type == ModelTypeEnum.yolox:
+            self.num_classes = tensor_shape[2] - 5
+            logger.info(f"YOLOX model has {self.num_classes} classes")
+            self.set_strides_grids()
+
+    def set_strides_grids(self):
+        grids = []
+        expanded_strides = []
+
+        strides = [8, 16, 32]
+
+        hsizes = [self.h // stride for stride in strides]
+        wsizes = [self.w // stride for stride in strides]
+
+        for hsize, wsize, stride in zip(hsizes, wsizes, strides):
+            xv, yv = np.meshgrid(np.arange(wsize), np.arange(hsize))
+            grid = np.stack((xv, yv), 2).reshape(1, -1, 2)
+            grids.append(grid)
+            shape = grid.shape[:2]
+            expanded_strides.append(np.full((*shape, 1), stride))
+        self.grids = np.concatenate(grids, 1)
+        self.expanded_strides = np.concatenate(expanded_strides, 1)
 
     def detect_raw(self, tensor_input):
-
         infer_request = self.interpreter.create_infer_request()
         infer_request.infer([tensor_input])
 
-        results = infer_request.get_output_tensor()
+        if self.ov_model_type == ModelTypeEnum.ssd:
+            results = infer_request.get_output_tensor()
 
-        detections = np.zeros((20, 6), np.float32)
-        i = 0
-        for object_detected in results.data[0, 0, :]:
-            if object_detected[0] != -1:
-                logger.debug(object_detected)
-            if object_detected[2] < 0.1 or i == 20:
-                break
-            detections[i] = [
-                object_detected[1],  # Label ID
-                float(object_detected[2]),  # Confidence
-                object_detected[4],  # y_min
-                object_detected[3],  # x_min
-                object_detected[6],  # y_max
-                object_detected[5],  # x_max
-            ]
-            i += 1
+            detections = np.zeros((20, 6), np.float32)
+            i = 0
+            for object_detected in results.data[0, 0, :]:
+                if object_detected[0] != -1:
+                    logger.debug(object_detected)
+                if object_detected[2] < 0.1 or i == 20:
+                    break
+                detections[i] = [
+                    object_detected[1],  # Label ID
+                    float(object_detected[2]),  # Confidence
+                    object_detected[4],  # y_min
+                    object_detected[3],  # x_min
+                    object_detected[6],  # y_max
+                    object_detected[5],  # x_max
+                ]
+                i += 1
+            return detections
+        elif self.ov_model_type == ModelTypeEnum.yolox:
+            out_tensor = infer_request.get_output_tensor()
+            # [x, y, h, w, box_score, class_no_1, ..., class_no_80],
+            results = out_tensor.data
+            results[..., :2] = (results[..., :2] + self.grids) * self.expanded_strides
+            results[..., 2:4] = np.exp(results[..., 2:4]) * self.expanded_strides
+            image_pred = results[0, ...]
 
-        return detections
+            class_conf = np.max(
+                image_pred[:, 5 : 5 + self.num_classes], axis=1, keepdims=True
+            )
+            class_pred = np.argmax(image_pred[:, 5 : 5 + self.num_classes], axis=1)
+            class_pred = np.expand_dims(class_pred, axis=1)
+
+            conf_mask = (image_pred[:, 4] * class_conf.squeeze() >= 0.3).squeeze()
+            # Detections ordered as (x1, y1, x2, y2, obj_conf, class_conf, class_pred)
+            dets = np.concatenate((image_pred[:, :5], class_conf, class_pred), axis=1)
+            dets = dets[conf_mask]
+
+            ordered = dets[dets[:, 5].argsort()[::-1]][:20]
+
+            detections = np.zeros((20, 6), np.float32)
+            i = 0
+
+            for object_detected in ordered:
+                if i < 20:
+                    detections[i] = [
+                        object_detected[6],  # Label ID
+                        object_detected[5],  # Confidence
+                        (object_detected[1] - (object_detected[3] / 2))
+                        / self.h,  # y_min
+                        (object_detected[0] - (object_detected[2] / 2))
+                        / self.w,  # x_min
+                        (object_detected[1] + (object_detected[3] / 2))
+                        / self.h,  # y_max
+                        (object_detected[0] + (object_detected[2] / 2))
+                        / self.w,  # x_max
+                    ]
+                    i += 1
+                else:
+                    break
+            return detections
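The grid and stride bookkeeping above is the standard YOLOX decode. In math terms (a summary of the code, not from the source itself):

```latex
x = (\hat{x} + g_x)\,s,\quad y = (\hat{y} + g_y)\,s,\quad w = e^{\hat{w}}\,s,\quad h = e^{\hat{h}}\,s
```

where (g_x, g_y) indexes the stride-s grid; corners are then (x ± w/2)/W and (y ± h/2)/H, normalized by the model input size, and a box is kept when box_score × class_conf ≥ 0.3.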
@@ -67,7 +67,7 @@ class EventProcessor(threading.Thread):
 
         while not self.stop_event.is_set():
             try:
-                event_type, camera, event_data = self.event_queue.get(timeout=10)
+                event_type, camera, event_data = self.event_queue.get(timeout=1)
             except queue.Empty:
                 continue
 
@@ -1,14 +1,52 @@
 """Handles inserting and maintaining ffmpeg presets."""
 
+import logging
 import os
 
 from typing import Any
 
 from frigate.version import VERSION
 from frigate.const import BTBN_PATH
+from frigate.util import vainfo_hwaccel
+
+
+logger = logging.getLogger(__name__)
+
+
+class LibvaGpuSelector:
+    "Automatically selects the correct libva GPU."
+
+    _selected_gpu = None
+
+    def get_selected_gpu(self) -> str:
+        """Get selected libva GPU."""
+        if not os.path.exists("/dev/dri"):
+            return ""
+
+        if self._selected_gpu:
+            return self._selected_gpu
+
+        devices = list(filter(lambda d: d.startswith("render"), os.listdir("/dev/dri")))
+
+        if len(devices) < 2:
+            self._selected_gpu = "/dev/dri/renderD128"
+            return self._selected_gpu
+
+        for device in devices:
+            check = vainfo_hwaccel(device_name=device)
+
+            logger.debug(f"{device} return vainfo status code: {check.returncode}")
+
+            if check.returncode == 0:
+                self._selected_gpu = f"/dev/dri/{device}"
+                return self._selected_gpu
+
+        return ""
+
 
 TIMEOUT_PARAM = "-timeout" if os.path.exists(BTBN_PATH) else "-stimeout"
 
+_gpu_selector = LibvaGpuSelector()
 _user_agent_args = [
     "-user_agent",
     f"FFmpeg Frigate/{VERSION}",
@@ -23,7 +61,7 @@ PRESETS_HW_ACCEL_DECODE = {
         "-hwaccel",
         "vaapi",
         "-hwaccel_device",
-        "/dev/dri/renderD128",
+        _gpu_selector.get_selected_gpu(),
         "-hwaccel_output_format",
         "vaapi",
     ],
@@ -31,7 +69,7 @@ PRESETS_HW_ACCEL_DECODE = {
         "-hwaccel",
         "qsv",
         "-qsv_device",
-        "/dev/dri/renderD128",
+        _gpu_selector.get_selected_gpu(),
         "-hwaccel_output_format",
         "qsv",
         "-c:v",
@@ -43,7 +81,7 @@ PRESETS_HW_ACCEL_DECODE = {
         "-hwaccel",
         "qsv",
         "-qsv_device",
-        "/dev/dri/renderD128",
+        _gpu_selector.get_selected_gpu(),
         "-hwaccel_output_format",
         "qsv",
         "-c:v",
@@ -82,19 +120,20 @@ PRESETS_HW_ACCEL_DECODE = {
 }
 
 PRESETS_HW_ACCEL_SCALE = {
-    "preset-rpi-32-h264": "-r {} -s {}x{} -f rawvideo -pix_fmt yuv420p",
-    "preset-rpi-64-h264": "-r {} -s {}x{} -f rawvideo -pix_fmt yuv420p",
-    "preset-vaapi": "-vf fps={},scale_vaapi=w={}:h={},hwdownload,format=yuv420p -f rawvideo",
-    "preset-intel-qsv-h264": "-r {} -vf vpp_qsv=w={}:h={}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
-    "preset-intel-qsv-h265": "-r {} -vf vpp_qsv=w={}:h={}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
-    "preset-nvidia-h264": "-vf fps={},scale_cuda=w={}:h={}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
-    "preset-nvidia-h265": "-vf fps={},scale_cuda=w={}:h={}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
-    "default": "-r {} -s {}x{}",
+    "preset-rpi-32-h264": "-r {0} -s {1}x{2} -f rawvideo -pix_fmt yuv420p",
+    "preset-rpi-64-h264": "-r {0} -s {1}x{2} -f rawvideo -pix_fmt yuv420p",
+    "preset-vaapi": "-r {0} -vf fps={0},scale_vaapi=w={1}:h={2},hwdownload,format=yuv420p -f rawvideo",
+    "preset-intel-qsv-h264": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
+    "preset-intel-qsv-h265": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
+    "preset-nvidia-h264": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
+    "preset-nvidia-h265": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
+    "default": "-r {0} -s {1}x{2}",
 }
 
 PRESETS_HW_ACCEL_ENCODE = {
     "preset-rpi-32-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m -g 50 -bf 0 {1}",
     "preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m -g 50 -bf 0 {1}",
+    "preset-vaapi": "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0 -an -vf format=vaapi|nv12,hwupload {1}",
     "preset-intel-qsv-h264": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
     "preset-intel-qsv-h265": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
     "preset-nvidia-h264": "ffmpeg -hide_banner {0} -c:v h264_nvenc -g 50 -profile:v high -level:v auto -preset:v p2 -tune:v ll {1}",
@@ -142,6 +181,7 @@ def parse_preset_hardware_acceleration_encode(arg: Any, input: str, output: str)
     return PRESETS_HW_ACCEL_ENCODE.get(arg, PRESETS_HW_ACCEL_ENCODE["default"]).format(
         input,
         output,
+        _gpu_selector.get_selected_gpu(),
     )
 
 
@@ -237,6 +277,17 @@ PRESETS_INPUT = {
         TIMEOUT_PARAM,
         "5000000",
     ],
+    "preset-rtsp-restream-low-latency": _user_agent_args
+    + [
+        "-rtsp_transport",
+        "tcp",
+        TIMEOUT_PARAM,
+        "5000000",
+        "-fflags",
+        "nobuffer",
+        "-flags",
+        "low_delay",
+    ],
     "preset-rtsp-udp": _user_agent_args
     + [
         "-avoid_negative_ts",
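As a usage sketch, the new low-latency preset should be selectable like any other input preset on a camera input; the path below assumes a local go2rtc restream (an illustration, not taken from this diff):

```yaml
cameras:
  back:
    ffmpeg:
      inputs:
        - path: rtsp://127.0.0.1:8554/back
          input_args: preset-rtsp-restream-low-latency
          roles:
            - detect
```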
@@ -870,7 +870,7 @@ def latest_frame(camera_name):
         response.headers["Content-Type"] = "image/jpeg"
         response.headers["Cache-Control"] = "no-store"
         return response
-    elif camera_name == "birdseye" and current_app.frigate_config.restream.birdseye:
+    elif camera_name == "birdseye" and current_app.frigate_config.birdseye.restream:
         frame = cv2.cvtColor(
             current_app.detected_frames_processor.get_current_frame(camera_name),
             cv2.COLOR_YUV2BGR_I420,
@@ -2,11 +2,16 @@
 import logging
 import threading
 import os
+import signal
 import queue
+import multiprocessing as mp
 from multiprocessing.queues import Queue
 from logging import handlers
-from typing import Optional
-from types import FrameType
 from setproctitle import setproctitle
-from typing import Deque
+from typing import Deque, Optional
+from types import FrameType
 from collections import deque
 
 from frigate.util import clean_camera_user_pass
@@ -34,10 +39,21 @@ def log_process(log_queue: Queue) -> None:
     threading.current_thread().name = f"logger"
     setproctitle("frigate.logger")
     listener_configurer()
+
+    stop_event = mp.Event()
+
+    def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
+        stop_event.set()
+
+    signal.signal(signal.SIGTERM, receiveSignal)
+    signal.signal(signal.SIGINT, receiveSignal)
+
     while True:
         try:
-            record = log_queue.get(timeout=5)
+            record = log_queue.get(timeout=1)
         except (queue.Empty, KeyboardInterrupt):
+            if stop_event.is_set():
+                break
             continue
         logger = logging.getLogger(record.name)
         logger.handle(record)
@@ -88,6 +88,7 @@ def run_detector(
     stop_event = mp.Event()
 
     def receiveSignal(signalNumber, frame):
+        logger.info("Signal to exit detection process...")
         stop_event.set()
 
     signal.signal(signal.SIGTERM, receiveSignal)
@@ -104,7 +105,7 @@ def run_detector(
 
     while not stop_event.is_set():
         try:
-            connection_id = detection_queue.get(timeout=5)
+            connection_id = detection_queue.get(timeout=1)
         except queue.Empty:
             continue
         input_frame = frame_manager.get(
@@ -125,6 +126,8 @@ def run_detector(
 
         avg_speed.value = (avg_speed.value * 9 + duration) / 10
 
+    logger.info("Exited detection process...")
+
 
 class ObjectDetectProcess:
     def __init__(
@@ -144,6 +147,9 @@ class ObjectDetectProcess:
         self.start_or_restart()
 
     def stop(self):
+        # if the process has already exited on its own, just return
+        if self.detect_process and self.detect_process.exitcode:
+            return
         self.detect_process.terminate()
         logging.info("Waiting for detection process to exit gracefully...")
         self.detect_process.join(timeout=30)
@@ -151,6 +157,7 @@ class ObjectDetectProcess:
             logging.info("Detection process didnt exit. Force killing...")
             self.detect_process.kill()
             self.detect_process.join()
+        logging.info("Detection process has exited...")
 
     def start_or_restart(self):
         self.detection_start.value = 0.0
@@ -173,12 +180,13 @@ class ObjectDetectProcess:
 
 
 class RemoteObjectDetector:
-    def __init__(self, name, labels, detection_queue, event, model_config):
+    def __init__(self, name, labels, detection_queue, event, model_config, stop_event):
         self.labels = labels
         self.name = name
         self.fps = EventsPerSecond()
         self.detection_queue = detection_queue
         self.event = event
+        self.stop_event = stop_event
         self.shm = mp.shared_memory.SharedMemory(name=self.name, create=False)
         self.np_shm = np.ndarray(
             (1, model_config.height, model_config.width, 3),
@@ -193,11 +201,14 @@ class RemoteObjectDetector:
     def detect(self, tensor_input, threshold=0.4):
         detections = []
 
+        if self.stop_event.is_set():
+            return detections
+
         # copy input to shared memory
         self.np_shm[:] = tensor_input[:]
         self.event.clear()
         self.detection_queue.put(self.name)
-        result = self.event.wait(timeout=10.0)
+        result = self.event.wait(timeout=5.0)
 
         # if it timed out
         if result is None:
@@ -901,7 +901,7 @@ class TrackedObjectProcessor(threading.Thread):
                     current_tracked_objects,
                     motion_boxes,
                     regions,
-                ) = self.tracked_objects_queue.get(True, 10)
+                ) = self.tracked_objects_queue.get(True, 1)
             except queue.Empty:
                 continue
 
@@ -109,14 +109,15 @@ class FFMpegConverter:
 
 
 class BroadcastThread(threading.Thread):
-    def __init__(self, camera, converter, websocket_server):
+    def __init__(self, camera, converter, websocket_server, stop_event):
         super(BroadcastThread, self).__init__()
         self.camera = camera
         self.converter = converter
         self.websocket_server = websocket_server
+        self.stop_event = stop_event
 
     def run(self):
-        while True:
+        while not self.stop_event.is_set():
             buf = self.converter.read(65536)
             if buf:
                 manager = self.websocket_server.manager
@@ -426,7 +427,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
             cam_config.live.quality,
         )
         broadcasters[camera] = BroadcastThread(
-            camera, converters[camera], websocket_server
+            camera, converters[camera], websocket_server, stop_event
         )
 
     if config.birdseye.enabled:
@@ -439,7 +440,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
             config.birdseye.restream,
         )
         broadcasters["birdseye"] = BroadcastThread(
-            "birdseye", converters["birdseye"], websocket_server
+            "birdseye", converters["birdseye"], websocket_server, stop_event
         )
 
     websocket_thread.start()
@@ -463,7 +464,7 @@ def output_frames(config: FrigateConfig, video_output_queue):
                 current_tracked_objects,
                 motion_boxes,
                 regions,
-            ) = video_output_queue.get(True, 10)
+            ) = video_output_queue.get(True, 1)
         except queue.Empty:
             continue
 
@@ -100,19 +100,12 @@ class RecordingMaintainer(threading.Thread):
         for camera in grouped_recordings.keys():
             segment_count = len(grouped_recordings[camera])
             if segment_count > keep_count:
-                ####
-                # Need to find a way to tell if these are aging out based on retention settings or if the system is overloaded.
-                ####
-                # logger.warning(
-                #     f"Too many recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {segment_count}, discarding the rest..."
-                # )
+                logger.warning(
+                    f"Unable to keep up with recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {segment_count} and discarding the rest..."
+                )
                 to_remove = grouped_recordings[camera][:-keep_count]
                 for f in to_remove:
                     cache_path = f["cache_path"]
-                    ####
-                    # Need to find a way to tell if these are aging out based on retention settings or if the system is overloaded.
-                    ####
-                    # logger.warning(f"Discarding a recording segment: {cache_path}")
                     Path(cache_path).unlink(missing_ok=True)
                     self.end_time_cache.pop(cache_path, None)
                 grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]
@@ -227,6 +220,19 @@ class RecordingMaintainer(threading.Thread):
                         cache_path,
                         record_mode,
                     )
+                # if it doesn't overlap with an event, go ahead and drop the segment
+                # if it ends more than the configured pre_capture for the camera
+                else:
+                    pre_capture = self.config.cameras[
+                        camera
+                    ].record.events.pre_capture
+                    most_recently_processed_frame_time = self.recordings_info[
+                        camera
+                    ][-1][0]
+                    retain_cutoff = most_recently_processed_frame_time - pre_capture
+                    if end_time.timestamp() < retain_cutoff:
+                        Path(cache_path).unlink(missing_ok=True)
+                        self.end_time_cache.pop(cache_path, None)
             # else retain days includes this segment
             else:
                 record_mode = self.config.cameras[camera].record.retain.mode
@@ -411,6 +417,10 @@ class RecordingCleanup(threading.Thread):
             logger.debug(f"Checking tmp clip {p}.")
             if p.stat().st_mtime < (datetime.datetime.now().timestamp() - 60 * 1):
                 logger.debug("Deleting tmp clip.")
+
+                # empty contents of file before unlinking https://github.com/blakeblackshear/frigate/issues/4769
+                with open(p, "w"):
+                    pass
                 p.unlink(missing_ok=True)
 
     def expire_recordings(self):
@@ -22,7 +22,11 @@ from frigate.object_detection import ObjectDetectProcess
 logger = logging.getLogger(__name__)
 
 
-def get_latest_version() -> str:
+def get_latest_version(config: FrigateConfig) -> str:
+    if not config.telemetry.version_check:
+        return "disabled"
+
     try:
         request = requests.get(
             "https://api.github.com/repos/blakeblackshear/frigate/releases/latest",
@@ -40,6 +44,7 @@ def get_latest_version() -> str:
 
 
 def stats_init(
+    config: FrigateConfig,
     camera_metrics: dict[str, CameraMetricsTypes],
     detectors: dict[str, ObjectDetectProcess],
 ) -> StatsTrackingTypes:
@@ -47,7 +52,8 @@ def stats_init(
         "camera_metrics": camera_metrics,
         "detectors": detectors,
         "started": int(time.time()),
-        "latest_frigate_version": get_latest_version(),
+        "latest_frigate_version": get_latest_version(config),
+        "last_updated": int(time.time()),
     }
     return stats_tracking
 
@@ -239,6 +245,7 @@ def stats_snapshot(
         "latest_version": stats_tracking["latest_frigate_version"],
         "storage": {},
         "temperatures": get_temperatures(),
+        "last_updated": int(time.time()),
     }
 
     for path in [RECORD_DIR, CLIPS_DIR, CACHE_DIR, "/dev/shm"]:
@@ -276,8 +283,10 @@ class StatsEmitter(threading.Thread):
     def run(self) -> None:
         time.sleep(10)
         while not self.stop_event.wait(self.config.mqtt.stats_interval):
+            logger.debug("Starting stats collection")
             stats = stats_snapshot(
                 self.config, self.stats_tracking, self.hwaccel_errors
             )
             self.dispatcher.publish("stats", json.dumps(stats), retain=False)
-        logger.info(f"Exiting watchdog...")
+            logger.debug("Finished stats collection")
+        logger.info(f"Exiting stats emitter...")
@@ -29,3 +29,4 @@ class StatsTrackingTypes(TypedDict):
     detectors: dict[str, ObjectDetectProcess]
     started: int
     latest_frigate_version: str
+    last_updated: int
@@ -14,7 +14,7 @@ from abc import ABC, abstractmethod
|
|||||||
from collections import Counter
|
from collections import Counter
|
||||||
from collections.abc import Mapping
|
from collections.abc import Mapping
|
||||||
from multiprocessing import shared_memory
|
from multiprocessing import shared_memory
|
||||||
from typing import Any, AnyStr, Tuple
|
from typing import Any, AnyStr, Optional, Tuple
|
||||||
|
|
||||||
import cv2
|
import cv2
|
||||||
import numpy as np
|
import numpy as np
|
||||||
@@ -628,8 +628,13 @@ def clipped(obj, frame_shape):
|
|||||||
|
|
||||||
|
|
||||||
def restart_frigate():
|
def restart_frigate():
|
||||||
# S6 overlay is configured to exit once the Frigate process exits
|
proc = psutil.Process(1)
|
||||||
os.kill(os.getpid(), signal.SIGTERM)
|
# if this is running via s6, sigterm pid 1
|
||||||
|
if proc.name() == "s6-svscan":
|
||||||
|
proc.terminate()
|
||||||
|
# otherwise, just try and exit frigate
|
||||||
|
else:
|
||||||
|
os.kill(os.getpid(), signal.SIGTERM)
|
||||||
|
|
||||||
|
|
||||||
class EventsPerSecond:
|
class EventsPerSecond:
|
||||||
@@ -921,6 +926,17 @@ def get_nvidia_gpu_stats() -> dict[str, str]:
|
|||||||
"--format=csv",
|
"--format=csv",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
if (
|
||||||
|
"CUDA_VISIBLE_DEVICES" in os.environ
|
||||||
|
and os.environ["CUDA_VISIBLE_DEVICES"].isdigit()
|
||||||
|
):
|
||||||
|
nvidia_smi_command.extend(["--id", os.environ["CUDA_VISIBLE_DEVICES"]])
|
||||||
|
elif (
|
||||||
|
"NVIDIA_VISIBLE_DEVICES" in os.environ
|
||||||
|
and os.environ["NVIDIA_VISIBLE_DEVICES"].isdigit()
|
||||||
|
):
|
||||||
|
nvidia_smi_command.extend(["--id", os.environ["NVIDIA_VISIBLE_DEVICES"]])
|
||||||
|
|
||||||
p = sp.run(
|
p = sp.run(
|
||||||
nvidia_smi_command,
|
nvidia_smi_command,
|
||||||
encoding="ascii",
|
encoding="ascii",
|
||||||
@@ -960,9 +976,13 @@ def ffprobe_stream(path: str) -> sp.CompletedProcess:
     return sp.run(ffprobe_cmd, capture_output=True)


-def vainfo_hwaccel() -> sp.CompletedProcess:
+def vainfo_hwaccel(device_name: Optional[str] = None) -> sp.CompletedProcess:
     """Run vainfo."""
-    ffprobe_cmd = ["vainfo"]
+    ffprobe_cmd = (
+        ["vainfo"]
+        if not device_name
+        else ["vainfo", "--display", "drm", "--device", f"/dev/dri/{device_name}"]
+    )
     return sp.run(ffprobe_cmd, capture_output=True)

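With a `device_name`, the resulting invocation is `vainfo --display drm --device /dev/dri/<name>`, which pins the probe to a single render node on hosts with more than one GPU. For example (the node name `renderD128` is illustrative; it is typically the first render device):

```python
import subprocess as sp

result = sp.run(
    ["vainfo", "--display", "drm", "--device", "/dev/dri/renderD128"],
    capture_output=True,
)
# A non-zero return code usually means no usable VA-API driver for that node.
print(result.returncode)
print(result.stdout.decode(errors="replace"))
```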
@@ -160,6 +160,7 @@ def capture_frames(
     fps: mp.Value,
     skipped_fps: mp.Value,
     current_frame: mp.Value,
+    stop_event: mp.Event,
 ):

     frame_size = frame_shape[0] * frame_shape[1]

@@ -177,6 +178,9 @@ def capture_frames(
         try:
             frame_buffer[:] = ffmpeg_process.stdout.read(frame_size)
         except Exception as e:
+            # shutdown has been initiated
+            if stop_event.is_set():
+                break
             logger.error(f"{camera_name}: Unable to read frames from ffmpeg process.")

             if ffmpeg_process.poll() != None:
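Checking `stop_event` before logging turns an expected failure mode into a clean exit: on shutdown, ffmpeg's stdout pipe closes and the read raises, but that is no longer reported as an error. A standalone sketch of the same distinction using a multiprocessing pipe (names are illustrative):

```python
import multiprocessing as mp

stop_event = mp.Event()
parent_conn, child_conn = mp.Pipe()

def read_frames(conn, stop) -> None:
    while True:
        try:
            data = conn.recv_bytes()
            print(f"got {len(data)} bytes")
        except EOFError:
            # Distinguish a deliberate shutdown from a producer crash.
            print("clean shutdown" if stop.is_set() else "producer died unexpectedly")
            break

parent_conn.send_bytes(b"frame")
stop_event.set()     # signal shutdown before the pipe closes
parent_conn.close()  # makes recv_bytes() raise EOFError once drained
read_frames(child_conn, stop_event)
```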
@@ -269,7 +273,20 @@ class CameraWatchdog(threading.Thread):
                     self.logger.info("Waiting for ffmpeg to exit gracefully...")
                     self.ffmpeg_detect_process.communicate(timeout=30)
                 except sp.TimeoutExpired:
-                    self.logger.info("FFmpeg didnt exit. Force killing...")
+                    self.logger.info("FFmpeg did not exit. Force killing...")
+                    self.ffmpeg_detect_process.kill()
+                    self.ffmpeg_detect_process.communicate()
+            elif self.camera_fps.value >= (self.config.detect.fps + 10):
+                self.camera_fps.value = 0
+                self.logger.info(
+                    f"{self.camera_name} exceeded fps limit. Exiting ffmpeg..."
+                )
+                self.ffmpeg_detect_process.terminate()
+                try:
+                    self.logger.info("Waiting for ffmpeg to exit gracefully...")
+                    self.ffmpeg_detect_process.communicate(timeout=30)
+                except sp.TimeoutExpired:
+                    self.logger.info("FFmpeg did not exit. Force killing...")
                     self.ffmpeg_detect_process.kill()
                     self.ffmpeg_detect_process.communicate()

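The new `elif` restarts ffmpeg when a camera reports at least 10 fps more than the configured detect fps, reusing the same terminate-then-kill escalation as the stalled-stream branch. That escalation is a generic pattern worth noting in isolation (a sketch; the 30-second grace period mirrors the code above):

```python
import subprocess as sp

def stop_ffmpeg(proc: sp.Popen, grace: float = 30.0) -> None:
    proc.terminate()  # polite SIGTERM first
    try:
        proc.communicate(timeout=grace)
    except sp.TimeoutExpired:
        proc.kill()         # hard SIGKILL if SIGTERM was ignored
        proc.communicate()  # reap the process to avoid a zombie

stop_ffmpeg(sp.Popen(["sleep", "60"]), grace=5.0)  # demo with a process that honors SIGTERM
```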
@@ -327,6 +344,7 @@ class CameraWatchdog(threading.Thread):
                 self.frame_shape,
                 self.frame_queue,
                 self.camera_fps,
+                self.stop_event,
             )
             self.capture_thread.start()

@@ -355,13 +373,16 @@ class CameraWatchdog(threading.Thread):


 class CameraCapture(threading.Thread):
-    def __init__(self, camera_name, ffmpeg_process, frame_shape, frame_queue, fps):
+    def __init__(
+        self, camera_name, ffmpeg_process, frame_shape, frame_queue, fps, stop_event
+    ):
         threading.Thread.__init__(self)
         self.name = f"capture:{camera_name}"
         self.camera_name = camera_name
         self.frame_shape = frame_shape
         self.frame_queue = frame_queue
         self.fps = fps
+        self.stop_event = stop_event
         self.skipped_fps = EventsPerSecond()
         self.frame_manager = SharedMemoryFrameManager()
         self.ffmpeg_process = ffmpeg_process

@@ -379,6 +400,7 @@ class CameraCapture(threading.Thread):
             self.fps,
             self.skipped_fps,
             self.current_frame,
+            self.stop_event,
         )

@@ -391,6 +413,9 @@ def capture_camera(name, config: CameraConfig, process_info):
     signal.signal(signal.SIGTERM, receiveSignal)
     signal.signal(signal.SIGINT, receiveSignal)

+    threading.current_thread().name = f"capture:{name}"
+    setproctitle(f"frigate.capture:{name}")
+
     frame_queue = process_info["frame_queue"]
     camera_watchdog = CameraWatchdog(
         name,
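Naming the thread and retitling the process makes each per-camera capture process identifiable in `ps`/`top` output and in thread dumps. The same two calls in isolation (assumes the third-party `setproctitle` package, which Frigate already uses; the camera name is illustrative):

```python
import threading
from setproctitle import setproctitle  # third-party package

threading.current_thread().name = "capture:front_door"  # visible in logs/thread dumps
setproctitle("frigate.capture:front_door")              # visible in ps/top output
```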
@@ -445,7 +470,7 @@ def track_camera(
         motion_contour_area,
     )
     object_detector = RemoteObjectDetector(
-        name, labelmap, detection_queue, result_connection, model_config
+        name, labelmap, detection_queue, result_connection, model_config, stop_event
     )

     object_tracker = ObjectTracker(config.detect)
@@ -585,7 +610,7 @@ def process_frames(
             break

         try:
-            frame_time = frame_queue.get(True, 10)
+            frame_time = frame_queue.get(True, 1)
         except queue.Empty:
             continue

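Dropping the blocking timeout from 10 s to 1 s bounds how long shutdown can be delayed by a `get()` that is already waiting: the loop re-checks its exit condition every second instead of every ten. The pattern in isolation:

```python
import queue
import threading
import time

frame_queue: "queue.Queue[float]" = queue.Queue()
stop_event = threading.Event()

def process_frames() -> None:
    while not stop_event.is_set():
        try:
            frame_time = frame_queue.get(True, 1)  # short timeout keeps shutdown responsive
        except queue.Empty:
            continue
        print(f"processing frame captured at {frame_time}")

t = threading.Thread(target=process_frames)
t.start()
frame_queue.put(time.time())
time.sleep(0.2)   # give the worker a chance to drain the queue
stop_event.set()  # worker exits within ~1 s
t.join()
```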
@@ -771,6 +796,7 @@ def process_frames(
                     refining = True
                 else:
                     selected_objects.append(obj)
+
         # set the detections list to only include top, complete objects
        # and new detections
         detections = selected_objects
@@ -11,7 +11,15 @@ export default function CameraImage({ camera, onload, searchParams = '', stretch
   const [hasLoaded, setHasLoaded] = useState(false);
   const containerRef = useRef(null);
   const canvasRef = useRef(null);
-  const [{ width: availableWidth }] = useResizeObserver(containerRef);
+  const [{ width: containerWidth }] = useResizeObserver(containerRef);
+
+  // Add scrollbar width (when visible) to the available observer width to eliminate screen juddering.
+  // https://github.com/blakeblackshear/frigate/issues/1657
+  let scrollBarWidth = 0;
+  if (window.innerWidth && document.body.offsetWidth) {
+    scrollBarWidth = window.innerWidth - document.body.offsetWidth;
+  }
+  const availableWidth = scrollBarWidth ? containerWidth + scrollBarWidth : containerWidth;

   const { name } = config ? config.cameras[camera] : '';
   const enabled = config ? config.cameras[camera].enabled : 'True';

@@ -22,7 +30,11 @@ export default function CameraImage({ camera, onload, searchParams = '', stretch
     const scaledHeight = Math.floor(availableWidth / aspectRatio);
     return stretch ? scaledHeight : Math.min(scaledHeight, height);
   }, [availableWidth, aspectRatio, height, stretch]);
-  const scaledWidth = useMemo(() => Math.ceil(scaledHeight * aspectRatio), [scaledHeight, aspectRatio]);
+  const scaledWidth = useMemo(() => Math.ceil(scaledHeight * aspectRatio - scrollBarWidth), [
+    scaledHeight,
+    aspectRatio,
+    scrollBarWidth,
+  ]);

   const img = useMemo(() => new Image(), []);
   img.onload = useCallback(
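As a worked example (numbers illustrative): on a 1920 px viewport with a visible 17 px scrollbar, `window.innerWidth` is 1920 and `document.body.offsetWidth` is 1903, so `scrollBarWidth` is 17. `availableWidth` is padded to `containerWidth + 17`, and the same 17 px is subtracted back out of `scaledWidth`, so the layout no longer shifts when the scrollbar appears or disappears.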
@@ -1,61 +0,0 @@
-import { h } from 'preact';
-
-const timeAgo = ({ time, dense = false }) => {
-  if (!time) return 'Invalid Time';
-  try {
-    const currentTime = new Date();
-    const pastTime = new Date(time);
-    const elapsedTime = currentTime - pastTime;
-    if (elapsedTime < 0) return 'Invalid Time';
-
-    const timeUnits = [
-      { unit: 'ye', full: 'year', value: 31536000 },
-      { unit: 'mo', full: 'month', value: 0 },
-      { unit: 'day', full: 'day', value: 86400 },
-      { unit: 'h', full: 'hour', value: 3600 },
-      { unit: 'm', full: 'minute', value: 60 },
-      { unit: 's', full: 'second', value: 1 },
-    ];
-
-    let elapsed = elapsedTime / 1000;
-    if (elapsed < 60) {
-      return 'just now';
-    }
-
-    for (let i = 0; i < timeUnits.length; i++) {
-      // if months
-      if (i === 1) {
-        // Get the month and year for the time provided
-        const pastMonth = pastTime.getUTCMonth();
-        const pastYear = pastTime.getUTCFullYear();
-
-        // get current month and year
-        const currentMonth = currentTime.getUTCMonth();
-        const currentYear = currentTime.getUTCFullYear();
-
-        let monthDiff = (currentYear - pastYear) * 12 + (currentMonth - pastMonth);
-
-        // check if the time provided is the previous month but not exceeded 1 month ago.
-        if (currentTime.getUTCDate() < pastTime.getUTCDate()) {
-          monthDiff--;
-        }
-
-        if (monthDiff > 0) {
-          const unitAmount = monthDiff;
-          return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
-        }
-      } else if (elapsed >= timeUnits[i].value) {
-        const unitAmount = Math.floor(elapsed / timeUnits[i].value);
-        return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
-      }
-    }
-  } catch {
-    return 'Invalid Time';
-  }
-};
-
-const TimeAgo = (props) => {
-  return <span>{timeAgo({ ...props })}</span>;
-};
-
-export default TimeAgo;
84 web/src/components/TimeAgo.tsx (new file)
@@ -0,0 +1,84 @@
+import { h, FunctionComponent } from 'preact';
+import { useEffect, useMemo, useState } from 'preact/hooks';
+
+interface IProp {
+  /** The time to calculate time-ago from */
+  time: Date;
+  /** OPTIONAL: overwrite current time */
+  currentTime?: Date;
+  /** OPTIONAL: boolean that determines whether to show the time-ago text in dense format */
+  dense?: boolean;
+  /** OPTIONAL: set custom refresh interval in milliseconds, default 1000 (1 sec) */
+  refreshInterval?: number;
+}
+
+type TimeUnit = {
+  unit: string;
+  full: string;
+  value: number;
+};
+
+const timeAgo = ({ time, currentTime = new Date(), dense = false }: IProp): string => {
+  if (typeof time !== 'number' || time < 0) return 'Invalid Time Provided';
+
+  const pastTime: Date = new Date(time);
+  const elapsedTime: number = currentTime.getTime() - pastTime.getTime();
+
+  const timeUnits: TimeUnit[] = [
+    { unit: 'ye', full: 'year', value: 31536000 },
+    { unit: 'mo', full: 'month', value: 0 },
+    { unit: 'day', full: 'day', value: 86400 },
+    { unit: 'h', full: 'hour', value: 3600 },
+    { unit: 'm', full: 'minute', value: 60 },
+    { unit: 's', full: 'second', value: 1 },
+  ];
+
+  const elapsed: number = elapsedTime / 1000;
+  if (elapsed < 10) {
+    return 'just now';
+  }
+
+  for (let i = 0; i < timeUnits.length; i++) {
+    // if months
+    if (i === 1) {
+      // Get the month and year for the time provided
+      const pastMonth = pastTime.getUTCMonth();
+      const pastYear = pastTime.getUTCFullYear();
+
+      // get current month and year
+      const currentMonth = currentTime.getUTCMonth();
+      const currentYear = currentTime.getUTCFullYear();
+
+      let monthDiff = (currentYear - pastYear) * 12 + (currentMonth - pastMonth);
+
+      // check if the time provided is the previous month but not exceeded 1 month ago.
+      if (currentTime.getUTCDate() < pastTime.getUTCDate()) {
+        monthDiff--;
+      }
+
+      if (monthDiff > 0) {
+        const unitAmount = monthDiff;
+        return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
+      }
+    } else if (elapsed >= timeUnits[i].value) {
+      const unitAmount: number = Math.floor(elapsed / timeUnits[i].value);
+      return `${unitAmount}${dense ? timeUnits[i].unit[0] : ` ${timeUnits[i].full}`}${dense ? '' : 's'} ago`;
+    }
+  }
+  return 'Invalid Time';
+};
+
+const TimeAgo: FunctionComponent<IProp> = ({ refreshInterval = 1000, ...rest }): JSX.Element => {
+  const [currentTime, setCurrentTime] = useState<Date>(new Date());
+  useEffect(() => {
+    const intervalId: NodeJS.Timeout = setInterval(() => {
+      setCurrentTime(new Date());
+    }, refreshInterval);
+    return () => clearInterval(intervalId);
+  }, [refreshInterval]);
+
+  const timeAgoValue = useMemo(() => timeAgo({ currentTime, ...rest }), [currentTime, rest]);
+
+  return <span>{timeAgoValue}</span>;
+};
+export default TimeAgo;
@@ -25,7 +25,8 @@ export default function Camera({ camera }) {
   const [viewMode, setViewMode] = useState('live');

   const cameraConfig = config?.cameras[camera];
-  const restreamEnabled = cameraConfig && Object.keys(config.go2rtc.streams || {}).includes(cameraConfig.live.stream_name);
+  const restreamEnabled =
+    cameraConfig && Object.keys(config.go2rtc.streams || {}).includes(cameraConfig.live.stream_name);
   const jsmpegWidth = cameraConfig
     ? Math.round(cameraConfig.live.height * (cameraConfig.detect.width / cameraConfig.detect.height))
     : 0;
@@ -63,6 +64,10 @@ export default function Camera({ camera }) {
     return <ActivityIndicator />;
   }

+  if (!restreamEnabled) {
+    setViewSource('jsmpeg');
+  }
+
   const optionContent = showSettings ? (
     <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-4 gap-4">
       <Switch
@@ -4,6 +4,7 @@ import { useCallback, useEffect, useState } from 'preact/hooks';
 import ButtonsTabbed from '../components/ButtonsTabbed';
 import useSWR from 'swr';
 import Button from '../components/Button';
+import copy from 'copy-to-clipboard';

 export default function Logs() {
   const [logService, setLogService] = useState('frigate');

@@ -14,10 +15,7 @@ export default function Logs() {
   const { data: nginxLogs } = useSWR('logs/nginx');

   const handleCopyLogs = useCallback(() => {
-    async function copy() {
-      await window.navigator.clipboard.writeText(logs);
-    }
-    copy();
+    copy(logs);
   }, [logs]);

   useEffect(() => {
@@ -9,6 +9,7 @@ import axios from 'axios';
 import { Table, Tbody, Thead, Tr, Th, Td } from '../components/Table';
 import { useState } from 'preact/hooks';
 import Dialog from '../components/Dialog';
+import TimeAgo from '../components/TimeAgo';
 import copy from 'copy-to-clipboard';

 const emptyObject = Object.freeze({});
@@ -84,6 +85,12 @@ export default function System() {
         System <span className="text-sm">{service.version}</span>
       </Heading>

+      {service.last_updated && (
+        <p>
+          <span>Last refreshed: <TimeAgo time={service.last_updated * 1000} dense /></span>
+        </p>
+      )}
+
       {state.showFfprobe && (
         <Dialog>
           <div className="p-4">
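Note the `* 1000`: the backend stores `last_updated` as a Unix timestamp in seconds (`int(time.time())` in the stats diff above), while JavaScript `Date` math expects milliseconds, so e.g. 1700000000 becomes 1700000000000 before being handed to `TimeAgo`.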
@@ -180,8 +187,9 @@ export default function System() {
             <div className="p-2">
               {gpu_usages[gpu]['gpu'] == -1 ? (
                 <div className="p-4">
-                  There was an error getting usage stats. Either your GPU does not support this or Frigate does
-                  not have proper access.
+                  There was an error getting usage stats. This does not mean hardware acceleration is not working.
+                  Either your GPU does not support this or Frigate does not have proper access to get statistics.
+                  This is expected for the Home Assistant addon.
                 </div>
               ) : (
                 <Table className="w-full">
@@ -247,11 +255,11 @@ export default function System() {
                   <Td>{cameras[camera]['pid'] || '- '}</Td>

                   {(() => {
                     if (cameras[camera]['pid'] && cameras[camera]['detection_enabled'] == 1)
                       return <Td>{cameras[camera]['detection_fps']} ({cameras[camera]['skipped_fps']} skipped)</Td>
                     else if (cameras[camera]['pid'] && cameras[camera]['detection_enabled'] == 0)
                       return <Td>disabled</Td>

                     return <Td>- </Td>
                   })()}
