forked from Github/frigate
Compare commits: 36 commits (v0.12.0-be ... v0.12.0-be)
| Author | SHA1 | Date |
|---|---|---|
| | e8d8cc4f55 | |
| | c20c982ad0 | |
| | 962bdc7fa5 | |
| | a5e561c81d | |
| | c4ebafe777 | |
| | 7ed715b371 | |
| | 161e7b3fd7 | |
| | 42eaa13402 | |
| | 17c26c9fa9 | |
| | 318240c14c | |
| | 34bdf2fc10 | |
| | d97fa99ec5 | |
| | 9621b4b9a1 | |
| | 3611e874ca | |
| | fbf29667d4 | |
| | c13dd132ee | |
| | 3524d1a055 | |
| | a8c567d877 | |
| | 80135342c2 | |
| | c2b13fdbdf | |
| | 2797a60d4f | |
| | 0592c8b0e2 | |
| | 2b685ac343 | |
| | c901707670 | |
| | 27d3676ba5 | |
| | 52459bf348 | |
| | 6cfa73a284 | |
| | 7b26935462 | |
| | c9cd810c9f | |
| | 1715e2e09d | |
| | b69c0daadb | |
| | 56d2978bc8 | |
| | 1ef109e171 | |
| | 08ab9dedf7 | |
| | 3d90366af2 | |
| | c74c9ff161 | |
@@ -1,6 +1,6 @@
name: EdgeTpu Support Request
description: Support for setting up EdgeTPU in Frigate
title: "[EdgeTPU Support]: "
name: Detector Support Request
description: Support for setting up object detector in Frigate (Coral, OpenVINO, TensorRT, etc.)
title: "[Detector Support]: "
labels: ["support", "triage"]
assignees: []
body:
.github/workflows/ci.yml (vendored, 5 changes)

@@ -19,6 +19,11 @@ jobs:
    runs-on: ubuntu-latest
    name: Image Build
    steps:
      - name: Remove unnecessary files
        run: |
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf /opt/ghc
      - id: lowercaseRepo
        uses: ASzc/change-string-case-action@v5
        with:
@@ -27,7 +27,7 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
FROM wget AS go2rtc
ARG TARGETARCH
WORKDIR /rootfs/usr/local/go2rtc/bin
RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.1.2/go2rtc_linux_${TARGETARCH}" \
RUN wget -qO go2rtc "https://github.com/AlexxIT/go2rtc/releases/download/v1.2.0/go2rtc_linux_${TARGETARCH}" \
    && chmod +x go2rtc
@@ -207,6 +207,10 @@ FROM deps AS devcontainer
# But start a fake service for simulating the logs
COPY docker/fake_frigate_run /etc/s6-overlay/s6-rc.d/frigate/run

# Create symbolic link to the frigate source code, as go2rtc's create_config.sh uses it
RUN mkdir -p /opt/frigate \
    && ln -svf /workspace/frigate/frigate /opt/frigate/frigate

# Install Node 16
RUN apt-get update \
    && apt-get install wget -y \
@@ -2,7 +2,7 @@

set -euxo pipefail

s6_version="3.1.3.0"
s6_version="3.1.4.1"

if [[ "${TARGETARCH}" == "amd64" ]]; then
    s6_arch="x86_64"
@@ -4,6 +4,8 @@

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

declare exit_code_container
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
readonly exit_code_container
@@ -11,20 +13,16 @@ readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="Frigate"

echo "Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})" >&2
echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
        echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
    fi
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
        echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
    fi
else
    # Exit code 0 is expected when Frigate is restarted by the user. In this case,
    # we create a signal for the go2rtc finish script to tolerate the restart.
    touch /dev/shm/restarting-frigate
fi

exec /run/s6/basedir/bin/halt
@@ -4,12 +4,14 @@

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

# Tell S6-Overlay not to restart this service
s6-svc -O .

echo "[INFO] Starting Frigate..." >&2
echo "[INFO] Starting Frigate..."

cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate" >&2
cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate"

# Replace the bash process with the Frigate process, redirecting stderr to stdout
exec 2>&1
docker/rootfs/etc/s6-overlay/s6-rc.d/go2rtc-healthcheck/finish (new executable file, 12 lines)

@@ -0,0 +1,12 @@
#!/command/with-contenv bash
# shellcheck shell=bash

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="go2rtc-healthcheck"

echo "[INFO] The ${service} service exited with code ${exit_code_service} (by signal ${exit_code_signal})"

@@ -0,0 +1 @@
go2rtc-log

docker/rootfs/etc/s6-overlay/s6-rc.d/go2rtc-healthcheck/run (new executable file, 22 lines)

@@ -0,0 +1,22 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Start the go2rtc-healthcheck service

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

# Give some additional time for go2rtc to start before start pinging
sleep 10s
echo "[INFO] Starting go2rtc healthcheck service..."

while sleep 30s; do
    # Check if the service is running
    if ! curl --connect-timeout 10 --fail --silent --show-error --output /dev/null http://127.0.0.1:1984/api/streams 2>&1; then
        echo "[ERROR] The go2rtc service is not responding to ping, restarting..."
        # We can also use -r instead of -t to send kill signal rather than term
        s6-svc -t /var/run/service/go2rtc 2>&1
        # Give some additional time to go2rtc to restart before start pinging again
        sleep 10s
    fi
done
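The new `go2rtc-healthcheck` service simply polls go2rtc's `/api/streams` endpoint and asks s6 to restart go2rtc when it stops answering. For comparison only, the same liveness probe could be written as a container-level healthcheck; this is an illustrative sketch (the image name and values are assumptions, and Docker only marks the container unhealthy, so the actual restart in this commit still comes from `s6-svc -t`):

```yaml
# Illustrative sketch, not part of this change: the same probe as a Compose healthcheck.
services:
  go2rtc:
    image: alexxit/go2rtc   # assumed image for a standalone go2rtc container
    healthcheck:
      test: ["CMD", "curl", "--connect-timeout", "10", "--fail", "--silent", "http://127.0.0.1:1984/api/streams"]
      start_period: 10s     # mirrors the initial `sleep 10s`
      interval: 30s         # mirrors the `while sleep 30s` loop
      timeout: 10s
```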
@@ -0,0 +1 @@
5000

@@ -0,0 +1 @@
longrun

@@ -1 +1,2 @@
go2rtc
go2rtc-healthcheck
@@ -1,32 +1,12 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Take down the S6 supervision tree when the service exits

set -o errexit -o nounset -o pipefail

declare exit_code_container
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
readonly exit_code_container
# Logs should be sent to stdout so that s6 can collect them

readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="go2rtc"

echo "Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})" >&2

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
    fi
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
    fi
else
    # go2rtc is not supposed to exit, so even when it exits with 0 we make the
    # container with 1. We only tolerate it when Frigate is restarting.
    if [[ "${exit_code_container}" -eq 0 && ! -f /dev/shm/restarting-frigate ]]; then
        echo "1" > /run/s6-linux-init-container-results/exitcode
    fi
fi

exec /run/s6/basedir/bin/halt
echo "[INFO] The ${service} service exited with code ${exit_code_service} (by signal ${exit_code_signal})"
@@ -4,8 +4,7 @@

set -o errexit -o nounset -o pipefail

# Tell S6-Overlay not to restart this service
s6-svc -O .
# Logs should be sent to stdout so that s6 can collect them

function get_ip_and_port_from_supervisor() {
    local ip_address
@@ -19,9 +18,9 @@ function get_ip_and_port_from_supervisor() {
        jq --exit-status --raw-output '.data.ipv4.address[0]'
    ) && [[ "${ip_address}" =~ ${ip_regex} ]]; then
        ip_address="${BASH_REMATCH[1]}"
        echo "[INFO] Got IP address from supervisor: ${ip_address}" >&2
        echo "[INFO] Got IP address from supervisor: ${ip_address}"
    else
        echo "[WARN] Failed to get IP address from supervisor" >&2
        echo "[WARN] Failed to get IP address from supervisor"
        return 0
    fi
@@ -35,26 +34,28 @@ function get_ip_and_port_from_supervisor() {
        jq --exit-status --raw-output '.data.network["8555/tcp"]'
    ) && [[ "${webrtc_port}" =~ ${port_regex} ]]; then
        webrtc_port="${BASH_REMATCH[1]}"
        echo "[INFO] Got WebRTC port from supervisor: ${webrtc_port}" >&2
        echo "[INFO] Got WebRTC port from supervisor: ${webrtc_port}"
    else
        echo "[WARN] Failed to get WebRTC port from supervisor" >&2
        echo "[WARN] Failed to get WebRTC port from supervisor"
        return 0
    fi

    export FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL="${ip_address}:${webrtc_port}"
}

echo "[INFO] Preparing go2rtc config..." >&2
if [[ ! -f "/dev/shm/go2rtc.yaml" ]]; then
    echo "[INFO] Preparing go2rtc config..."

    if [[ -n "${SUPERVISOR_TOKEN:-}" ]]; then
        # Running as a Home Assistant add-on, infer the IP address and port
        get_ip_and_port_from_supervisor
    if [[ -n "${SUPERVISOR_TOKEN:-}" ]]; then
        # Running as a Home Assistant add-on, infer the IP address and port
        get_ip_and_port_from_supervisor
    fi

    python3 /usr/local/go2rtc/create_config.py
fi

raw_config=$(python3 /usr/local/go2rtc/create_config.py)

echo "[INFO] Starting go2rtc..." >&2
echo "[INFO] Starting go2rtc..."

# Replace the bash process with the go2rtc process, redirecting stderr to stdout
exec 2>&1
exec go2rtc -config="${raw_config}"
exec go2rtc -config=/dev/shm/go2rtc.yaml
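go2rtc no longer receives its configuration as an inline string; the run script now expects `create_config.py` (shown further down) to expand the `go2rtc:` block of the Frigate config and write it to `/dev/shm/go2rtc.yaml`. For orientation, a typical user-side block looks roughly like this (camera name, host, and credentials are placeholders):

```yaml
# Illustrative input only: the go2rtc section of the Frigate config that create_config.py reads.
go2rtc:
  streams:
    back_yard:
      - rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/cam/realmonitor?channel=1&subtype=0
```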
@@ -4,6 +4,8 @@

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

declare exit_code_container
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
readonly exit_code_container
@@ -11,18 +13,18 @@ readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="NGINX"

echo "Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})" >&2
echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + exit_code_signal)) > /run/s6-linux-init-container-results/exitcode
        echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
    fi
    if [[ "${exit_code_signal}" -eq 15 ]]; then
        exec /run/s6/basedir/bin/halt
    fi
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" > /run/s6-linux-init-container-results/exitcode
        echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
    fi
    exec /run/s6/basedir/bin/halt
fi
@@ -4,7 +4,9 @@

set -o errexit -o nounset -o pipefail

echo "[INFO] Starting NGINX..." >&2
# Logs should be sent to stdout so that s6 can collect them

echo "[INFO] Starting NGINX..."

# Replace the bash process with the NGINX process, redirecting stderr to stdout
exec 2>&1
@@ -1,5 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash

exec 2>&1
exec python3 -u -m frigate "${@}"
@@ -5,8 +5,13 @@ import os
import sys
import yaml

sys.path.insert(0, "/opt/frigate")
from frigate.const import BIRDSEYE_PIPE, BTBN_PATH
from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode

sys.path.remove("/opt/frigate")

BTBN_PATH = "/usr/lib/btbn-ffmpeg"
FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith("FRIGATE_")}
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
@@ -19,12 +24,18 @@ with open(config_file) as f:
    raw_config = f.read()

if config_file.endswith((".yaml", ".yml")):
    config = yaml.safe_load(raw_config)
    config: dict[str, any] = yaml.safe_load(raw_config)
elif config_file.endswith(".json"):
    config = json.loads(raw_config)
    config: dict[str, any] = json.loads(raw_config)

go2rtc_config: dict[str, any] = config.get("go2rtc", {})

# Need to enable CORS for go2rtc so the frigate integration / card work automatically
if go2rtc_config.get("api") is None:
    go2rtc_config["api"] = {"origin": "*"}
elif go2rtc_config["api"].get("origin") is None:
    go2rtc_config["api"]["origin"] = "*"

# we want to ensure that logs are easy to read
if go2rtc_config.get("log") is None:
    go2rtc_config["log"] = {"format": "text"}
@@ -34,7 +45,9 @@ elif go2rtc_config["log"].get("format") is None:
if not go2rtc_config.get("webrtc", {}).get("candidates", []):
    default_candidates = []
    # use internal candidate if it was discovered when running through the add-on
    internal_candidate = os.environ.get("FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL", None)
    internal_candidate = os.environ.get(
        "FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL", None
    )
    if internal_candidate is not None:
        default_candidates.append(internal_candidate)
    # should set default stun server so webrtc can work
@@ -42,8 +55,10 @@ if not go2rtc_config.get("webrtc", {}).get("candidates", []):

    go2rtc_config["webrtc"] = {"candidates": default_candidates}
else:
    print("[INFO] Not injecting WebRTC candidates into go2rtc config as it has been set manually", file=sys.stderr)
    print(
        "[INFO] Not injecting WebRTC candidates into go2rtc config as it has been set manually",
    )

# sets default RTSP response to be equivalent to ?video=h264,h265&audio=aac
# this means user does not need to specify audio codec when using restream
# as source for frigate and the integration supports HLS playback
@@ -62,14 +77,30 @@ if not os.path.exists(BTBN_PATH):
    go2rtc_config["ffmpeg"][
        "rtsp"
    ] = "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"


for name in go2rtc_config.get("streams", {}):
    stream = go2rtc_config["streams"][name]

    if isinstance(stream, str):
        go2rtc_config["streams"][name] = go2rtc_config["streams"][name].format(**FRIGATE_ENV_VARS)
        go2rtc_config["streams"][name] = go2rtc_config["streams"][name].format(
            **FRIGATE_ENV_VARS
        )
    elif isinstance(stream, list):
        for i, stream in enumerate(stream):
            go2rtc_config["streams"][name][i] = stream.format(**FRIGATE_ENV_VARS)

print(json.dumps(go2rtc_config))
# add birdseye restream stream if enabled
if config.get("birdseye", {}).get("restream", False):
    birdseye: dict[str, any] = config.get("birdseye")

    input = f"-f rawvideo -pix_fmt yuv420p -video_size {birdseye.get('width', 1280)}x{birdseye.get('height', 720)} -r 10 -i {BIRDSEYE_PIPE}"
    ffmpeg_cmd = f"exec:{parse_preset_hardware_acceleration_encode(config.get('ffmpeg', {}).get('hwaccel_args'), input, '-rtsp_transport tcp -f rtsp {output}')}"

    if go2rtc_config.get("streams"):
        go2rtc_config["streams"]["birdseye"] = ffmpeg_cmd
    else:
        go2rtc_config["streams"] = {"birdseye": ffmpeg_cmd}

# Write go2rtc_config to /dev/shm/go2rtc.yaml
with open("/dev/shm/go2rtc.yaml", "w") as f:
    yaml.dump(go2rtc_config, f)
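The script therefore takes the user's `go2rtc:` block, injects the defaults above (CORS origin, text logs, WebRTC candidates), substitutes `FRIGATE_`-prefixed environment variables into stream URLs, optionally adds a `birdseye` restream, and writes the result to `/dev/shm/go2rtc.yaml`. A rough sketch of what the generated file can look like; the concrete values shown (addresses, the STUN candidate) are illustrative assumptions, not output from a real run:

```yaml
# Illustrative sketch of a generated /dev/shm/go2rtc.yaml, not captured from a real run.
api:
  origin: "*"                 # injected so the Home Assistant integration / card work
log:
  format: text                # injected so logs are easy to read
webrtc:
  candidates:
    - 192.168.1.10:8555       # from FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL, if discovered
    - stun:8555               # assumed default STUN candidate
streams:
  back_yard:
    - rtsp://viewer:secret@10.0.10.10:554/cam/realmonitor   # FRIGATE_ env vars already substituted
  birdseye: exec:ffmpeg ...   # added only when birdseye restream is enabled (see the code above)
```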
@@ -126,19 +126,17 @@ cameras:
        input_args: preset-rtsp-restream
        roles:
          - detect
    detect:
      width: 896
      height: 672
      fps: 7
```

### Unifi Protect Cameras

In the Unifi 2.0 update Unifi Protect Cameras had a change in audio sample rate which causes issues for ffmpeg. The input rate needs to be set for record and rtmp.
Unifi Protect cameras require the rtspx stream to be used with go2rtc (https://github.com/AlexxIT/go2rtc#source-rtsp).

In the Unifi 2.0 update Unifi Protect Cameras had a change in audio sample rate which causes issues for ffmpeg. The input rate needs to be set for record and rtmp if used directly with Unifi Protect.

```yaml
ffmpeg:
  output_args:
    record: preset-record-ubiquiti
    rtmp: preset-rtmp-ubiquiti
    rtmp: preset-rtmp-ubiquiti # recommend using go2rtc instead
```
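A hedged sketch of the go2rtc approach for a UniFi Protect camera (host, port, and token are placeholders; see the go2rtc source-rtsp link above for the exact URL format):

```yaml
# Illustrative only: restream the Protect rtspx stream through go2rtc, then consume it locally.
go2rtc:
  streams:
    front_door:
      - "rtspx://192.168.1.1:7441/SOMELONGTOKEN"   # placeholder NVR address and token
cameras:
  front_door:
    ffmpeg:
      inputs:
        - path: rtsp://127.0.0.1:8554/front_door   # go2rtc's local RTSP restream
          input_args: preset-rtsp-restream
          roles:
            - detect
```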
@@ -119,7 +119,7 @@ model:
  labelmap_path: /openvino-model/coco_91cl_bkgr.txt
```

This detector also supports YOLOx models, and has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. Frigate does not come with the `yolox_tiny` model; you will need to follow the [OpenVINO documentation](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) to provide your own model to Frigate. There is currently no support for other types of YOLO models (YOLOv3, YOLOv4, etc...). Below is an example of how `yolox_tiny` and other yolox variants can be used in Frigate:
This detector also supports some YOLO variants: YOLOX, YOLOv5, and YOLOv8 specifically. Other YOLO variants are not officially supported/tested. Frigate does not come with any YOLO models preloaded, so you will need to supply your own models. This detector has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. You can follow [these instructions](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny#download-a-model-and-convert-it-into-openvino-ir-format) to retrieve the OpenVINO-compatible `yolox_tiny` model. Make sure that the model input dimensions match the `width` and `height` parameters, and that `model_type` is set accordingly. See the [Full Configuration Reference](/configuration/index.md#full-configuration-reference) for the list of possible `model_type` options. Below is an example of how `yolox_tiny` can be used in Frigate:

```yaml
detectors:
@@ -231,6 +231,10 @@ yolov4x-mish-320
yolov4x-mish-640
yolov7-tiny-288
yolov7-tiny-416
yolov7-640
yolov7-320
yolov7x-640
yolov7x-320
```
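The `yolox_tiny` example referenced above is cut off here after its opening `detectors:` line. As a hedged sketch (not the verbatim docs example), such a configuration generally looks like the following; paths, input size, tensor layout, and labelmap are placeholders that must match your converted model:

```yaml
# Illustrative sketch only, not the verbatim docs example.
detectors:
  ov:
    type: openvino
    device: AUTO
    model:
      path: /path/to/yolox_tiny.xml
model:
  width: 416              # must match the model's input dimensions
  height: 416
  input_tensor: nchw      # check the layout your converted model expects
  model_type: yolox
  labelmap_path: /path/to/coco_80cl.txt
```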

### Configuration Parameters
@@ -19,7 +19,6 @@ cameras:
      - path: rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
        roles:
          - detect
          - restream
    detect:
      width: 1280
      height: 720
@@ -106,7 +105,7 @@ model:
  # Valid values are nhwc or nchw (default: shown below)
  input_tensor: nhwc
  # Optional: Object detection model type, currently only used with the OpenVINO detector
  # Valid values are ssd or yolox (default: shown below)
  # Valid values are ssd, yolox, yolov5, or yolov8 (default: shown below)
  model_type: ssd
  # Optional: Label name modifications. These are merged into the standard labelmap.
  labelmap:
@@ -149,7 +148,7 @@ birdseye:
# More information about presets at https://docs.frigate.video/configuration/ffmpeg_presets
ffmpeg:
  # Optional: global ffmpeg args (default: shown below)
  global_args: -hide_banner -loglevel warning
  global_args: -hide_banner -loglevel warning -threads 1
  # Optional: global hwaccel args (default: shown below)
  # NOTE: See hardware acceleration docs for your specific device
  hwaccel_args: []
@@ -158,7 +157,7 @@ ffmpeg:
  # Optional: global output args
  output_args:
    # Optional: output args for detect streams (default: shown below)
    detect: -f rawvideo -pix_fmt yuv420p
    detect: -threads 1 -f rawvideo -pix_fmt yuv420p
    # Optional: output args for record streams (default: shown below)
    record: preset-record-generic
    # Optional: output args for rtmp streams (default: shown below)
@@ -175,7 +174,6 @@ detect:
  # NOTE: Recommended value of 5. Ideally, try and reduce your FPS on the camera.
  fps: 5
  # Optional: enables detection for the camera (default: True)
  # This value can be set via MQTT and will be updated in startup based on retained value
  enabled: True
  # Optional: Number of frames without a detection before Frigate considers an object to be gone. (default: 5x the frame rate)
  max_disappeared: 25
@@ -323,7 +321,6 @@ record:
# NOTE: Can be overridden at the camera level
snapshots:
  # Optional: Enable writing jpg snapshot to /media/frigate/clips (default: shown below)
  # This value can be set via MQTT and will be updated in startup based on retained value
  enabled: False
  # Optional: save a clean PNG copy of the snapshot image (default: shown below)
  clean_copy: True
@@ -353,7 +350,7 @@ rtmp:
  enabled: False

# Optional: Restream configuration
# Uses https://github.com/AlexxIT/go2rtc (v1.1.2)
# Uses https://github.com/AlexxIT/go2rtc (v1.2.0)
go2rtc:

# Optional: jsmpeg stream configuration for WebUI
@@ -408,12 +405,12 @@ cameras:
      # Required: the path to the stream
      # NOTE: path may include environment variables, which must begin with 'FRIGATE_' and be referenced in {}
      - path: rtsp://viewer:{FRIGATE_RTSP_PASSWORD}@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
        # Required: list of roles for this stream. valid values are: detect,record,restream,rtmp
        # NOTICE: In addition to assigning the record, restream, and rtmp roles,
        # Required: list of roles for this stream. valid values are: detect,record,rtmp
        # NOTICE: In addition to assigning the record and rtmp roles,
        # they must also be enabled in the camera config.
        roles:
          - detect
          - restream
          - record
          - rtmp
        # Optional: stream specific global args (default: inherit)
        # global_args:
@@ -489,6 +486,29 @@ ui:
  timezone: None
  # Optional: Use an experimental recordings / camera view UI (default: shown below)
  experimental_ui: False
  # Optional: Set the time format used.
  # Options are browser, 12hour, or 24hour (default: shown below)
  time_format: browser
  # Optional: Set the date style for a specified length.
  # Options are: full, long, medium, short
  # Examples:
  #   short: 2/11/23
  #   medium: Feb 11, 2023
  #   full: Saturday, February 11, 2023
  # (default: shown below).
  date_style: short
  # Optional: Set the time style for a specified length.
  # Options are: full, long, medium, short
  # Examples:
  #   short: 8:14 PM
  #   medium: 8:15:22 PM
  #   full: 8:15:22 PM Mountain Standard Time
  # (default: shown below).
  time_style: medium
  # Optional: Ability to manually override the date / time styling to use strftime format
  # https://www.gnu.org/software/libc/manual/html_node/Formatting-Calendar-Time.html
  # possible values are shown above (default: not set)
  strftime_fmt: "%Y/%m/%d %H:%M"

# Optional: Telemetry configuration
telemetry:
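As a short usage example, a 24-hour UI with a longer date could combine the new `ui` options above like this (the values are just one possible pick from the listed options):

```yaml
ui:
  time_format: 24hour
  date_style: medium
  time_style: short
  # or bypass the presets entirely with a strftime pattern:
  # strftime_fmt: "%Y/%m/%d %H:%M"
```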
@@ -11,6 +11,24 @@ During testing, enable the Zones option for the debug feed so you can adjust as

To create a zone, follow [the steps for a "Motion mask"](masks.md), but use the section of the web UI for creating a zone instead.

### Restricting events to specific zones

Often you will only want events to be created when an object enters areas of interest. This is done using zones along with setting `required_zones`. Let's say you only want to be notified when an object enters your `entire_yard` zone; the config would be:

```yaml
camera:
  record:
    events:
      required_zones:
        - entire_yard
  snapshots:
    required_zones:
      - entire_yard
  zones:
    entire_yard:
      coordinates: ...
```

### Restricting zones to specific objects

Sometimes you want to limit a zone to specific object types to have more granular control of when events/snapshots are saved. The following example will limit one zone to person objects and the other to cars.
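A minimal sketch of such a configuration (zone names, coordinates, and object lists are placeholders):

```yaml
# Illustrative sketch: one zone limited to people, the other to cars.
camera:
  zones:
    yard:
      coordinates: ...
      objects:
        - person
    driveway:
      coordinates: ...
      objects:
        - car
```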
@@ -80,10 +80,15 @@ The TensorRT detector is able to run on x86 hosts that have an Nvidia GPU which
Inference speeds will vary greatly depending on the GPU and the model used.
`tiny` variants are faster than the equivalent non-tiny model; some known examples are below:

| Name     | Model           | Inference Speed |
| -------- | --------------- | --------------- |
| RTX 3050 | yolov4-tiny-416 | ~ 5 ms          |
| RTX 3050 | yolov7-tiny-416 | ~ 6 ms          |

| Name            | Inference Speed |
| --------------- | --------------- |
| GTX 1060 6GB    | ~ 7 ms          |
| GTX 1070        | ~ 6 ms          |
| GTX 1660 SUPER  | ~ 4 ms          |
| RTX 3050        | 5 - 7 ms        |
| RTX 3070 Mobile | ~ 5 ms          |
| Quadro P400 2GB | 20 - 25 ms      |
| Quadro P2000    | ~ 12 ms         |

## What does Frigate use the CPU for and what does it use a detector for? (ELI5 Version)
@@ -163,7 +163,10 @@ docker run -d \

:::caution

Due to limitations in Home Assistant Operating System, utilizing external storage for recordings or snapshots requires [modifying udev rules manually](https://community.home-assistant.io/t/solved-mount-usb-drive-in-hassio-to-be-used-on-the-media-folder-with-udev-customization/258406/46).
There are important limitations in Home Assistant Operating System to be aware of:

- Utilizing external storage for recordings or snapshots requires [modifying udev rules manually](https://community.home-assistant.io/t/solved-mount-usb-drive-in-hassio-to-be-used-on-the-media-folder-with-udev-customization/258406/46).
- AMD GPUs are not supported because HA OS does not include the mesa driver.
- Nvidia GPUs are not supported because addons do not support the nvidia runtime.

:::

@@ -194,6 +197,13 @@ There are several versions of the addon available:

## Home Assistant Supervised

:::caution

There are important limitations in Home Assistant Supervised to be aware of:

- Nvidia GPUs are not supported because addons do not support the nvidia runtime.

:::

:::tip

If possible, it is recommended to run Frigate standalone in Docker and use [Frigate's Proxy Addon](https://github.com/blakeblackshear/frigate-hass-addons/blob/main/frigate_proxy/README.md).
@@ -17,16 +17,18 @@ ffmpeg:
    record: preset-record-generic-audio-aac
```

### I can't view events or recordings in the Web UI.

Ensure your cameras send h264 encoded video, or [transcode them](/configuration/restream.md).

You can open `chrome://media-internals/` in another tab and then try playback again; the media internals page will give information about why playback is failing.
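If a camera can only produce h265, the linked restream page describes transcoding it through go2rtc. A hedged sketch of that idea follows; the stream name and source URL are placeholders, and the exact `ffmpeg:` source syntax should be checked against the go2rtc documentation:

```yaml
# Illustrative sketch: publish an h264 variant of an h265 camera via go2rtc.
go2rtc:
  streams:
    side_yard:
      - rtsp://user:password@192.168.1.20:554/stream   # original (h265) stream
      - "ffmpeg:side_yard#video=h264"                  # re-encoded h264 variant
```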
### My mjpeg stream or snapshots look green and crazy

This almost always means that the width/height defined for your camera are not correct. Double check the resolution with VLC or another player. Also make sure you don't have the width and height values backwards.

### I can't view events or recordings in the Web UI.

Ensure your cameras send h264 encoded video, or [transcode them](/configuration/restream.md).

### "[mov,mp4,m4a,3gp,3g2,mj2 @ 0x5639eeb6e140] moov atom not found"

These messages in the logs are expected in certain situations. Frigate checks the integrity of the recordings before storing. Occasionally these cached files will be invalid and cleaned up automatically.
@@ -28,7 +28,6 @@ from frigate.object_processing import TrackedObjectProcessor
from frigate.output import output_frames
from frigate.plus import PlusApi
from frigate.record import RecordingCleanup, RecordingMaintainer
from frigate.restream import RestreamApi
from frigate.stats import StatsEmitter, stats_init
from frigate.storage import StorageMaintainer
from frigate.version import VERSION
@@ -173,10 +172,6 @@ class FrigateApp:
            self.plus_api,
        )

    def init_restream(self) -> None:
        self.restream = RestreamApi(self.config)
        self.restream.add_cameras()

    def init_dispatcher(self) -> None:
        comms: list[Communicator] = []
@@ -381,7 +376,6 @@ class FrigateApp:
            print(e)
            self.log_process.terminate()
            sys.exit(1)
        self.init_restream()
        self.start_detectors()
        self.start_video_output_processor()
        self.start_detected_frames_processor()
@@ -66,12 +66,37 @@ class LiveModeEnum(str, Enum):
    webrtc = "webrtc"


class TimeFormatEnum(str, Enum):
    browser = "browser"
    hours12 = "12hour"
    hours24 = "24hour"


class DateTimeStyleEnum(str, Enum):
    full = "full"
    long = "long"
    medium = "medium"
    short = "short"


class UIConfig(FrigateBaseModel):
    live_mode: LiveModeEnum = Field(
        default=LiveModeEnum.mse, title="Default Live Mode."
    )
    timezone: Optional[str] = Field(title="Override UI timezone.")
    use_experimental: bool = Field(default=False, title="Experimental UI")
    time_format: TimeFormatEnum = Field(
        default=TimeFormatEnum.browser, title="Override UI time format."
    )
    date_style: DateTimeStyleEnum = Field(
        default=DateTimeStyleEnum.short, title="Override UI dateStyle."
    )
    time_style: DateTimeStyleEnum = Field(
        default=DateTimeStyleEnum.medium, title="Override UI timeStyle."
    )
    strftime_fmt: Optional[str] = Field(
        default=None, title="Override date and time format using strftime syntax."
    )


class TelemetryConfig(FrigateBaseModel):
@@ -370,9 +395,16 @@ class BirdseyeCameraConfig(BaseModel):
)


FFMPEG_GLOBAL_ARGS_DEFAULT = ["-hide_banner", "-loglevel", "warning"]
FFMPEG_GLOBAL_ARGS_DEFAULT = ["-hide_banner", "-loglevel", "warning", "-threads", "1"]
FFMPEG_INPUT_ARGS_DEFAULT = "preset-rtsp-generic"
DETECT_FFMPEG_OUTPUT_ARGS_DEFAULT = ["-f", "rawvideo", "-pix_fmt", "yuv420p"]
DETECT_FFMPEG_OUTPUT_ARGS_DEFAULT = [
    "-threads",
    "1",
    "-f",
    "rawvideo",
    "-pix_fmt",
    "yuv420p",
]
RTMP_FFMPEG_OUTPUT_ARGS_DEFAULT = "preset-rtmp-generic"
RECORD_FFMPEG_OUTPUT_ARGS_DEFAULT = "preset-record-generic"
@@ -26,6 +26,8 @@ class InputTensorEnum(str, Enum):
class ModelTypeEnum(str, Enum):
    ssd = "ssd"
    yolox = "yolox"
    yolov5 = "yolov5"
    yolov8 = "yolov8"


class ModelConfig(BaseModel):
@@ -13,12 +13,19 @@ from .detector_config import BaseDetectorConfig

logger = logging.getLogger(__name__)

plugin_modules = [
    importlib.import_module(name)
    for finder, name, ispkg in pkgutil.iter_modules(
        plugins.__path__, plugins.__name__ + "."
    )
]

_included_modules = pkgutil.iter_modules(plugins.__path__, plugins.__name__ + ".")

plugin_modules = []

for _, name, _ in _included_modules:
    try:
        # currently openvino may fail when importing
        # on an arm device with 64 KiB page size.
        plugin_modules.append(importlib.import_module(name))
    except ImportError as e:
        logger.error(f"Error importing detector runtime: {e}")


api_types = {det.type_key: det for det in DetectionApi.__subclasses__()}
@@ -5,7 +5,11 @@ from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig
from typing import Literal
from pydantic import Extra, Field
import tflite_runtime.interpreter as tflite

try:
    from tflite_runtime.interpreter import Interpreter
except ModuleNotFoundError:
    from tensorflow.lite.python.interpreter import Interpreter


logger = logging.getLogger(__name__)
@@ -22,7 +26,7 @@ class CpuTfl(DetectionApi):
    type_key = DETECTOR_KEY

    def __init__(self, detector_config: CpuDetectorConfig):
        self.interpreter = tflite.Interpreter(
        self.interpreter = Interpreter(
            model_path=detector_config.model.path or "/cpu_model.tflite",
            num_threads=detector_config.num_threads or 3,
        )
@@ -5,8 +5,11 @@ from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig
from typing import Literal
from pydantic import Extra, Field
import tflite_runtime.interpreter as tflite
from tflite_runtime.interpreter import load_delegate

try:
    from tflite_runtime.interpreter import Interpreter, load_delegate
except ModuleNotFoundError:
    from tensorflow.lite.python.interpreter import Interpreter, load_delegate


logger = logging.getLogger(__name__)
@@ -33,7 +36,7 @@ class EdgeTpuTfl(DetectionApi):
        logger.info(f"Attempting to load TPU as {device_config['device']}")
        edge_tpu_delegate = load_delegate("libedgetpu.so.1.0", device_config)
        logger.info("TPU found")
        self.interpreter = tflite.Interpreter(
        self.interpreter = Interpreter(
            model_path=detector_config.model.path or "/edgetpu_model.tflite",
            experimental_delegates=[edge_tpu_delegate],
        )
@@ -67,6 +67,18 @@ class OvDetector(DetectionApi):
|
||||
self.grids = np.concatenate(grids, 1)
|
||||
self.expanded_strides = np.concatenate(expanded_strides, 1)
|
||||
|
||||
## Takes in class ID, confidence score, and array of [x, y, w, h] that describes detection position,
|
||||
## returns an array that's easily passable back to Frigate.
|
||||
def process_yolo(self, class_id, conf, pos):
|
||||
return [
|
||||
class_id, # class ID
|
||||
conf, # confidence score
|
||||
(pos[1] - (pos[3] / 2)) / self.h, # y_min
|
||||
(pos[0] - (pos[2] / 2)) / self.w, # x_min
|
||||
(pos[1] + (pos[3] / 2)) / self.h, # y_max
|
||||
(pos[0] + (pos[2] / 2)) / self.w, # x_max
|
||||
]
|
||||
|
||||
def detect_raw(self, tensor_input):
|
||||
infer_request = self.interpreter.create_infer_request()
|
||||
infer_request.infer([tensor_input])
|
||||
@@ -113,23 +125,50 @@ class OvDetector(DetectionApi):
|
||||
ordered = dets[dets[:, 5].argsort()[::-1]][:20]
|
||||
|
||||
detections = np.zeros((20, 6), np.float32)
|
||||
i = 0
|
||||
|
||||
for object_detected in ordered:
|
||||
if i < 20:
|
||||
detections[i] = [
|
||||
object_detected[6], # Label ID
|
||||
object_detected[5], # Confidence
|
||||
(object_detected[1] - (object_detected[3] / 2))
|
||||
/ self.h, # y_min
|
||||
(object_detected[0] - (object_detected[2] / 2))
|
||||
/ self.w, # x_min
|
||||
(object_detected[1] + (object_detected[3] / 2))
|
||||
/ self.h, # y_max
|
||||
(object_detected[0] + (object_detected[2] / 2))
|
||||
/ self.w, # x_max
|
||||
]
|
||||
i += 1
|
||||
else:
|
||||
break
|
||||
for i, object_detected in enumerate(ordered):
|
||||
detections[i] = self.process_yolo(
|
||||
object_detected[6], object_detected[5], object_detected[:4]
|
||||
)
|
||||
return detections
|
||||
elif self.ov_model_type == ModelTypeEnum.yolov8:
|
||||
out_tensor = infer_request.get_output_tensor()
|
||||
results = out_tensor.data[0]
|
||||
output_data = np.transpose(results)
|
||||
scores = np.max(output_data[:, 4:], axis=1)
|
||||
if len(scores) == 0:
|
||||
return np.zeros((20, 6), np.float32)
|
||||
scores = np.expand_dims(scores, axis=1)
|
||||
# add scores to the last column
|
||||
dets = np.concatenate((output_data, scores), axis=1)
|
||||
# filter out lines with scores below threshold
|
||||
dets = dets[dets[:, -1] > 0.5, :]
|
||||
# limit to top 20 scores, descending order
|
||||
ordered = dets[dets[:, -1].argsort()[::-1]][:20]
|
||||
detections = np.zeros((20, 6), np.float32)
|
||||
|
||||
for i, object_detected in enumerate(ordered):
|
||||
detections[i] = self.process_yolo(
|
||||
np.argmax(object_detected[4:-1]),
|
||||
object_detected[-1],
|
||||
object_detected[:4],
|
||||
)
|
||||
return detections
|
||||
elif self.ov_model_type == ModelTypeEnum.yolov5:
|
||||
out_tensor = infer_request.get_output_tensor()
|
||||
output_data = out_tensor.data[0]
|
||||
# filter out lines with scores below threshold
|
||||
conf_mask = (output_data[:, 4] >= 0.5).squeeze()
|
||||
output_data = output_data[conf_mask]
|
||||
# limit to top 20 scores, descending order
|
||||
ordered = output_data[output_data[:, 4].argsort()[::-1]][:20]
|
||||
|
||||
detections = np.zeros((20, 6), np.float32)
|
||||
|
||||
for i, object_detected in enumerate(ordered):
|
||||
detections[i] = self.process_yolo(
|
||||
np.argmax(object_detected[5:]),
|
||||
object_detected[4],
|
||||
object_detected[:4],
|
||||
)
|
||||
return detections
|
||||
|
||||
@@ -92,47 +92,35 @@ PRESETS_HW_ACCEL_DECODE = {
|
||||
"cuda",
|
||||
"-hwaccel_output_format",
|
||||
"cuda",
|
||||
"-extra_hw_frames",
|
||||
"2",
|
||||
"-c:v",
|
||||
"h264_cuvid",
|
||||
],
|
||||
"preset-nvidia-h265": [
|
||||
"-hwaccel",
|
||||
"cuda",
|
||||
"-hwaccel_output_format",
|
||||
"cuda",
|
||||
"-extra_hw_frames",
|
||||
"2",
|
||||
"-c:v",
|
||||
"hevc_cuvid",
|
||||
],
|
||||
"preset-nvidia-mjpeg": [
|
||||
"-hwaccel",
|
||||
"cuda",
|
||||
"-hwaccel_output_format",
|
||||
"cuda",
|
||||
"-extra_hw_frames",
|
||||
"2",
|
||||
"-c:v",
|
||||
"mjpeg_cuvid",
|
||||
],
|
||||
}
|
||||
|
||||
PRESETS_HW_ACCEL_SCALE = {
|
||||
"preset-rpi-32-h264": "-r {0} -s {1}x{2} -f rawvideo -pix_fmt yuv420p",
|
||||
"preset-rpi-64-h264": "-r {0} -s {1}x{2} -f rawvideo -pix_fmt yuv420p",
|
||||
"preset-vaapi": "-r {0} -vf fps={0},scale_vaapi=w={1}:h={2},hwdownload,format=yuv420p -f rawvideo",
|
||||
"preset-intel-qsv-h264": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
|
||||
"preset-intel-qsv-h265": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
|
||||
"preset-nvidia-h264": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
|
||||
"preset-nvidia-h265": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p -f rawvideo",
|
||||
"preset-rpi-32-h264": "-r {0} -s {1}x{2}",
|
||||
"preset-rpi-64-h264": "-r {0} -s {1}x{2}",
|
||||
"preset-vaapi": "-r {0} -vf fps={0},scale_vaapi=w={1}:h={2},hwdownload,format=yuv420p",
|
||||
"preset-intel-qsv-h264": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p",
|
||||
"preset-intel-qsv-h265": "-r {0} -vf vpp_qsv=framerate={0}:w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p",
|
||||
"preset-nvidia-h264": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p",
|
||||
"preset-nvidia-h265": "-r {0} -vf fps={0},scale_cuda=w={1}:h={2}:format=nv12,hwdownload,format=nv12,format=yuv420p",
|
||||
"default": "-r {0} -s {1}x{2}",
|
||||
}
|
||||
|
||||
PRESETS_HW_ACCEL_ENCODE = {
|
||||
"preset-rpi-32-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m -g 50 -bf 0 {1}",
|
||||
"preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m -g 50 -bf 0 {1}",
|
||||
"preset-rpi-32-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
|
||||
"preset-rpi-64-h264": "ffmpeg -hide_banner {0} -c:v h264_v4l2m2m {1}",
|
||||
"preset-vaapi": "ffmpeg -hide_banner -hwaccel vaapi -hwaccel_output_format vaapi -hwaccel_device {2} {0} -c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0 -an -vf format=vaapi|nv12,hwupload {1}",
|
||||
"preset-intel-qsv-h264": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
"preset-intel-qsv-h265": "ffmpeg -hide_banner {0} -c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1 {1}",
|
||||
@@ -166,7 +154,9 @@ def parse_preset_hardware_acceleration_scale(
|
||||
scale = PRESETS_HW_ACCEL_SCALE.get(arg, "")
|
||||
|
||||
if scale:
|
||||
return scale.format(fps, width, height).split(" ")
|
||||
scale = scale.format(fps, width, height).split(" ")
|
||||
scale.extend(detect_args)
|
||||
return scale
|
||||
else:
|
||||
scale = scale.format(fps, width, height).split(" ")
|
||||
scale.extend(detect_args)
|
||||
|
||||
@@ -111,6 +111,7 @@ def events_summary():
|
||||
Event.select(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
@@ -124,6 +125,7 @@ def events_summary():
|
||||
.group_by(
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
fn.strftime(
|
||||
"%Y-%m-%d",
|
||||
fn.datetime(
|
||||
@@ -184,6 +186,18 @@ def send_to_plus(id):
|
||||
logger.error(message)
|
||||
return make_response(jsonify({"success": False, "message": message}), 404)
|
||||
|
||||
if event.end_time is None:
|
||||
logger.error(f"Unable to load clean png for in-progress event: {event.id}")
|
||||
return make_response(
|
||||
jsonify(
|
||||
{
|
||||
"success": False,
|
||||
"message": "Unable to load clean png for in-progress event",
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
if event.plus_id:
|
||||
message = "Already submitted to plus"
|
||||
logger.error(message)
|
||||
@@ -202,6 +216,15 @@ def send_to_plus(id):
|
||||
400,
|
||||
)
|
||||
|
||||
if image is None or image.size == 0:
|
||||
logger.error(f"Unable to load clean png for event: {event.id}")
|
||||
return make_response(
|
||||
jsonify(
|
||||
{"success": False, "message": "Unable to load clean png for event"}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
try:
|
||||
plus_id = current_app.plus_api.upload_image(image, event.camera)
|
||||
except Exception as ex:
|
||||
@@ -301,7 +324,9 @@ def get_sub_labels():
|
||||
sub_labels.remove(None)
|
||||
|
||||
if split_joined:
|
||||
for label in sub_labels:
|
||||
original_labels = sub_labels.copy()
|
||||
|
||||
for label in original_labels:
|
||||
if "," in label:
|
||||
sub_labels.remove(label)
|
||||
parts = label.split(",")
|
||||
@@ -310,6 +335,7 @@ def get_sub_labels():
|
||||
if not (part.strip()) in sub_labels:
|
||||
sub_labels.append(part.strip())
|
||||
|
||||
sub_labels.sort()
|
||||
return jsonify(sub_labels)
|
||||
|
||||
|
||||
@@ -617,7 +643,13 @@ def events():
|
||||
sub_label_clauses.append((Event.sub_label.is_null()))
|
||||
|
||||
for label in filtered_sub_labels:
|
||||
sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label}*"))
|
||||
sub_label_clauses.append(
|
||||
(Event.sub_label.cast("text") == label)
|
||||
) # include exact matches
|
||||
|
||||
# include this label when part of a list
|
||||
sub_label_clauses.append((Event.sub_label.cast("text") % f"*{label},*"))
|
||||
sub_label_clauses.append((Event.sub_label.cast("text") % f"*, {label}*"))
|
||||
|
||||
sub_label_clause = reduce(operator.or_, sub_label_clauses)
|
||||
clauses.append((sub_label_clause))
|
||||
@@ -1275,12 +1307,12 @@ def ffprobe():
|
||||
output.append(
|
||||
{
|
||||
"return_code": ffprobe.returncode,
|
||||
"stderr": json.loads(ffprobe.stderr.decode("unicode_escape").strip())
|
||||
if ffprobe.stderr.decode()
|
||||
else {},
|
||||
"stderr": ffprobe.stderr.decode("unicode_escape").strip()
|
||||
if ffprobe.returncode != 0
|
||||
else "",
|
||||
"stdout": json.loads(ffprobe.stdout.decode("unicode_escape").strip())
|
||||
if ffprobe.stdout.decode()
|
||||
else {},
|
||||
if ffprobe.returncode == 0
|
||||
else "",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@@ -38,16 +38,10 @@ class FFMpegConverter:
|
||||
quality: int,
|
||||
birdseye_rtsp: bool = False,
|
||||
):
|
||||
if birdseye_rtsp:
|
||||
if os.path.exists(BIRDSEYE_PIPE):
|
||||
os.remove(BIRDSEYE_PIPE)
|
||||
self.bd_pipe = None
|
||||
|
||||
os.mkfifo(BIRDSEYE_PIPE, mode=0o777)
|
||||
stdin = os.open(BIRDSEYE_PIPE, os.O_RDONLY | os.O_NONBLOCK)
|
||||
self.bd_pipe = os.open(BIRDSEYE_PIPE, os.O_WRONLY)
|
||||
os.close(stdin)
|
||||
else:
|
||||
self.bd_pipe = None
|
||||
if birdseye_rtsp:
|
||||
self.recreate_birdseye_pipe()
|
||||
|
||||
ffmpeg_cmd = [
|
||||
"ffmpeg",
|
||||
@@ -80,14 +74,36 @@ class FFMpegConverter:
|
||||
start_new_session=True,
|
||||
)
|
||||
|
||||
def recreate_birdseye_pipe(self) -> None:
|
||||
if self.bd_pipe:
|
||||
os.close(self.bd_pipe)
|
||||
|
||||
if os.path.exists(BIRDSEYE_PIPE):
|
||||
os.remove(BIRDSEYE_PIPE)
|
||||
|
||||
os.mkfifo(BIRDSEYE_PIPE, mode=0o777)
|
||||
stdin = os.open(BIRDSEYE_PIPE, os.O_RDONLY | os.O_NONBLOCK)
|
||||
self.bd_pipe = os.open(BIRDSEYE_PIPE, os.O_WRONLY)
|
||||
os.close(stdin)
|
||||
self.reading_birdseye = False
|
||||
|
||||
def write(self, b) -> None:
|
||||
self.process.stdin.write(b)
|
||||
|
||||
if self.bd_pipe:
|
||||
try:
|
||||
os.write(self.bd_pipe, b)
|
||||
self.reading_birdseye = True
|
||||
except BrokenPipeError:
|
||||
# catch error when no one is listening
|
||||
if self.reading_birdseye:
|
||||
# we know the pipe was being read from and now it is not
|
||||
# so we should recreate the pipe to ensure no partially-read
|
||||
# frames exist
|
||||
logger.debug(
|
||||
"Recreating the birdseye pipe because it was read from and now is not"
|
||||
)
|
||||
self.recreate_birdseye_pipe()
|
||||
|
||||
return
|
||||
|
||||
def read(self, length):
|
||||
|
||||
@@ -171,10 +171,12 @@ class RecordingMaintainer(threading.Thread):
|
||||
else:
|
||||
if duration == -1:
|
||||
logger.warning(
|
||||
f"Failed to probe corrupt segment {f}: {p.returncode} - {p.stderr}"
|
||||
f"Failed to probe corrupt segment {cache_path}: {p.returncode} - {p.stderr}"
|
||||
)
|
||||
|
||||
logger.warning(f"Discarding a corrupt recording segment: {f}")
|
||||
logger.warning(
|
||||
f"Discarding a corrupt recording segment: {cache_path}"
|
||||
)
|
||||
Path(cache_path).unlink(missing_ok=True)
|
||||
continue
|
||||
|
||||
|
||||
@@ -1,33 +0,0 @@
|
||||
"""Controls go2rtc restream."""
|
||||
|
||||
|
||||
import logging
|
||||
import requests
|
||||
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.const import BIRDSEYE_PIPE
|
||||
from frigate.ffmpeg_presets import (
|
||||
parse_preset_hardware_acceleration_encode,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RestreamApi:
|
||||
"""Control go2rtc relay API."""
|
||||
|
||||
def __init__(self, config: FrigateConfig) -> None:
|
||||
self.config: FrigateConfig = config
|
||||
|
||||
def add_cameras(self) -> None:
|
||||
"""Add cameras to go2rtc."""
|
||||
self.relays: dict[str, str] = {}
|
||||
|
||||
if self.config.birdseye.restream:
|
||||
self.relays[
|
||||
"birdseye"
|
||||
] = f"exec:{parse_preset_hardware_acceleration_encode(self.config.ffmpeg.hwaccel_args, f'-f rawvideo -pix_fmt yuv420p -video_size {self.config.birdseye.width}x{self.config.birdseye.height} -r 10 -i {BIRDSEYE_PIPE}', '-rtsp_transport tcp -f rtsp {output}')}"
|
||||
|
||||
for name, path in self.relays.items():
|
||||
params = {"src": path, "name": name}
|
||||
requests.put("http://127.0.0.1:1984/api/streams", params=params)
|
||||
41
web/package-lock.json
generated
41
web/package-lock.json
generated
@@ -10,6 +10,7 @@
|
||||
"dependencies": {
|
||||
"@cycjimmy/jsmpeg-player": "^6.0.5",
|
||||
"axios": "^1.2.2",
|
||||
"copy-to-clipboard": "3.3.3",
|
||||
"date-fns": "^2.29.3",
|
||||
"idb-keyval": "^6.2.0",
|
||||
"immer": "^9.0.16",
|
||||
@@ -19,6 +20,7 @@
|
||||
"preact-router": "^4.1.0",
|
||||
"react": "npm:@preact/compat@^17.1.2",
|
||||
"react-dom": "npm:@preact/compat@^17.1.2",
|
||||
"strftime": "^0.10.1",
|
||||
"swr": "^1.3.0",
|
||||
"video.js": "^7.20.3",
|
||||
"videojs-playlist": "^5.0.0",
|
||||
@@ -3369,6 +3371,14 @@
|
||||
"node": ">= 0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/copy-to-clipboard": {
|
||||
"version": "3.3.3",
|
||||
"resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz",
|
||||
"integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==",
|
||||
"dependencies": {
|
||||
"toggle-selection": "^1.0.6"
|
||||
}
|
||||
},
|
||||
"node_modules/core-js": {
|
||||
"version": "3.26.0",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.26.0.tgz",
|
||||
@@ -8547,6 +8557,14 @@
|
||||
"node": ">=8"
|
||||
}
|
||||
},
|
||||
"node_modules/strftime": {
|
||||
"version": "0.10.1",
|
||||
"resolved": "https://registry.npmjs.org/strftime/-/strftime-0.10.1.tgz",
|
||||
"integrity": "sha512-nVvH6JG8KlXFPC0f8lojLgEsPA18lRpLZ+RrJh/NkQV2tqOgZfbas8gcU8SFgnnqR3rWzZPYu6N2A3xzs/8rQg==",
|
||||
"engines": {
|
||||
"node": ">=0.2.0"
|
||||
}
|
||||
},
|
||||
"node_modules/strict-event-emitter": {
|
||||
"version": "0.2.8",
|
||||
"resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.2.8.tgz",
|
||||
@@ -8901,6 +8919,11 @@
|
||||
"node": ">=8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/toggle-selection": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz",
|
||||
"integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ=="
|
||||
},
|
||||
"node_modules/totalist": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.0.tgz",
|
||||
@@ -12151,6 +12174,14 @@
|
||||
"integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==",
|
||||
"dev": true
|
||||
},
|
||||
"copy-to-clipboard": {
|
||||
"version": "3.3.3",
|
||||
"resolved": "https://registry.npmjs.org/copy-to-clipboard/-/copy-to-clipboard-3.3.3.tgz",
|
||||
"integrity": "sha512-2KV8NhB5JqC3ky0r9PMCAZKbUHSwtEo4CwCs0KXgruG43gX5PMqDEBbVU4OUzw2MuAWUfsuFmWvEKG5QRfSnJA==",
|
||||
"requires": {
|
||||
"toggle-selection": "^1.0.6"
|
||||
}
|
||||
},
|
||||
"core-js": {
|
||||
"version": "3.26.0",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.26.0.tgz",
|
||||
@@ -15868,6 +15899,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"strftime": {
|
||||
"version": "0.10.1",
|
||||
"resolved": "https://registry.npmjs.org/strftime/-/strftime-0.10.1.tgz",
|
||||
"integrity": "sha512-nVvH6JG8KlXFPC0f8lojLgEsPA18lRpLZ+RrJh/NkQV2tqOgZfbas8gcU8SFgnnqR3rWzZPYu6N2A3xzs/8rQg=="
|
||||
},
|
||||
"strict-event-emitter": {
|
||||
"version": "0.2.8",
|
||||
"resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.2.8.tgz",
|
||||
@@ -16154,6 +16190,11 @@
|
||||
"is-number": "^7.0.0"
|
||||
}
|
||||
},
|
||||
"toggle-selection": {
|
||||
"version": "1.0.6",
|
||||
"resolved": "https://registry.npmjs.org/toggle-selection/-/toggle-selection-1.0.6.tgz",
|
||||
"integrity": "sha512-BiZS+C1OS8g/q2RRbJmy59xpyghNBqrr6k5L/uKBGRsTfxmu3ffiRnd8mlGPUVayg8pvfi5urfnu8TU7DVOkLQ=="
|
||||
},
|
||||
"totalist": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/totalist/-/totalist-3.0.0.tgz",
|
||||
|
||||
@@ -18,17 +18,18 @@
|
||||
"date-fns": "^2.29.3",
|
||||
"idb-keyval": "^6.2.0",
|
||||
"immer": "^9.0.16",
|
||||
"monaco-yaml": "^4.0.2",
|
||||
"preact": "^10.11.3",
|
||||
"preact-async-route": "^2.2.1",
|
||||
"preact-router": "^4.1.0",
|
||||
"react": "npm:@preact/compat@^17.1.2",
|
||||
"react-dom": "npm:@preact/compat@^17.1.2",
|
||||
"vite-plugin-monaco-editor": "^1.1.0",
|
||||
"monaco-yaml": "^4.0.2",
|
||||
"strftime": "^0.10.1",
|
||||
"swr": "^1.3.0",
|
||||
"video.js": "^7.20.3",
|
||||
"videojs-playlist": "^5.0.0",
|
||||
"videojs-seek-buttons": "^3.0.1"
|
||||
"videojs-seek-buttons": "^3.0.1",
|
||||
"vite-plugin-monaco-editor": "^1.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@preact/preset-vite": "^2.5.0",
|
||||
|
||||
@@ -21,7 +21,7 @@ export default function Dialog({ children, portalRootID = 'dialogs' }) {
|
||||
>
|
||||
<div
|
||||
role="modal"
|
||||
className={`absolute rounded shadow-2xl bg-white dark:bg-gray-700 max-w-sm text-gray-900 dark:text-white transition-transform transition-opacity duration-75 transform scale-90 opacity-0 ${
|
||||
className={`absolute rounded shadow-2xl bg-white dark:bg-gray-700 sm:max-w-sm md:max-w-md lg:max-w-lg text-gray-900 dark:text-white transition-transform transition-opacity duration-75 transform scale-90 opacity-0 ${
|
||||
show ? 'scale-100 opacity-100' : ''
|
||||
}`}
|
||||
>
|
||||
|
||||
@@ -13,9 +13,11 @@ export default function MultiSelect({ className, title, options, selection, onTo
|
||||
const [state, setState] = useState({
|
||||
showMenu: false,
|
||||
});
|
||||
|
||||
|
||||
const isOptionSelected = (item) => { return selection == "all" || selection.split(',').indexOf(item) > -1; }
|
||||
|
||||
|
||||
const menuHeight = Math.round(window.innerHeight * 0.55);
|
||||
|
||||
return (
|
||||
<div className={`${className} p-2`} ref={popupRef}>
|
||||
<div
|
||||
@@ -26,7 +28,7 @@ export default function MultiSelect({ className, title, options, selection, onTo
|
||||
<ArrowDropdown className="w-6" />
|
||||
</div>
|
||||
{state.showMenu ? (
|
||||
<Menu relativeTo={popupRef} onDismiss={() => setState({ showMenu: false })}>
|
||||
<Menu className={`max-h-[${menuHeight}px] overflow-scroll`} relativeTo={popupRef} onDismiss={() => setState({ showMenu: false })}>
|
||||
<div className="flex flex-wrap justify-between items-center">
|
||||
<Heading className="p-4 justify-center" size="md">{title}</Heading>
|
||||
<Button tabindex="false" className="mx-4" onClick={() => onShowAll() }>
|
||||
|
||||
@@ -80,7 +80,9 @@ export default function RelativeModal({
|
||||
|
||||
// too close to bottom
|
||||
if (top + menuHeight > windowHeight - WINDOW_PADDING + window.scrollY) {
|
||||
newTop = WINDOW_PADDING;
|
||||
// If the pop-up modal would extend beyond the bottom of the visible window,
|
||||
// reposition the modal to appear above the clicked icon instead
|
||||
newTop = top - menuHeight;
|
||||
}
|
||||
|
||||
if (top <= WINDOW_PADDING + window.scrollY) {
|
||||
|
||||
@@ -10,7 +10,10 @@ import useSWR from 'swr';
export default function Birdseye() {
const { data: config } = useSWR('config');

const [viewSource, setViewSource, sourceIsLoaded] = usePersistence('birdseye-source', 'mse');
const [viewSource, setViewSource, sourceIsLoaded] = usePersistence(
'birdseye-source',
getDefaultLiveMode(config)
);
const sourceValues = ['mse', 'webrtc', 'jsmpeg'];

if (!config || !sourceIsLoaded) {
@@ -80,3 +83,16 @@ export default function Birdseye() {
</div>
);
}

function getDefaultLiveMode(config) {
if (config) {
if (config.birdseye.restream) {
return config.ui.live_mode;
}

return 'jsmpeg';
}

return undefined;
}

@@ -32,7 +32,7 @@ export default function Camera({ camera }) {
: 0;
const [viewSource, setViewSource, sourceIsLoaded] = usePersistence(
`${camera}-source`,
getDefaultLiveMode(config, cameraConfig)
getDefaultLiveMode(config, cameraConfig, restreamEnabled)
);
const sourceValues = restreamEnabled ? ['mse', 'webrtc', 'jsmpeg'] : ['jsmpeg'];
const [options, setOptions] = usePersistence(`${camera}-feed`, emptyObject);

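The Camera page passes restreamEnabled into its live-mode helper, mirroring the Birdseye change above. The helper itself is not part of this hunk; the sketch below is hypothetical, inferred by analogy with the Birdseye getDefaultLiveMode and the sourceValues fallback shown above, and is not taken from the commit:

// Hypothetical sketch only; the real Camera.jsx helper is not shown in this hunk.
// Passing restreamEnabled lets the default live mode fall back to jsmpeg when the
// camera's restream is disabled, consistent with the sourceValues fallback above.
function getDefaultLiveMode(config, cameraConfig, restreamEnabled) {
  if (config && cameraConfig) {
    if (restreamEnabled) {
      return config.ui.live_mode;
    }

    return 'jsmpeg';
  }

  return undefined;
}
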
@@ -66,6 +66,7 @@ export default function Events({ path, ...props }) {
has_clip: false,
has_snapshot: false,
plus_id: undefined,
end_time: null,
});
const [deleteFavoriteState, setDeleteFavoriteState] = useState({
deletingFavoriteEventId: null,
@@ -190,6 +191,7 @@ export default function Events({ path, ...props }) {
has_clip: event.has_clip,
has_snapshot: event.has_snapshot,
plus_id: event.plus_id,
end_time: event.end_time,
}));
downloadButton.current = e.target;
setState({ ...state, showDownloadMenu: true });
@@ -287,9 +289,6 @@ export default function Events({ path, ...props }) {
return <ActivityIndicator />;
}

const timezone = config.ui?.timezone || Intl.DateTimeFormat().resolvedOptions().timeZone;
const locale = window.navigator?.language || 'en-US';

return (
<div className="space-y-4 p-2 px-4 w-full">
<Heading>Events</Heading>
@@ -366,7 +365,7 @@ export default function Events({ path, ...props }) {
download
/>
)}
{downloadEvent.has_snapshot && !downloadEvent.plus_id && (
{(downloadEvent.end_time && downloadEvent.has_snapshot && !downloadEvent.plus_id) && (
<MenuItem
icon={UploadPlus}
label={uploading.includes(downloadEvent.id) ? 'Uploading...' : 'Send to Frigate+'}
@@ -508,7 +507,7 @@ export default function Events({ path, ...props }) {
</div>
<div className="text-sm flex">
<Clock className="h-5 w-5 mr-2 inline" />
{formatUnixTimestampToDateTime(event.start_time, locale, timezone)}
{formatUnixTimestampToDateTime(event.start_time, { ...config.ui })}
<div className="hidden md:inline">
<span className="m-1">-</span>
<TimeAgo time={event.start_time * 1000} dense />
@@ -527,7 +526,7 @@ export default function Events({ path, ...props }) {
</div>
</div>
<div class="hidden sm:flex flex-col justify-end mr-2">
{event.has_snapshot && (
{(event.end_time && event.has_snapshot) && (
<Fragment>
{event.plus_id ? (
<div className="uppercase text-xs">Sent to Frigate+</div>

@@ -133,7 +133,7 @@ export default function Recording({ camera, date, hour = '00', minute = '00', se
return (
<div className="space-y-4 p-2 px-4">
<Heading>{camera.replaceAll('_', ' ')} Recordings</Heading>
<div className="text-xs">Dates and times are based on the browser's timezone {timezone}</div>
<div className="text-xs">Dates and times are based on the timezone {timezone}</div>

<VideoPlayer
options={{

@@ -49,14 +49,14 @@ export default function System() {
});

if (response.status === 200) {
setState({ ...state, showFfprobe: true, ffprobe: JSON.stringify(response.data, null, 2) });
setState({ ...state, showFfprobe: true, ffprobe: response.data });
} else {
setState({ ...state, showFfprobe: true, ffprobe: 'There was an error getting the ffprobe output.' });
}
};

const onCopyFfprobe = async () => {
copy(JSON.stringify(state.ffprobe, null, 2));
copy(JSON.stringify(state.ffprobe).replace(/[\\\s]+/gi, ''));
setState({ ...state, ffprobe: '', showFfprobe: false });
};

@@ -68,34 +68,95 @@ export default function System() {
const response = await axios.get('vainfo');

if (response.status === 200) {
setState({ ...state, showVainfo: true, vainfo: JSON.stringify(response.data, null, 2) });
setState({
...state,
showVainfo: true,
vainfo: response.data,
});
} else {
setState({ ...state, showVainfo: true, vainfo: 'There was an error getting the vainfo output.' });
}
};

const onCopyVainfo = async () => {
copy(JSON.stringify(state.vainfo, null, 2));
copy(JSON.stringify(state.vainfo).replace(/[\\\s]+/gi, ''));
setState({ ...state, vainfo: '', showVainfo: false });
};

return (
<div className="space-y-4 p-2 px-4">
<Heading>
System <span className="text-sm">{service.version}</span>
</Heading>
<div className="flex justify-between">
<Heading>
System <span className="text-sm">{service.version}</span>
</Heading>
{config && (
<Link
className="p-1 text-blue-500 hover:underline"
target="_blank"
rel="noopener noreferrer"
href="/live/webrtc/"
>
go2rtc dashboard
</Link>
)}
</div>

{service.last_updated && (
<p>
<span>Last refreshed: <TimeAgo time={service.last_updated * 1000} dense /></span>
<span>
Last refreshed: <TimeAgo time={service.last_updated * 1000} dense />
</span>
</p>
)}

{state.showFfprobe && (
<Dialog>
<div className="p-4">
<div className="p-4 mb-2 max-h-96 whitespace-pre-line overflow-scroll">
<Heading size="lg">Ffprobe Output</Heading>
{state.ffprobe != '' ? <p className="mb-2">{state.ffprobe}</p> : <ActivityIndicator />}
{state.ffprobe != '' ? (
<div>
{state.ffprobe.map((stream, idx) => (
<div key={idx} className="mb-2 max-h-96 whitespace-pre-line">
<div>Stream {idx}:</div>
<div className="px-2">Return Code: {stream.return_code}</div>
<br />
{stream.return_code == 0 ? (
<div>
{stream.stdout.streams.map((codec, idx) => (
<div className="px-2" key={idx}>
{codec.width ? (
<div>
<div>Video:</div>
<br />
<div>Codec: {codec.codec_long_name}</div>
<div>
Resolution: {codec.width}x{codec.height}
</div>
<div>FPS: {codec.avg_frame_rate == '0/0' ? 'Unknown' : codec.avg_frame_rate}</div>
<br />
</div>
) : (
<div>
<div>Audio:</div>
<br />
<div>Codec: {codec.codec_long_name}</div>
<br />
</div>
)}
</div>
))}
</div>
) : (
<div className="px-2">
<div>Error: {stream.stderr}</div>
</div>
)}
</div>
))}
</div>
) : (
<ActivityIndicator />
)}
</div>
<div className="p-2 flex justify-start flex-row-reverse space-x-2">
<Button className="ml-2" onClick={() => onCopyFfprobe()} type="text">
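
The reworked Ffprobe dialog above no longer renders a pre-stringified blob; it reads structured fields from state.ffprobe (return_code, stdout.streams, stderr). A minimal TypeScript sketch of the shape that JSX expects follows; the type names are hypothetical and do not exist in the codebase:

// Hypothetical types describing only the fields the Ffprobe dialog reads;
// the names FfprobeStream and FfprobeResult are illustrative, not part of Frigate.
interface FfprobeStream {
  codec_long_name: string;
  width?: number;           // present on video streams
  height?: number;
  avg_frame_rate?: string;  // e.g. '25/1', or '0/0' when unknown
}

interface FfprobeResult {
  return_code: number;                   // 0 indicates success
  stdout: { streams: FfprobeStream[] };  // parsed ffprobe JSON when successful
  stderr: string;                        // error text rendered when return_code != 0
}

// The dialog maps over state.ffprobe as FfprobeResult[].
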
@@ -114,10 +175,16 @@ export default function System() {

{state.showVainfo && (
<Dialog>
<div className="p-4">
<div className="p-4 overflow-scroll whitespace-pre-line">
<Heading size="lg">Vainfo Output</Heading>
{state.vainfo != '' ? (
<p className="mb-2 max-h-96 overflow-scroll">{state.vainfo}</p>
<div className="mb-2 max-h-96 whitespace-pre-line">
<div className="">Return Code: {state.vainfo.return_code}</div>
<br />
<div className="">Process {state.vainfo.return_code == 0 ? 'Output' : 'Error'}:</div>
<br />
<div>{state.vainfo.return_code == 0 ? state.vainfo.stdout : state.vainfo.stderr}</div>
</div>
) : (
<ActivityIndicator />
)}
@@ -256,11 +323,15 @@ export default function System() {

{(() => {
if (cameras[camera]['pid'] && cameras[camera]['detection_enabled'] == 1)
return <Td>{cameras[camera]['detection_fps']} ({cameras[camera]['skipped_fps']} skipped)</Td>
return (
<Td>
{cameras[camera]['detection_fps']} ({cameras[camera]['skipped_fps']} skipped)
</Td>
);
else if (cameras[camera]['pid'] && cameras[camera]['detection_enabled'] == 0)
return <Td>disabled</Td>
return <Td>disabled</Td>;

return <Td>- </Td>
return <Td>- </Td>;
})()}

<Td>{cpu_usages[cameras[camera]['pid']]?.['cpu'] || '- '}%</Td>

@@ -1,7 +1,8 @@
import strftime from 'strftime';
import { fromUnixTime, intervalToDuration, formatDuration } from 'date-fns';
export const longToDate = (long: number): Date => new Date(long * 1000);
export const epochToLong = (date: number): number => date / 1000;
export const dateToLong = (date: Date): number => epochToLong(date.getTime());
import { fromUnixTime, intervalToDuration, formatDuration } from 'date-fns';

const getDateTimeYesterday = (dateTime: Date): Date => {
const twentyFourHoursInMilliseconds = 24 * 60 * 60 * 1000;
@@ -17,28 +18,54 @@ export const getNowYesterdayInLong = (): number => {
};

/**
* This function takes in a unix timestamp, locale, timezone,
* and returns a dateTime string.
* If unixTimestamp is not provided, it returns 'Invalid time'
* @param unixTimestamp: number
* @param locale: string
* @param timezone: string
* @returns string - dateTime or 'Invalid time' if unixTimestamp is not provided
* This function takes in a Unix timestamp, configuration options for date/time display, and an optional strftime format string,
* and returns a formatted date/time string.
*
* If the Unix timestamp is not provided, it returns "Invalid time".
*
* The configuration options determine how the date and time are formatted.
* The `timezone` option allows you to specify a specific timezone for the output, otherwise the user's browser timezone will be used.
* The `use12hour` option allows you to display time in a 12-hour format if true, and 24-hour format if false.
* The `dateStyle` and `timeStyle` options allow you to specify pre-defined formats for displaying the date and time.
* The `strftime_fmt` option allows you to specify a custom format using the strftime syntax.
*
* If both `strftime_fmt` and `dateStyle`/`timeStyle` are provided, `strftime_fmt` takes precedence.
*
* @param unixTimestamp The Unix timestamp to format
* @param config An object containing the configuration options for date/time display
* @returns The formatted date/time string, or "Invalid time" if the Unix timestamp is not provided or invalid.
*/
export const formatUnixTimestampToDateTime = (unixTimestamp: number, locale: string, timezone: string): string => {
interface DateTimeStyle {
timezone: string;
time_format: 'browser' | '12hour' | '24hour';
date_style: 'full' | 'long' | 'medium' | 'short';
time_style: 'full' | 'long' | 'medium' | 'short';
strftime_fmt: string;
}

export const formatUnixTimestampToDateTime = (unixTimestamp: number, config: DateTimeStyle): string => {
const { timezone, time_format, date_style, time_style, strftime_fmt } = config;
const locale = window.navigator?.language || 'en-us';

if (isNaN(unixTimestamp)) {
return 'Invalid time';
}

try {
const date = new Date(unixTimestamp * 1000);

// use strftime_fmt if defined in config file
if (strftime_fmt) {
const strftime_locale = strftime.localizeByIdentifier(locale);
return strftime_locale(strftime_fmt, date);
}

// else use Intl.DateTimeFormat
const formatter = new Intl.DateTimeFormat(locale, {
day: '2-digit',
month: '2-digit',
year: 'numeric',
hour: '2-digit',
minute: '2-digit',
second: '2-digit',
timeZone: timezone,
dateStyle: date_style,
timeStyle: time_style,
timeZone: timezone || Intl.DateTimeFormat().resolvedOptions().timeZone,
hour12: time_format !== 'browser' ? time_format == '12hour' : undefined,
});
return formatter.format(date);
} catch (error) {
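
Based on the DateTimeStyle options above and the new call site in the Events diff ({ ...config.ui }), a short usage sketch follows; the import path and option values are illustrative only, not Frigate defaults:

// Illustrative usage of the new signature; the module path and values are examples only.
import { formatUnixTimestampToDateTime } from './date-util';

const timeConfig = {
  timezone: 'America/New_York',
  time_format: '24hour' as const,
  date_style: 'medium' as const,
  time_style: 'medium' as const,
  strftime_fmt: '%Y-%m-%d %H:%M', // when set, this takes precedence over date_style/time_style
};

// 1675814400 is 2023-02-08T00:00:00Z, chosen only for illustration.
// With strftime_fmt set, the strftime branch formats in the runtime's local zone,
// e.g. '2023-02-07 19:00' on a machine in US Eastern time.
formatUnixTimestampToDateTime(1675814400, timeConfig);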