Forked from GitHub/frigate
Compare commits: v0.11.0-be...v0.11.1 (83 commits)

Commit SHA1s:
2eada219cd, 8dd367efa9, 66dc8c772b, 68cdd9b94c, 65c211bb6d, 60ad38261b, c02100ee6f, 8669c29e3d,
10783fec49, 3bed4611f1, f0e836e5b6, a1ae5b67d8, 53f7190d42, 3846a13805, 7c60753ab0, df40b96b44,
faf583451f, be7b858cbd, a6a0e4d1de, 14faf0b2f6, bdfe4a961a, 1bc8d94312, 7e9f913ff6, 0882e4a454,
699bd3748a, 2ca59f0abe, 64b1b8e15c, b6f799e641, c461c9e700, 9df415b3f2, f7c5e02a35, 656e6a2a89,
afefa3ec02, 8c45dab9b8, 0d6dd1ed0f, 911d6fdfa7, 0cf759acad, 7c57c8c2da, 89c04acdff, 05d5f13f0e,
4682af81fb, e649a1eb98, 01482d791b, 6e2e297aeb, 5577ef081f, 2d5d3bdaf4, 3376e85be6, 7a1215d581,
a94297ac93, 37325c70ba, 3c46a33992, ed1897db71, dfbebb63ff, e68f80b44a, a67a768e89, 43f05c18d6,
3b076c28c2, cbf12e3f90, 17b745434c, 37011c2fda, fa95a041dd, 0879d7a2d1, 653c2274e1, 061fb15a80,
3246fcce22, f2a3797b46, b80080ac52, b36b63599b, 5d8c0e43c2, 7845995dfd, afe88d6e3a, 560ee0104d,
dc8b625d55, 162c0147d2, ef54cd6fb3, c2465a46a8, 24d3a9cdd5, 5e82eaed88, 93cd973e59, 53bf3cd2e6,
f59871a189, 2dda7608bb, 10db9faff9
.github/ISSUE_TEMPLATE/edgetpu_support_request.yml (new file, 84 lines)
@@ -0,0 +1,84 @@
name: EdgeTpu Support Request
description: Support for setting up EdgeTPU in Frigate
title: "[EdgeTPU Support]: "
labels: ["support", "triage"]
assignees: []
body:
  - type: textarea
    id: description
    attributes:
      label: Describe the problem you are having
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: Visible on the Debug page in the Web UI
    validations:
      required: true
  - type: textarea
    id: config
    attributes:
      label: Frigate config file
      description: This will be automatically formatted into code, so no need for backticks.
      render: yaml
    validations:
      required: true
  - type: textarea
    id: docker
    attributes:
      label: docker-compose file or Docker CLI command
      description: This will be automatically formatted into code, so no need for backticks.
      render: yaml
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
    validations:
      required: true
  - type: dropdown
    id: os
    attributes:
      label: Operating system
      options:
        - HassOS
        - Debian
        - Other Linux
        - Proxmox
        - UNRAID
        - Windows
        - Other
    validations:
      required: true
  - type: dropdown
    id: install-method
    attributes:
      label: Install method
      options:
        - HassOS Addon
        - Docker Compose
        - Docker CLI
    validations:
      required: true
  - type: dropdown
    id: coral
    attributes:
      label: Coral version
      options:
        - USB
        - PCIe
        - M.2
        - Dev Board
        - Other
        - CPU (no coral)
    validations:
      required: true
  - type: textarea
    id: other
    attributes:
      label: Any other information that may be helpful
.github/ISSUE_TEMPLATE/hwaccel_support_request.yml (new file, 96 lines)
@@ -0,0 +1,96 @@
name: Hardware Acceleration Support Request
description: Support for setting up GPU hardware acceleration in Frigate
title: "[HW Accel Support]: "
labels: ["support", "triage"]
assignees: []
body:
  - type: textarea
    id: description
    attributes:
      label: Describe the problem you are having
    validations:
      required: true
  - type: input
    id: version
    attributes:
      label: Version
      description: Visible on the Debug page in the Web UI
    validations:
      required: true
  - type: textarea
    id: config
    attributes:
      label: Frigate config file
      description: This will be automatically formatted into code, so no need for backticks.
      render: yaml
    validations:
      required: true
  - type: textarea
    id: docker
    attributes:
      label: docker-compose file or Docker CLI command
      description: This will be automatically formatted into code, so no need for backticks.
      render: yaml
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
      render: shell
    validations:
      required: true
  - type: textarea
    id: ffprobe
    attributes:
      label: FFprobe output from your camera
      description: Run `ffprobe <camera_url>` and provide output below
      render: shell
    validations:
      required: true
  - type: dropdown
    id: os
    attributes:
      label: Operating system
      options:
        - HassOS
        - Debian
        - Other Linux
        - Proxmox
        - UNRAID
        - Windows
        - Other
    validations:
      required: true
  - type: dropdown
    id: install-method
    attributes:
      label: Install method
      options:
        - HassOS Addon
        - Docker Compose
        - Docker CLI
    validations:
      required: true
  - type: dropdown
    id: network
    attributes:
      label: Network connection
      options:
        - Wired
        - Wireless
        - Mixed
    validations:
      required: true
  - type: input
    id: camera
    attributes:
      label: Camera make and model
      description: Dahua, hikvision, amcrest, reolink, etc and model number
    validations:
      required: true
  - type: textarea
    id: other
    attributes:
      label: Any other information that may be helpful
.github/stale.yml (deleted, 17 lines)
@@ -1,17 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 3
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.github/workflows/pull_request.yml (3 changed lines)
@@ -54,6 +54,7 @@ jobs:

  python_tests:
    runs-on: ubuntu-latest
    name: Python Tests
    steps:
      - name: Check out code
        uses: actions/checkout@v2
@@ -69,6 +70,8 @@ jobs:
        uses: docker/setup-qemu-action@v1
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Create Version Module
        run: make version
      - name: Build
        run: make
      - name: Run mypy

.github/workflows/stale.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
# Close Stale Issues
# Warns and then closes issues and PRs that have had no activity for a specified amount of time.
# https://github.com/actions/stale

name: "Stalebot"
on:
  schedule:
    - cron: "0 0 * * *" # run stalebot once a day

jobs:
  stale:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/stale@main
        id: stale
        with:
          stale-issue-message: 'This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions.'
          close-issue-message: ''
          days-before-stale: 30
          days-before-close: 3
          exempt-draft-pr: true
          exempt-issue-labels: 'pinned,security'
          exempt-pr-labels: 'pinned,security'
      - name: Print outputs
        run: echo ${{ join(steps.stale.outputs.*, ',') }}
Makefile (2 changed lines)
@@ -1,7 +1,7 @@
default_target: local

COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
VERSION = 0.11.0
VERSION = 0.11.1
CURRENT_UID := $(shell id -u)
CURRENT_GID := $(shell id -g)

@@ -6,6 +6,7 @@ services:
    # add groups from host for render, plugdev, video
    group_add:
      - "109" # render
      - "110" # render
      - "44" # video
      - "46" # plugdev
    shm_size: "256mb"

@@ -11,8 +11,8 @@ RUN apt-get -qq update \
    apt-transport-https \
    gnupg \
    wget \
    && wget -O - http://archive.raspberrypi.org/debian/raspberrypi.gpg.key | apt-key add - \
    && echo "deb http://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list \
    && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 9165938D90FDDD2E \
    && echo "deb http://raspbian.raspberrypi.org/raspbian/ bullseye main contrib non-free rpi" | tee /etc/apt/sources.list.d/raspi.list \
    && apt-get -qq update \
    && apt-get -qq install -y \
    python3 \
@@ -46,7 +46,6 @@ RUN pip3 wheel --wheel-dir=/wheels -r requirements-wheels.txt
FROM debian:11-slim
ARG TARGETARCH

ARG JELLYFIN_FFMPEG_VERSION=4.3.2-1
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND="noninteractive"
# http://stackoverflow.com/questions/48162574/ddg#49462622
@@ -67,8 +66,8 @@ RUN apt-get -qq update \
    unzip tzdata libxml2 xz-utils \
    python3-pip \
    # add raspberry pi repo
    && wget -O - http://archive.raspberrypi.org/debian/raspberrypi.gpg.key | apt-key add - \
    && echo "deb http://archive.raspberrypi.org/debian/ bullseye main" | tee /etc/apt/sources.list.d/raspi.list \
    && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 9165938D90FDDD2E \
    && echo "deb http://raspbian.raspberrypi.org/raspbian/ bullseye main contrib non-free rpi" | tee /etc/apt/sources.list.d/raspi.list \
    # add coral repo
    && apt-key adv --fetch-keys https://packages.cloud.google.com/apt/doc/apt-key.gpg \
    && echo "deb https://packages.cloud.google.com/apt coral-edgetpu-stable main" > /etc/apt/sources.list.d/coral-edgetpu.list \
@@ -80,15 +79,35 @@ RUN apt-get -qq update \
    # coral drivers
    libedgetpu1-max python3-tflite-runtime python3-pycoral \
    && pip3 install -U /wheels/*.whl \
    # btbn-ffmpeg -> amd64 / arm64
    && if [ "${TARGETARCH}" = "amd64" ] || [ "${TARGETARCH}" = "arm64" ]; then \
    mkdir -p /usr/lib/btbn-ffmpeg \
    && wget -O btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux$( [ "$TARGETARCH" = "amd64" ] && echo "64" || echo "arm64" )-gpl-5.1.tar.xz" \
    && tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1 \
    && rm btbn-ffmpeg.tar.xz; \
    fi \
    # ffmpeg -> arm32
    && if [ "${TARGETARCH}" = "arm" ]; then \
    apt-get -qq install --no-install-recommends --no-install-suggests -y ffmpeg; \
    fi \
    # arch specific packages
    && if [ "${TARGETARCH}" = "amd64" ]; then \
    # jellyfin-ffmpeg
    wget -O jellyfin.deb "https://repo.jellyfin.org/releases/server/debian/versions/jellyfin-ffmpeg/${JELLYFIN_FFMPEG_VERSION}/jellyfin-ffmpeg_${JELLYFIN_FFMPEG_VERSION}-$( awk -F'=' '/^VERSION_CODENAME=/{ print $NF }' /etc/os-release )_$( dpkg --print-architecture ).deb" \
    && apt-get -qq install --no-install-recommends --no-install-suggests -y \
    mesa-va-drivers intel-media-va-driver-non-free ./jellyfin.deb \
    && rm jellyfin.deb; else \
    apt-get -qq install --no-install-recommends --no-install-suggests -y \
    ffmpeg; \
    mesa-va-drivers libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1; \
    fi \
    && if [ "${TARGETARCH}" = "arm64" ]; then \
    apt-get -qq install --no-install-recommends --no-install-suggests -y \
    libva-drm2 mesa-va-drivers; \
    fi \
    # not sure why 32bit arm requires all these
    && if [ "${TARGETARCH}" = "arm" ]; then \
    apt-get -qq install --no-install-recommends --no-install-suggests -y \
    libgtk-3-dev \
    libavcodec-dev libavformat-dev libswscale-dev libv4l-dev \
    libxvidcore-dev libx264-dev libjpeg-dev libpng-dev libtiff-dev \
    gfortran openexr libatlas-base-dev libssl-dev\
    libtbb2 libtbb-dev libdc1394-22-dev libopenexr-dev \
    libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev; \
    fi \
    && rm -rf /wheels \
    && apt-get remove gnupg apt-transport-https -y \
@@ -96,7 +115,7 @@ RUN apt-get -qq update \
    && apt-get autoremove -y \
    && rm -rf /var/lib/apt/lists/*

ENV PATH=$PATH:/usr/lib/jellyfin-ffmpeg
ENV PATH=$PATH:/usr/lib/btbn-ffmpeg/bin

COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/

@@ -55,6 +55,7 @@ http {
        vod_upstream_location /api;
        vod_align_segments_to_key_frames on;
        vod_manifest_segment_durations_mode accurate;
        vod_ignore_edit_list on;

        # vod caches
        vod_metadata_cache metadata_cache 512m;
@@ -172,10 +173,11 @@ http {
            proxy_set_header Host $host;
        }

        location ~* /api/(.*\.(jpg|jpeg|png)$) {
        location ~* /api/.*\.(jpg|jpeg|png)$ {
            add_header 'Access-Control-Allow-Origin' '*';
            add_header 'Access-Control-Allow-Methods' 'GET, POST, PUT, DELETE, OPTIONS';
            proxy_pass http://frigate_api/$1$is_args$args;
            rewrite ^/api/(.*)$ $1 break;
            proxy_pass http://frigate_api;
            proxy_pass_request_headers on;
            proxy_set_header Host $host;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;

@@ -4,8 +4,6 @@ title: Advanced Options
sidebar_label: Advanced Options
---

## Advanced configuration

### `logger`

Change the default log level for troubleshooting purposes.
@@ -67,3 +65,14 @@ model:
```

Note that if you rename objects in the labelmap, you will also need to update your `objects -> track` list as well.

## Custom ffmpeg build

Included with Frigate is a build of ffmpeg that works for the vast majority of users. However, some hardware setups are incompatible with the included build. In this case, a docker volume mapping can be used to overwrite the included ffmpeg build with an ffmpeg build that works for your specific hardware setup.

To do this:
1. Download your ffmpeg build and uncompress it to a folder on the host (let's use `/home/appdata/frigate/custom-ffmpeg` for this example).
2. Update your docker-compose or docker CLI to include `'/home/appdata/frigate/custom-ffmpeg':'/usr/lib/btbn-ffmpeg':'ro'` in the volume mappings, as shown in the sketch below.
3. Restart Frigate and the custom version will be used if the mapping was done correctly.

NOTE: The folder that is mapped from the host needs to be the folder that contains `/bin`. So if the full structure is `/home/appdata/frigate/custom-ffmpeg/bin/ffmpeg` then `/home/appdata/frigate/custom-ffmpeg` needs to be mapped to `/usr/lib/btbn-ffmpeg`.
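For docker-compose users, step 2 might look like the following minimal sketch (the service name and host path are illustrative; only the container path `/usr/lib/btbn-ffmpeg` comes from the docs above):

```yaml
services:
  frigate:
    image: blakeblackshear/frigate:stable
    volumes:
      # overwrite the bundled ffmpeg with the custom build, read-only
      - /home/appdata/frigate/custom-ffmpeg:/usr/lib/btbn-ffmpeg:ro
```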
@@ -12,3 +12,24 @@ Birdseye offers different modes to customize which cameras show under which circ
### Custom Birdseye Icon

A custom icon can be added to the birdseye background by providing a file `custom.png` inside of the Frigate `media` folder. The file must be a png with the icon as transparent; any non-transparent pixels will be white when displayed in the birdseye view.

### Birdseye view override at camera level

If you want to include a camera in Birdseye view only for specific circumstances, or not include it at all, the Birdseye setting can be set at the camera level.

```yaml
# Include all cameras by default in Birdseye view
birdseye:
  enabled: True
  mode: continuous

cameras:
  front:
    # Only include the "front" camera in Birdseye view when objects are detected
    birdseye:
      mode: objects
  back:
    # Exclude the "back" camera from Birdseye view
    birdseye:
      enabled: False
```
@@ -3,7 +3,7 @@ id: camera_specific
title: Camera Specific Configurations
---

### MJPEG Cameras
## MJPEG Cameras

The input and output parameters need to be adjusted for MJPEG cameras

@@ -19,7 +19,7 @@ output_args:
  rtmp: -c:v libx264 -an -f flv
```

### JPEG Stream Cameras
## JPEG Stream Cameras

Cameras using a live changing jpeg image will need input parameters as below

@@ -47,7 +47,7 @@ input_args:

Outputting the stream will have the same args and caveats as per [MJPEG Cameras](#mjpeg-cameras)

### RTMP Cameras
## RTMP Cameras

The input parameters need to be adjusted for RTMP cameras

@@ -56,20 +56,67 @@ ffmpeg:
  input_args: -avoid_negative_ts make_zero -fflags nobuffer -flags low_delay -strict experimental -fflags +genpts+discardcorrupt -rw_timeout 5000000 -use_wallclock_as_timestamps 1 -f live_flv
```

## UDP Only Cameras

If your cameras do not support TCP connections for RTSP, you can use UDP.

```yaml
ffmpeg:
  input_args: -avoid_negative_ts make_zero -fflags +genpts+discardcorrupt -rtsp_transport udp -timeout 5000000 -use_wallclock_as_timestamps 1
```

## Model/vendor specific setup

### Annke C800
This camera is H.265 only. To be able to play clips on some devices (like macOS or iPhone) the H.265 stream has to be repackaged and the audio stream has to be converted to aac. Unfortunately direct playback in the browser is not working (yet), but the downloaded clip can be played locally.

```yaml
cameras:
  annkec800: # <------ Name the camera
    ffmpeg:
      output_args:
        record: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v copy -tag:v hvc1 -bsf:v hevc_mp4toannexb -c:a aac
        rtmp: -c:v copy -c:a aac -f flv

      inputs:
        - path: rtsp://user:password@camera-ip:554/H264/ch1/main/av_stream # <----- Update for your camera
          roles:
            - detect
            - record
            - rtmp
    rtmp:
      enabled: False # <-- RTMP should be disabled if your stream is not H264
    detect:
      width: # <---- update for your camera's resolution
      height: # <---- update for your camera's resolution

```

### Blue Iris RTSP Cameras

You will need to remove `nobuffer` flag for Blue Iris RTSP cameras

```yaml
ffmpeg:
  input_args: -avoid_negative_ts make_zero -flags low_delay -strict experimental -fflags +genpts+discardcorrupt -rtsp_transport tcp -timeout 5000000 -use_wallclock_as_timestamps 1
```

### Reolink 410/520 (possibly others)

According to [this discussion](https://github.com/blakeblackshear/frigate/issues/1713#issuecomment-932976305), the http video streams seem to be the most reliable for Reolink.

According to [this discussion](https://github.com/blakeblackshear/frigate/issues/3235#issuecomment-1135876973), the http video streams seem to be the most reliable for Reolink.

```yaml
cameras:
  reolink:
    ffmpeg:
      hwaccel_args:
      input_args:
        - -avoid_negative_ts
        - make_zero
        - -fflags
        - nobuffer+genpts+discardcorrupt
        - +genpts+discardcorrupt
        - -flags
        - low_delay
        - -strict
@@ -94,22 +141,13 @@ cameras:
      fps: 7
```

### Unifi Protect Cameras

### Blue Iris RTSP Cameras

You will need to remove `nobuffer` flag for Blue Iris RTSP cameras
In the Unifi 2.0 update Unifi Protect Cameras had a change in audio sample rate which causes issues for ffmpeg. The input rate needs to be set for record and rtmp.

```yaml
ffmpeg:
  input_args: -avoid_negative_ts make_zero -flags low_delay -strict experimental -fflags +genpts+discardcorrupt -rtsp_transport tcp -stimeout 5000000 -use_wallclock_as_timestamps 1
```

### UDP Only Cameras

If your cameras do not support TCP connections for RTSP, you can use UDP.

```yaml
ffmpeg:
  input_args: -avoid_negative_ts make_zero -fflags +genpts+discardcorrupt -rtsp_transport udp -stimeout 5000000 -use_wallclock_as_timestamps 1
```
  output_args:
    record: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v copy -ar 44100 -c:a aac
    rtmp: -c:v copy -f flv -ar 44100 -c:a aac
```
@@ -43,3 +43,5 @@ cameras:
  front: ...
  side: ...
```

For camera model specific settings check the [camera specific](/configuration/camera_specific) info.
@@ -21,6 +21,7 @@ ffmpeg:
ffmpeg:
  hwaccel_args: -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 -hwaccel_output_format yuv420p
```
**NOTICE**: With some of the processors, like the J4125, the default driver `iHD` doesn't seem to work correctly for hardware acceleration. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the frigate.yml for HA OS users](advanced.md#environment_vars).

### Intel-based CPUs (>=10th Generation) via Quicksync

@@ -40,22 +41,24 @@ ffmpeg:

### NVIDIA GPU

[Supported Nvidia GPUs for Decoding](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new)

These instructions are based on the [jellyfin documentation](https://jellyfin.org/docs/general/administration/hardware-acceleration.html#nvidia-hardware-acceleration-on-docker-linux)

Add `--gpus all` to your docker run command or update your compose file.

If you have multiple Nvidia graphics cards, you can add them with their IDs obtained via the `nvidia-smi` command
```yaml
services:
  frigate:
    ...
    image: blakeblackshear/frigate:stable
    deploy: # <------------- Add this section
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]
    deploy: # <------------- Add this section
      resources:
        reservations:
          devices:
            - driver: nvidia
              device_ids: ['0'] # this is only needed when using multiple GPUs
              capabilities: [gpu]
```

The decoder you need to pass in the `hwaccel_args` will depend on the input video.
@@ -83,7 +86,7 @@ ffmpeg:
```

If everything is working correctly, you should see a significant improvement in performance.
Verify that hardware decoding is working by running `nvidia-smi`, which should show the ffmpeg
Verify that hardware decoding is working by running `docker exec -it frigate nvidia-smi`, which should show the ffmpeg
processes:

```

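For docker-compose users, the J4125 workaround mentioned in the notice above can be applied with an `environment` entry; this is a minimal sketch (the service name and image tag are illustrative):

```yaml
services:
  frigate:
    image: blakeblackshear/frigate:stable
    environment:
      # force the i965 VA-API driver instead of the default iHD driver
      - LIBVA_DRIVER_NAME=i965
```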
@@ -25,6 +25,10 @@ cameras:
      height: 720
```

### VSCode Configuration Schema

VSCode (and the VSCode addon) supports JSON schemas which will automatically validate the config. This can be enabled by adding `# yaml-language-server: $schema=http://frigate_host:5000/api/config/schema` to the top of the config file, with `frigate_host` being the IP address of Frigate or `ccab4aaf-frigate` if running in the addon.

### Full configuration reference:

:::caution
@@ -135,7 +139,7 @@ ffmpeg:
  # NOTE: See hardware acceleration docs for your specific device
  hwaccel_args: []
  # Optional: global input args (default: shown below)
  input_args: -avoid_negative_ts make_zero -fflags +genpts+discardcorrupt -rtsp_transport tcp -stimeout 5000000 -use_wallclock_as_timestamps 1
  input_args: -avoid_negative_ts make_zero -fflags +genpts+discardcorrupt -rtsp_transport tcp -timeout 5000000 -use_wallclock_as_timestamps 1
  # Optional: global output args
  output_args:
    # Optional: output args for detect streams (default: shown below)
@@ -311,6 +315,8 @@ snapshots:
  # Optional: Enable writing jpg snapshot to /media/frigate/clips (default: shown below)
  # This value can be set via MQTT and will be updated in startup based on retained value
  enabled: False
  # Optional: save a clean PNG copy of the snapshot image (default: shown below)
  clean_copy: True
  # Optional: print a timestamp on the snapshots (default: shown below)
  timestamp: False
  # Optional: draw bounding box on the snapshots (default: shown below)

@@ -42,3 +42,35 @@ The same options are available with events. Let's consider a scenario where you
- With the `all` option all segments for the duration of the event would be saved for the event. This event would have 4 hours of footage.
- With the `motion` option all segments for the duration of the event with motion would be saved. This means any segment where a car drove by in the street, a person walked by, lighting changed, etc. would be saved.
- With the `active_objects` option it would only keep segments where the object was active. In this case the only segments that would be saved would be the ones where the car was driving up, you going inside, you coming outside, and the car driving away. Essentially reducing the 4 hours to a minute or two of event footage.

A configuration example of the above retain modes where all `motion` segments are stored for 7 days and `active_objects` are stored for 14 days would be as follows:
```yaml
record:
  enabled: True
  retain:
    days: 7
    mode: motion
  events:
    retain:
      default: 14
      mode: active_objects
```
The above configuration example can be added globally or on a per camera basis.

### Object Specific Retention

You can also set a specific retention length for an object type. The below configuration example builds on the one above but also specifies that recordings of dogs only need to be kept for 2 days and recordings of cars should be kept for 7 days.
```yaml
record:
  enabled: True
  retain:
    days: 7
    mode: motion
  events:
    retain:
      default: 14
      mode: active_objects
      objects:
        dog: 2
        car: 7
```

@@ -5,4 +5,4 @@ title: RTMP

Frigate can re-stream your video feed as an RTMP feed for other applications such as Home Assistant to utilize at `rtmp://<frigate_host>/live/<camera_name>`. Port 1935 must be open. This allows you to use a video feed for detection in Frigate and Home Assistant live view at the same time without having to make two separate connections to the camera. The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate.

Some video feeds are not compatible with RTMP. If you are experiencing issues, check to make sure your camera feed is h264 with AAC audio. If your camera doesn't support a compatible format for RTMP, you can use the ffmpeg args to re-encode it on the fly at the expense of increased CPU utilization. Some more information about it can be found [here](../faqs#audio-in-recordings).
Some video feeds are not compatible with RTMP. If you are experiencing issues, check to make sure your camera feed is h264 with AAC audio. If your camera doesn't support a compatible format for RTMP, you can use the ffmpeg args to re-encode it on the fly at the expense of increased CPU utilization. Some more information about it can be found [here](/faqs#audio-in-recordings).

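To quickly check whether the re-streamed feed is reachable, `ffplay` (bundled with ffmpeg) can be pointed at the URL above; the host and camera name are placeholders:

```
ffplay rtmp://<frigate_host>/live/<camera_name>
```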
@@ -208,3 +208,16 @@ npm run build
```

This command generates static content into the `build` directory and can be served using any static contents hosting service.

## Official builds

Setup buildx for multiarch

```
docker buildx stop builder && docker buildx rm builder # <---- if existing
docker run --privileged --rm tonistiigi/binfmt --install all
docker buildx create --name builder --driver docker-container --driver-opt network=host --use
docker buildx inspect builder --bootstrap
make build_web
make push
```

@@ -47,3 +47,7 @@ These messages in the logs are expected in certain situations. Frigate checks th
### "On connect called"

If you see repeated "On connect called" messages in your config, check for another instance of Frigate. This happens when multiple Frigate containers are trying to connect to mqtt with the same client_id.

### Error: Database Is Locked

SQLite does not work well on a network share. If the `/media` folder is mapped to a network share, then [this guide](/configuration/advanced#database) should be used to move the database to a location on the internal drive.
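As a sketch of the outcome of that guide, the database location can be overridden in the Frigate config by pointing it at a local path (the path shown is illustrative):

```yaml
database:
  # keep the sqlite database on a local, non-network drive
  path: /db/frigate.db
```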
docs/docs/guides/events_setup.md (new file, 10 lines)
@@ -0,0 +1,10 @@
---
id: events_setup
title: Setting Up Events
---

[Snapshots](../configuration/snapshots.md) and/or [Recordings](../configuration/record.md) must be enabled for events to be created for detected objects.

## Limiting Events to Areas of Interest

The best way to limit events to areas of interest is to use [zones](../configuration/zones.md) along with `required_zones` for events and snapshots to only have events created in areas of interest.
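A minimal sketch of that approach is shown below; the camera name, zone name, and coordinates are placeholders for illustration:

```yaml
cameras:
  front:
    zones:
      driveway:
        coordinates: 0,461,3,0,1919,0,1919,843,1699,492
    snapshots:
      # only save snapshots for objects that entered the zone
      required_zones:
        - driveway
    record:
      events:
        # only create event recordings for objects that entered the zone
        required_zones:
          - driveway
```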
@@ -45,6 +45,12 @@ More details on available detectors can be found [here](/configuration/detectors

Now let's add the first camera:

:::caution

Note that passwords that contain special characters often cause issues with ffmpeg connecting to the camera. If receiving `end-of-file` or `unauthorized` errors with a verified correct password, try changing the password to something simple to rule out the possibility that the password is the issue.

:::

```yaml
mqtt:
  host: <ip of your mqtt server>

@@ -3,7 +3,7 @@ id: stationary_objects
title: Avoiding stationary objects
---

Many people use Frigate to detect cars entering their driveway, and they often run into an issue with repeated events of a parked car being repeatedly detected. This is because object tracking stops when motion ends and the event ends. Motion detection works by determining if a sufficient number of pixels have changed between frames. Shadows or other lighting changes will be detected as motion. This will often cause a new event for a parked car.
Many people use Frigate to detect cars entering their driveway, and they often run into an issue with repeated events of a parked car being detected over the course of multiple days (for example if the car is lost at night and detected again the following morning).

You can use zones to restrict events and notifications to objects that have entered specific areas.

@@ -23,15 +23,15 @@ I may earn a small commission for my endorsement, recommendation, testimonial, o

My current favorite is the Minisforum GK41 because of the dual NICs that allow you to setup a dedicated private network for your cameras where they can be blocked from accessing the internet. There are many used workstation options on eBay that work very well. Anything with an Intel CPU and capable of running Debian should work fine. As a bonus, you may want to look for devices with a M.2 or PCIe express slot that is compatible with the Google Coral. I may earn a small commission for my endorsement, recommendation, testimonial, or link to any products or services from this website.

| Name | Inference Speed | Coral Compatibility | Notes |
| --- | --- | --- | --- |
| <a href="https://amzn.to/3oH4BKi" target="_blank" rel="nofollow noopener sponsored">Odyssey X86 Blue J4125</a> (affiliate link) | 9-10ms | M.2 B+M | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| <a href="https://amzn.to/3ptnb8D" target="_blank" rel="nofollow noopener sponsored">Minisforum GK41</a> (affiliate link) | 9-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| <a href="https://amzn.to/35E79BC" target="_blank" rel="nofollow noopener sponsored">Beelink GK55</a> (affiliate link) | 9-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| <a href="https://amzn.to/3psFlHi" target="_blank" rel="nofollow noopener sponsored">Intel NUC</a> (affiliate link) | 8-10ms | USB | Overkill for most, but great performance. Can handle many cameras at 5fps depending on typical amounts of motion. Requires extra parts. |
| <a href="https://amzn.to/3a6TBh8" target="_blank" rel="nofollow noopener sponsored">BMAX B2 Plus</a> (affiliate link) | 10-12ms | USB | Good balance of performance and cost. Also capable of running many other services at the same time as frigate. |
| <a href="https://amzn.to/2YjpY9m" target="_blank" rel="nofollow noopener sponsored">Atomic Pi</a> (affiliate link) | 16ms | USB | Good option for a dedicated low power board with a small number of cameras. Can leverage Intel QuickSync for stream decoding. |
| <a href="https://amzn.to/2YhSGHH" target="_blank" rel="nofollow noopener sponsored">Raspberry Pi 4 (64bit)</a> (affiliate link) | 10-15ms | USB | Can handle a small number of cameras. |
| Name | Inference Speed | Coral Compatibility | Notes |
| --- | --- | --- | --- |
| Odyssey X86 Blue J4125 (<a href="https://amzn.to/3oH4BKi" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) (<a href="https://www.seeedstudio.com/Frigate-NVR-with-Odyssey-Blue-and-Coral-USB-Accelerator.html?utm_source=Frigate" target="_blank" rel="nofollow noopener sponsored">SeeedStudio</a>) | 9-10ms | M.2 B+M, USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| Minisforum GK41 (<a href="https://amzn.to/3ptnb8D" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 9-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| Beelink GK55 (<a href="https://amzn.to/35E79BC" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 9-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| Intel NUC (<a href="https://amzn.to/3psFlHi" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 8-10ms | USB | Overkill for most, but great performance. Can handle many cameras at 5fps depending on typical amounts of motion. Requires extra parts. |
| BMAX B2 Plus (<a href="https://amzn.to/3a6TBh8" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 10-12ms | USB | Good balance of performance and cost. Also capable of running many other services at the same time as frigate. |
| Atomic Pi (<a href="https://amzn.to/2YjpY9m" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 16ms | USB | Good option for a dedicated low power board with a small number of cameras. Can leverage Intel QuickSync for stream decoding. |
| Raspberry Pi 4 (64bit) (<a href="https://amzn.to/2YhSGHH" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 10-15ms | USB | Can handle a small number of cameras. |

## Google Coral TPU

@@ -100,18 +100,7 @@ Additionally, the USB Coral draws a considerable amount of power. If using any o

## Docker

Running in Docker directly is the recommended install method.

Make sure you choose the right image for your architecture:

| Arch | Image Name |
| ----------- | ------------------------------------------ |
| amd64 | blakeblackshear/frigate:stable-amd64 |
| amd64nvidia | blakeblackshear/frigate:stable-amd64nvidia |
| armv7 | blakeblackshear/frigate:stable-armv7 |
| aarch64 | blakeblackshear/frigate:stable-aarch64 |

It is recommended to run with docker-compose:
Running in Docker with compose is the recommended install method:

```yaml
version: "3.9"
@@ -120,7 +109,7 @@ services:
    container_name: frigate
    privileged: true # this may not be necessary for all setups
    restart: unless-stopped
    image: blakeblackshear/frigate:<specify_version_tag>
    image: blakeblackshear/frigate:stable
    shm_size: "64mb" # update for your cameras based on calculation above
    devices:
      - /dev/bus/usb:/dev/bus/usb # passes the USB Coral, needs to be modified for other versions
@@ -157,7 +146,7 @@ docker run -d \
  -e FRIGATE_RTSP_PASSWORD='password' \
  -p 5000:5000 \
  -p 1935:1935 \
  blakeblackshear/frigate:<specify_version_tag>
  blakeblackshear/frigate:stable
```

## Home Assistant Operating System (HassOS)

@@ -183,6 +183,10 @@ Permanently deletes the event along with any clips/snapshots.

Sets retain to true for the event id.

### `POST /api/events/<id>/plus`

Submits the snapshot of the event to Frigate+ for labeling.

### `DELETE /api/events/<id>/retain`

Sets retain to false for the event id (event may be deleted quickly after removing).

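As an illustration, assuming Frigate's API is reachable on the default port 5000, the Frigate+ submission endpoint above could be called like this (host and event id are placeholders):

```
curl -X POST http://<frigate_host>:5000/api/events/<id>/plus
```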
@@ -85,6 +85,17 @@ The integration provides:

This is accessible via "Media Browser" on the left menu panel in Home Assistant.

## Casting Clips To Media Devices

The integration supports casting clips and camera streams to supported media devices.

:::tip
For clips to be castable to media devices, audio is required and may need to be [enabled for recordings](../faqs.md#audio-in-recordings).

**NOTE: Even if your camera does not support audio, audio will need to be enabled for Casting to be accepted.**

:::

<a name="api"></a>

## Notification API
@@ -167,7 +178,7 @@ for how to set these.

When multiple Frigate instances are configured, [API](#api) URLs should include an
identifier to tell Home Assistant which Frigate instance to refer to. The
identifier used is the MQTT `client_id` paremeter included in the configuration,
identifier used is the MQTT `client_id` parameter included in the configuration,
and is used like so:

```

@@ -45,6 +45,7 @@ Message published for each changed event. The first message is published when th
    "frame_time": 1607123961.837752,
    "snapshot_time": 1607123961.837752,
    "label": "person",
    "sub_label": null,
    "top_score": 0.958984375,
    "false_positive": false,
    "start_time": 1607123955.475377,
@@ -69,6 +70,7 @@ Message published for each changed event. The first message is published when th
    "frame_time": 1607123962.082975,
    "snapshot_time": 1607123961.837752,
    "label": "person",
    "sub_label": null,
    "top_score": 0.958984375,
    "false_positive": false,
    "start_time": 1607123955.475377,
@@ -140,3 +142,19 @@ Topic to turn improve_contrast for a camera on and off. Expected values are `ON`
### `frigate/<camera_name>/improve_contrast/state`

Topic with current state of improve_contrast for a camera. Published values are `ON` and `OFF`.

### `frigate/<camera_name>/motion_threshold/set`

Topic to adjust motion threshold for a camera. Expected value is an integer.

### `frigate/<camera_name>/motion_threshold/state`

Topic with current motion threshold for a camera. Published value is an integer.

### `frigate/<camera_name>/motion_contour_area/set`

Topic to adjust motion contour area for a camera. Expected value is an integer.

### `frigate/<camera_name>/motion_contour_area/state`

Topic with current motion contour area for a camera. Published value is an integer.
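For example, the threshold topic above can be exercised with the standard `mosquitto_pub` client; the broker host, camera name, and value are placeholders:

```
mosquitto_pub -h <mqtt_broker> -t frigate/<camera_name>/motion_threshold/set -m 30
```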
@@ -4,6 +4,7 @@ module.exports = {
  Guides: [
    "guides/camera_setup",
    "guides/getting_started",
    "guides/events_setup",
    "guides/false_positives",
    "guides/ha_notifications",
    "guides/stationary_objects",

@@ -95,6 +95,12 @@ class FrigateApp:
            "improve_contrast_enabled": mp.Value(
                "i", self.config.cameras[camera_name].motion.improve_contrast
            ),
            "motion_threshold": mp.Value(
                "i", self.config.cameras[camera_name].motion.threshold
            ),
            "motion_contour_area": mp.Value(
                "i", self.config.cameras[camera_name].motion.contour_area
            ),
            "detection_fps": mp.Value("d", 0.0),
            "detection_frame": mp.Value("d", 0.0),
            "read_start": mp.Value("d", 0.0),

@@ -346,7 +346,7 @@ FFMPEG_INPUT_ARGS_DEFAULT = [
    "+genpts+discardcorrupt",
    "-rtsp_transport",
    "tcp",
    "-stimeout",
    "-timeout",
    "5000000",
    "-use_wallclock_as_timestamps",
    "1",

frigate/http.py (117 changed lines)
@@ -8,6 +8,7 @@ import subprocess as sp
import time
from functools import reduce
from pathlib import Path
from urllib.parse import unquote

import cv2

@@ -25,8 +26,9 @@ from flask import (
from peewee import SqliteDatabase, operator, fn, DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.const import CLIPS_DIR, PLUS_ENV_VAR
from frigate.const import CLIPS_DIR
from frigate.models import Event, Recordings
from frigate.object_processing import TrackedObject, TrackedObjectProcessor
from frigate.stats import stats_snapshot
from frigate.version import VERSION

@@ -210,7 +212,7 @@ def delete_retain(id):
@bp.route("/events/<id>/sub_label", methods=("POST",))
def set_sub_label(id):
    try:
        event = Event.get(Event.id == id)
        event: Event = Event.get(Event.id == id)
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
@@ -233,6 +235,16 @@ def set_sub_label(id):
            400,
        )

    if not event.end_time:
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera
            ].tracked_objects.get(event.id)
        )

        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = new_sub_label

    event.sub_label = new_sub_label
    event.save()
    return make_response(
@@ -256,7 +268,10 @@ def get_sub_labels():
        )

    sub_labels = [e.sub_label for e in events]
    sub_labels.remove(None)

    if None in sub_labels:
        sub_labels.remove(None)

    return jsonify(sub_labels)


@@ -338,11 +353,11 @@ def event_thumbnail(id, max_cache_age=2592000):
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )
    else:
@@ -350,7 +365,6 @@ def label_thumbnail(camera_name, label):
            Event.select()
            .where(Event.camera == camera_name)
            .where(Event.label == label)
            .where(Event.has_snapshot == True)
            .order_by(Event.start_time.desc())
        )

@@ -421,6 +435,7 @@ def event_snapshot(id):

@bp.route("/<camera_name>/<label>/snapshot.jpg")
def label_snapshot(camera_name, label):
    label = unquote(label)
    if label == "any":
        event_query = (
            Event.select()
@@ -488,7 +503,7 @@ def event_clip(id):
def events():
    limit = request.args.get("limit", 100)
    camera = request.args.get("camera", "all")
    label = request.args.get("label", "all")
    label = unquote(request.args.get("label", "all"))
    sub_label = request.args.get("sub_label", "all")
    zone = request.args.get("zone", "all")
    after = request.args.get("after", type=float)
@@ -568,7 +583,7 @@ def config():
        for cmd in camera_dict["ffmpeg_cmds"]:
            cmd["cmd"] = " ".join(cmd["cmd"])

    config["plus"] = {"enabled": PLUS_ENV_VAR in os.environ}
    config["plus"] = {"enabled": current_app.plus_api.is_active()}

    return jsonify(config)

@@ -750,9 +765,9 @@ def recordings(camera_name):
    return jsonify([e for e in recordings.dicts()])


@bp.route("/<camera>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera, start_ts, end_ts):
@bp.route("/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/clip.mp4")
@bp.route("/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/clip.mp4")
def recording_clip(camera_name, start_ts, end_ts):
    download = request.args.get("download", type=bool)

    recordings = (
@@ -762,7 +777,7 @@ def recording_clip(camera, start_ts, end_ts):
            | (Recordings.end_time.between(start_ts, end_ts))
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

@@ -777,36 +792,41 @@ def recording_clip(camera, start_ts, end_ts):
        if clip.end_time > end_ts:
            playlist_lines.append(f"outpoint {int(end_ts - clip.start_time)}")

    file_name = f"clip_{camera}_{start_ts}-{end_ts}.mp4"
    file_name = f"clip_{camera_name}_{start_ts}-{end_ts}.mp4"
    path = f"/tmp/cache/{file_name}"

    ffmpeg_cmd = [
        "ffmpeg",
        "-y",
        "-protocol_whitelist",
        "pipe,file",
        "-f",
        "concat",
        "-safe",
        "0",
        "-i",
        "/dev/stdin",
        "-c",
        "copy",
        "-movflags",
        "+faststart",
        path,
    ]
    if not os.path.exists(path):
        ffmpeg_cmd = [
            "ffmpeg",
            "-y",
            "-protocol_whitelist",
            "pipe,file",
            "-f",
            "concat",
            "-safe",
            "0",
            "-i",
            "/dev/stdin",
            "-c",
            "copy",
            "-movflags",
            "+faststart",
            path,
        ]
        p = sp.run(
            ffmpeg_cmd,
            input="\n".join(playlist_lines),
            encoding="ascii",
            capture_output=True,
        )

    p = sp.run(
        ffmpeg_cmd,
        input="\n".join(playlist_lines),
        encoding="ascii",
        capture_output=True,
    )
    if p.returncode != 0:
        logger.error(p.stderr)
        return f"Could not create clip from recordings for {camera}.", 500
        if p.returncode != 0:
            logger.error(p.stderr)
            return f"Could not create clip from recordings for {camera_name}.", 500
    else:
        logger.debug(
            f"Ignoring subsequent request for {path} as it already exists in the cache."
        )

    response = make_response()
    response.headers["Content-Description"] = "File Transfer"
@@ -822,9 +842,9 @@ def recording_clip(camera, start_ts, end_ts):
    return response


@bp.route("/vod/<camera>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera, start_ts, end_ts):
@bp.route("/vod/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
@bp.route("/vod/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def vod_ts(camera_name, start_ts, end_ts):
    recordings = (
        Recordings.select()
        .where(
@@ -832,7 +852,7 @@ def vod_ts(camera, start_ts, end_ts):
            | Recordings.end_time.between(start_ts, end_ts)
            | ((start_ts > Recordings.start_time) & (end_ts < Recordings.end_time))
        )
        .where(Recordings.camera == camera)
        .where(Recordings.camera == camera_name)
        .order_by(Recordings.start_time.asc())
    )

@@ -843,16 +863,13 @@ def vod_ts(camera, start_ts, end_ts):
    for recording in recordings:
        clip = {"type": "source", "path": recording.path}
        duration = int(recording.duration * 1000)
        # Determine if offset is needed for first clip
        if recording.start_time < start_ts:
            offset = int((start_ts - recording.start_time) * 1000)
            clip["clipFrom"] = offset
            duration -= offset

        # Determine if we need to end the last clip early
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

        if duration > 0:
            clip["keyFrameDurations"] = [duration]
            clips.append(clip)
            durations.append(duration)
        else:
@@ -873,14 +890,14 @@ def vod_ts(camera, start_ts, end_ts):
    )


@bp.route("/vod/<year_month>/<day>/<hour>/<camera>")
def vod_hour(year_month, day, hour, camera):
@bp.route("/vod/<year_month>/<day>/<hour>/<camera_name>")
def vod_hour(year_month, day, hour, camera_name):
    start_date = datetime.strptime(f"{year_month}-{day} {hour}", "%Y-%m-%d %H")
    end_date = start_date + timedelta(hours=1) - timedelta(milliseconds=1)
    start_ts = start_date.timestamp()
    end_ts = end_date.timestamp()

    return vod_ts(camera, start_ts, end_ts)
    return vod_ts(camera_name, start_ts, end_ts)


@bp.route("/vod/event/<id>")

@@ -5,7 +5,14 @@ from frigate.config import MotionConfig


class MotionDetector:
    def __init__(self, frame_shape, config: MotionConfig, improve_contrast_enabled):
    def __init__(
        self,
        frame_shape,
        config: MotionConfig,
        improve_contrast_enabled,
        motion_threshold,
        motion_contour_area,
    ):
        self.config = config
        self.frame_shape = frame_shape
        self.resize_factor = frame_shape[0] / config.frame_height
@@ -25,6 +32,8 @@ class MotionDetector:
        self.mask = np.where(resized_mask == [0])
        self.save_images = False
        self.improve_contrast = improve_contrast_enabled
        self.threshold = motion_threshold
        self.contour_area = motion_contour_area

    def detect(self, frame):
        motion_boxes = []
@@ -69,7 +78,7 @@ class MotionDetector:

        # compute the threshold image for the current frame
        current_thresh = cv2.threshold(
            frameDelta, self.config.threshold, 255, cv2.THRESH_BINARY
            frameDelta, self.threshold.value, 255, cv2.THRESH_BINARY
        )[1]

        # black out everything in the avg_delta where there isnt motion in the current frame
@@ -79,7 +88,7 @@ class MotionDetector:
        # then look for deltas above the threshold, but only in areas where there is a delta
        # in the current frame. this prevents deltas from previous frames from being included
        thresh = cv2.threshold(
            avg_delta_image, self.config.threshold, 255, cv2.THRESH_BINARY
            avg_delta_image, self.threshold.value, 255, cv2.THRESH_BINARY
        )[1]

        # dilate the thresholded image to fill in holes, then find contours
@@ -94,7 +103,7 @@ class MotionDetector:
        for c in cnts:
            # if the contour is big enough, count it as motion
            contour_area = cv2.contourArea(c)
            if contour_area > self.config.contour_area:
            if contour_area > self.contour_area.value:
                x, y, w, h = cv2.boundingRect(c)
                motion_boxes.append(
                    (
@@ -111,8 +120,7 @@ class MotionDetector:
            # print(self.frame_counter)
            for c in cnts:
                contour_area = cv2.contourArea(c)
                # print(contour_area)
                if contour_area > self.config.contour_area:
                if contour_area > self.contour_area.value:
                    x, y, w, h = cv2.boundingRect(c)
                    cv2.rectangle(
                        thresh_dilated,

@@ -84,6 +84,8 @@ def create_mqtt_client(config: FrigateConfig, camera_metrics):
                    f"Turning on motion for {camera_name} due to detection being enabled."
                )
                camera_metrics[camera_name]["motion_enabled"].value = True
                state_topic = f"{message.topic[:-11]}/motion/state"
                client.publish(state_topic, payload, retain=True)
        elif payload == "OFF":
            if camera_metrics[camera_name]["detection_enabled"].value:
                logger.info(f"Turning off detection for {camera_name} via mqtt")
@@ -145,6 +147,52 @@ def create_mqtt_client(config: FrigateConfig, camera_metrics):
        state_topic = f"{message.topic[:-4]}/state"
        client.publish(state_topic, payload, retain=True)

    def on_motion_threshold_command(client, userdata, message):
        try:
            payload = int(message.payload.decode())
        except ValueError:
            logger.warning(
                f"Received unsupported value at {message.topic}: {message.payload.decode()}"
            )
            return

        logger.debug(f"on_motion_threshold_toggle: {message.topic} {payload}")

        camera_name = message.topic.split("/")[-3]

        motion_settings = config.cameras[camera_name].motion

        logger.info(f"Setting motion threshold for {camera_name} via mqtt: {payload}")
        camera_metrics[camera_name]["motion_threshold"].value = payload
        motion_settings.threshold = payload

        state_topic = f"{message.topic[:-4]}/state"
        client.publish(state_topic, payload, retain=True)

    def on_motion_contour_area_command(client, userdata, message):
        try:
            payload = int(message.payload.decode())
        except ValueError:
            logger.warning(
                f"Received unsupported value at {message.topic}: {message.payload.decode()}"
            )
            return

        logger.debug(f"on_motion_contour_area_toggle: {message.topic} {payload}")

        camera_name = message.topic.split("/")[-3]

        motion_settings = config.cameras[camera_name].motion

        logger.info(
            f"Setting motion contour area for {camera_name} via mqtt: {payload}"
        )
        camera_metrics[camera_name]["motion_contour_area"].value = payload
        motion_settings.contour_area = payload

        state_topic = f"{message.topic[:-4]}/state"
        client.publish(state_topic, payload, retain=True)

    def on_restart_command(client, userdata, message):
        restart_frigate()

@@ -195,6 +243,14 @@ def create_mqtt_client(config: FrigateConfig, camera_metrics):
            f"{mqtt_config.topic_prefix}/{name}/improve_contrast/set",
            on_improve_contrast_command,
        )
        client.message_callback_add(
            f"{mqtt_config.topic_prefix}/{name}/motion_threshold/set",
            on_motion_threshold_command,
        )
        client.message_callback_add(
            f"{mqtt_config.topic_prefix}/{name}/motion_contour_area/set",
            on_motion_contour_area_command,
        )

    client.message_callback_add(
        f"{mqtt_config.topic_prefix}/restart", on_restart_command
@@ -250,6 +306,21 @@ def create_mqtt_client(config: FrigateConfig, camera_metrics):
            "ON" if config.cameras[name].motion.improve_contrast else "OFF",
            retain=True,
        )
        client.publish(
            f"{mqtt_config.topic_prefix}/{name}/motion_threshold/state",
            config.cameras[name].motion.threshold,
            retain=True,
        )
        client.publish(
            f"{mqtt_config.topic_prefix}/{name}/motion_contour_area/state",
            config.cameras[name].motion.contour_area,
            retain=True,
        )
        client.publish(
            f"{mqtt_config.topic_prefix}/{name}/motion",
            "OFF",
            retain=False,
        )

    return client

@@ -180,6 +180,7 @@ class TrackedObject:
|
||||
"frame_time": self.obj_data["frame_time"],
|
||||
"snapshot_time": snapshot_time,
|
||||
"label": self.obj_data["label"],
|
||||
"sub_label": self.obj_data.get("sub_label"),
|
||||
"top_score": self.top_score,
|
||||
"false_positive": self.false_positive,
|
||||
"start_time": self.obj_data["start_time"],
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import requests
|
||||
from frigate.const import PLUS_ENV_VAR, PLUS_API_HOST
|
||||
from requests.models import Response
|
||||
@@ -28,10 +30,23 @@ def get_jpg_bytes(image: ndarray, max_dim: int, quality: int) -> bytes:
|
||||
class PlusApi:
|
||||
def __init__(self) -> None:
|
||||
self.host = PLUS_API_HOST
|
||||
self.key = None
|
||||
if PLUS_ENV_VAR in os.environ:
|
||||
self.key = os.environ.get(PLUS_ENV_VAR)
|
||||
else:
|
||||
# check for the addon options file
|
||||
elif os.path.isfile("/data/options.json"):
|
||||
with open("/data/options.json") as f:
|
||||
raw_options = f.read()
|
||||
options = json.loads(raw_options)
|
||||
self.key = options.get("plus_api_key")
|
||||
|
||||
if self.key is not None and not re.match(
|
||||
r"[a-z0-9]{8}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{4}-[a-z0-9]{12}:[a-z0-9]{40}",
|
||||
self.key,
|
||||
):
|
||||
logger.error("Plus API Key is not formatted correctly.")
|
||||
self.key = None
|
||||
|
||||
self._is_active: bool = self.key is not None
|
||||
self._token_data: dict = {}
|
||||
|
||||
|
||||
@@ -99,11 +99,23 @@ class RecordingMaintainer(threading.Thread):
|
||||
# delete all cached files past the most recent 5
|
||||
keep_count = 5
|
||||
for camera in grouped_recordings.keys():
|
||||
if len(grouped_recordings[camera]) > keep_count:
|
||||
segment_count = len(grouped_recordings[camera])
|
||||
if segment_count > keep_count:
|
||||
####
|
||||
# Need to find a way to tell if these are aging out based on retention settings or if the system is overloaded.
|
||||
####
|
||||
# logger.warning(
|
||||
# f"Too many recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {segment_count}, discarding the rest..."
|
||||
# )
|
||||
to_remove = grouped_recordings[camera][:-keep_count]
|
||||
for f in to_remove:
|
||||
Path(f["cache_path"]).unlink(missing_ok=True)
|
||||
self.end_time_cache.pop(f["cache_path"], None)
|
||||
cache_path = f["cache_path"]
|
||||
####
|
||||
# Need to find a way to tell if these are aging out based on retention settings or if the system is overloaded.
|
||||
####
|
||||
# logger.warning(f"Discarding a recording segment: {cache_path}")
|
||||
Path(cache_path).unlink(missing_ok=True)
|
||||
self.end_time_cache.pop(cache_path, None)
|
||||
grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]
|
||||
|
||||
for camera, recordings in grouped_recordings.items():
|
||||
@@ -155,7 +167,7 @@ class RecordingMaintainer(threading.Thread):
|
||||
f"{cache_path}",
|
||||
]
|
||||
p = sp.run(ffprobe_cmd, capture_output=True)
|
||||
if p.returncode == 0:
|
||||
if p.returncode == 0 and p.stdout.decode():
|
||||
duration = float(p.stdout.decode().strip())
|
||||
end_time = start_time + datetime.timedelta(seconds=duration)
|
||||
self.end_time_cache[cache_path] = (end_time, duration)
|
||||
@@ -264,28 +276,31 @@ class RecordingMaintainer(threading.Thread):
|
||||
file_path = os.path.join(directory, file_name)
|
||||
|
||||
try:
|
||||
start_frame = datetime.datetime.now().timestamp()
|
||||
# copy then delete is required when recordings are stored on some network drives
|
||||
shutil.copyfile(cache_path, file_path)
|
||||
logger.debug(
|
||||
f"Copied {file_path} in {datetime.datetime.now().timestamp()-start_frame} seconds."
|
||||
)
|
||||
os.remove(cache_path)
|
||||
if not os.path.exists(file_path):
|
||||
start_frame = datetime.datetime.now().timestamp()
|
||||
# copy then delete is required when recordings are stored on some network drives
|
||||
shutil.copyfile(cache_path, file_path)
|
||||
logger.debug(
|
||||
f"Copied {file_path} in {datetime.datetime.now().timestamp()-start_frame} seconds."
|
||||
)
|
||||
|
||||
rand_id = "".join(
|
||||
random.choices(string.ascii_lowercase + string.digits, k=6)
|
||||
)
|
||||
Recordings.create(
|
||||
id=f"{start_time.timestamp()}-{rand_id}",
|
||||
camera=camera,
|
||||
path=file_path,
|
||||
start_time=start_time.timestamp(),
|
||||
end_time=end_time.timestamp(),
|
||||
duration=duration,
|
||||
motion=motion_count,
|
||||
# TODO: update this to store list of active objects at some point
|
||||
objects=active_count,
|
||||
)
|
||||
rand_id = "".join(
|
||||
random.choices(string.ascii_lowercase + string.digits, k=6)
|
||||
)
|
||||
Recordings.create(
|
||||
id=f"{start_time.timestamp()}-{rand_id}",
|
||||
camera=camera,
|
||||
path=file_path,
|
||||
start_time=start_time.timestamp(),
|
||||
end_time=end_time.timestamp(),
|
||||
duration=duration,
|
||||
motion=motion_count,
|
||||
# TODO: update this to store list of active objects at some point
|
||||
objects=active_count,
|
||||
)
|
||||
else:
|
||||
logger.warning(f"Ignoring segment because {file_path} already exists.")
|
||||
os.remove(cache_path)
|
||||
except Exception as e:
|
||||
logger.error(f"Unable to store recording segment {cache_path}")
|
||||
Path(cache_path).unlink(missing_ok=True)
|
||||
|
||||
@@ -22,7 +22,8 @@ logger = logging.getLogger(__name__)
|
||||
def get_latest_version() -> str:
|
||||
try:
|
||||
request = requests.get(
|
||||
"https://api.github.com/repos/blakeblackshear/frigate/releases/latest"
|
||||
"https://api.github.com/repos/blakeblackshear/frigate/releases/latest",
|
||||
timeout=10,
|
||||
)
|
||||
except:
|
||||
return "unknown"
|
||||
|
||||
4
frigate/test/const.py
Normal file
4
frigate/test/const.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Consts for testing."""
|
||||
|
||||
TEST_DB = "test.db"
|
||||
TEST_DB_CLEANUPS = ["test.db", "test.db-shm", "test.db-wal"]
|
||||
328
frigate/test/test_http.py
Normal file
328
frigate/test/test_http.py
Normal file
@@ -0,0 +1,328 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
|
||||
from peewee_migrate import Router
|
||||
from playhouse.sqlite_ext import SqliteExtDatabase
|
||||
from playhouse.sqliteq import SqliteQueueDatabase
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.http import create_app
|
||||
from frigate.models import Event, Recordings
|
||||
from frigate.plus import PlusApi
|
||||
|
||||
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
|
||||
|
||||
|
||||
class TestHttp(unittest.TestCase):
|
||||
def setUp(self):
|
||||
# setup clean database for each test run
|
||||
migrate_db = SqliteExtDatabase("test.db")
|
||||
del logging.getLogger("peewee_migrate").handlers[:]
|
||||
router = Router(migrate_db)
|
||||
router.run()
|
||||
migrate_db.close()
|
||||
self.db = SqliteQueueDatabase(TEST_DB)
|
||||
models = [Event, Recordings]
|
||||
self.db.bind(models)
|
||||
|
||||
self.minimal_config = {
|
||||
"mqtt": {"host": "mqtt"},
|
||||
"cameras": {
|
||||
"front_door": {
|
||||
"ffmpeg": {
|
||||
"inputs": [
|
||||
{"path": "rtsp://10.0.0.1:554/video", "roles": ["detect"]}
|
||||
]
|
||||
},
|
||||
"detect": {
|
||||
"height": 1080,
|
||||
"width": 1920,
|
||||
"fps": 5,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
self.test_stats = {
|
||||
"detection_fps": 13.7,
|
||||
"detectors": {
|
||||
"cpu1": {
|
||||
"detection_start": 0.0,
|
||||
"inference_speed": 91.43,
|
||||
"pid": 42,
|
||||
},
|
||||
"cpu2": {
|
||||
"detection_start": 0.0,
|
||||
"inference_speed": 84.99,
|
||||
"pid": 44,
|
||||
},
|
||||
},
|
||||
"front_door": {
|
||||
"camera_fps": 0.0,
|
||||
"capture_pid": 53,
|
||||
"detection_fps": 0.0,
|
||||
"pid": 52,
|
||||
"process_fps": 0.0,
|
||||
"skipped_fps": 0.0,
|
||||
},
|
||||
"service": {
|
||||
"storage": {
|
||||
"/dev/shm": {
|
||||
"free": 50.5,
|
||||
"mount_type": "tmpfs",
|
||||
"total": 67.1,
|
||||
"used": 16.6,
|
||||
},
|
||||
"/media/frigate/clips": {
|
||||
"free": 42429.9,
|
||||
"mount_type": "ext4",
|
||||
"total": 244529.7,
|
||||
"used": 189607.0,
|
||||
},
|
||||
"/media/frigate/recordings": {
|
||||
"free": 0.2,
|
||||
"mount_type": "ext4",
|
||||
"total": 8.0,
|
||||
"used": 7.8,
|
||||
},
|
||||
"/tmp/cache": {
|
||||
"free": 976.8,
|
||||
"mount_type": "tmpfs",
|
||||
"total": 1000.0,
|
||||
"used": 23.2,
|
||||
},
|
||||
},
|
||||
"uptime": 101113,
|
||||
"version": "0.10.1",
|
||||
"latest_version": "0.11",
|
||||
},
|
||||
}
|
||||
|
||||
def tearDown(self):
|
||||
if not self.db.is_closed():
|
||||
self.db.close()
|
||||
|
||||
try:
|
||||
for file in TEST_DB_CLEANUPS:
|
||||
os.remove(file)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
def test_get_event_list(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
id2 = "7890.random"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
events = client.get(f"/events").json
|
||||
assert events
|
||||
assert len(events) == 1
|
||||
assert events[0]["id"] == id
|
||||
_insert_mock_event(id2)
|
||||
events = client.get(f"/events").json
|
||||
assert events
|
||||
assert len(events) == 2
|
||||
events = client.get(
|
||||
f"/events",
|
||||
query_string={"limit": 1},
|
||||
).json
|
||||
assert events
|
||||
assert len(events) == 1
|
||||
events = client.get(
|
||||
f"/events",
|
||||
query_string={"has_clip": 0},
|
||||
).json
|
||||
assert not events
|
||||
|
||||
def test_get_good_event(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
event = client.get(f"/events/{id}").json
|
||||
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
assert event == model_to_dict(Event.get(Event.id == id))
|
||||
|
||||
def test_get_bad_event(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
bad_id = "654321.other"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
event = client.get(f"/events/{bad_id}").json
|
||||
|
||||
assert not event
|
||||
|
||||
def test_delete_event(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
client.delete(f"/events/{id}")
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert not event
|
||||
|
||||
def test_event_retention(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
client.post(f"/events/{id}/retain")
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
assert event["retain_indefinitely"] == True
|
||||
client.delete(f"/events/{id}/retain")
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
assert event["retain_indefinitely"] == False
|
||||
|
||||
def test_set_delete_sub_label(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
sub_label = "sub"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
client.post(
|
||||
f"/events/{id}/sub_label",
|
||||
data=json.dumps({"subLabel": sub_label}),
|
||||
content_type="application/json",
|
||||
)
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
assert event["sub_label"] == sub_label
|
||||
client.post(
|
||||
f"/events/{id}/sub_label",
|
||||
data=json.dumps({"subLabel": ""}),
|
||||
content_type="application/json",
|
||||
)
|
||||
event = client.get(f"/events/{id}").json
|
||||
assert event
|
||||
assert event["id"] == id
|
||||
assert event["sub_label"] == ""
|
||||
|
||||
def test_sub_label_list(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config), self.db, None, None, PlusApi()
|
||||
)
|
||||
id = "123456.random"
|
||||
sub_label = "sub"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_event(id)
|
||||
client.post(
|
||||
f"/events/{id}/sub_label",
|
||||
data=json.dumps({"subLabel": sub_label}),
|
||||
content_type="application/json",
|
||||
)
|
||||
sub_labels = client.get("/sub_labels").json
|
||||
assert sub_labels
|
||||
assert sub_labels == [sub_label]
|
||||
|
||||
def test_config(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config).runtime_config,
|
||||
self.db,
|
||||
None,
|
||||
None,
|
||||
PlusApi(),
|
||||
)
|
||||
|
||||
with app.test_client() as client:
|
||||
config = client.get("/config").json
|
||||
assert config
|
||||
assert config["cameras"]["front_door"]
|
||||
|
||||
def test_recordings(self):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config).runtime_config,
|
||||
self.db,
|
||||
None,
|
||||
None,
|
||||
PlusApi(),
|
||||
)
|
||||
id = "123456.random"
|
||||
|
||||
with app.test_client() as client:
|
||||
_insert_mock_recording(id)
|
||||
recording = client.get("/front_door/recordings").json
|
||||
assert recording
|
||||
assert recording[0]["id"] == id
|
||||
|
||||
@patch("frigate.http.stats_snapshot")
|
||||
def test_stats(self, mock_stats):
|
||||
app = create_app(
|
||||
FrigateConfig(**self.minimal_config).runtime_config,
|
||||
self.db,
|
||||
None,
|
||||
None,
|
||||
PlusApi(),
|
||||
)
|
||||
mock_stats.return_value = self.test_stats
|
||||
|
||||
with app.test_client() as client:
|
||||
stats = client.get("/stats").json
|
||||
assert stats == self.test_stats
|
||||
|
||||
|
||||
def _insert_mock_event(id: str) -> Event:
|
||||
"""Inserts a basic event model with a given id."""
|
||||
return Event.insert(
|
||||
id=id,
|
||||
label="Mock",
|
||||
camera="front_door",
|
||||
start_time=datetime.datetime.now().timestamp(),
|
||||
end_time=datetime.datetime.now().timestamp() + 20,
|
||||
top_score=100,
|
||||
false_positive=False,
|
||||
zones=list(),
|
||||
thumbnail="",
|
||||
region=[],
|
||||
box=[],
|
||||
area=0,
|
||||
has_clip=True,
|
||||
has_snapshot=True,
|
||||
).execute()
|
||||
|
||||
|
||||
def _insert_mock_recording(id: str) -> Event:
|
||||
"""Inserts a basic recording model with a given id."""
|
||||
return Recordings.insert(
|
||||
id=id,
|
||||
camera="front_door",
|
||||
path=f"/recordings/{id}",
|
||||
start_time=datetime.datetime.now().timestamp() - 50,
|
||||
end_time=datetime.datetime.now().timestamp() - 60,
|
||||
duration=10,
|
||||
motion=True,
|
||||
objects=True,
|
||||
).execute()
|
||||
@@ -16,6 +16,8 @@ class CameraMetricsTypes(TypedDict):
|
||||
frame_queue: Queue
|
||||
motion_enabled: Synchronized
|
||||
improve_contrast_enabled: Synchronized
|
||||
motion_threshold: Synchronized
|
||||
motion_contour_area: Synchronized
|
||||
process: Optional[Process]
|
||||
process_fps: Synchronized
|
||||
read_start: Synchronized
|
||||
|
||||
@@ -363,13 +363,19 @@ def track_camera(
|
||||
detection_enabled = process_info["detection_enabled"]
|
||||
motion_enabled = process_info["motion_enabled"]
|
||||
improve_contrast_enabled = process_info["improve_contrast_enabled"]
|
||||
motion_threshold = process_info["motion_threshold"]
|
||||
motion_contour_area = process_info["motion_contour_area"]
|
||||
|
||||
frame_shape = config.frame_shape
|
||||
objects_to_track = config.objects.track
|
||||
object_filters = config.objects.filters
|
||||
|
||||
motion_detector = MotionDetector(
|
||||
frame_shape, config.motion, improve_contrast_enabled
|
||||
frame_shape,
|
||||
config.motion,
|
||||
improve_contrast_enabled,
|
||||
motion_threshold,
|
||||
motion_contour_area,
|
||||
)
|
||||
object_detector = RemoteObjectDetector(
|
||||
name, labelmap, detection_queue, result_connection, model_shape
|
||||
@@ -434,7 +440,13 @@ def intersects_any(box_a, boxes):
|
||||
|
||||
|
||||
def detect(
|
||||
object_detector, frame, model_shape, region, objects_to_track, object_filters
|
||||
detect_config: DetectConfig,
|
||||
object_detector,
|
||||
frame,
|
||||
model_shape,
|
||||
region,
|
||||
objects_to_track,
|
||||
object_filters,
|
||||
):
|
||||
tensor_input = create_tensor_input(frame, model_shape, region)
|
||||
|
||||
@@ -443,10 +455,15 @@ def detect(
|
||||
for d in region_detections:
|
||||
box = d[2]
|
||||
size = region[2] - region[0]
|
||||
x_min = int((box[1] * size) + region[0])
|
||||
y_min = int((box[0] * size) + region[1])
|
||||
x_max = int((box[3] * size) + region[0])
|
||||
y_max = int((box[2] * size) + region[1])
|
||||
x_min = int(max(0, (box[1] * size) + region[0]))
|
||||
y_min = int(max(0, (box[0] * size) + region[1]))
|
||||
x_max = int(min(detect_config.width - 1, (box[3] * size) + region[0]))
|
||||
y_max = int(min(detect_config.height - 1, (box[2] * size) + region[1]))
|
||||
|
||||
# ignore objects that were detected outside the frame
|
||||
if (x_min >= detect_config.width - 1) or (y_min >= detect_config.height - 1):
|
||||
continue
|
||||
|
||||
width = x_max - x_min
|
||||
height = y_max - y_min
|
||||
area = width * height
|
||||
@@ -614,6 +631,7 @@ def process_frames(
|
||||
for region in regions:
|
||||
detections.extend(
|
||||
detect(
|
||||
detect_config,
|
||||
object_detector,
|
||||
frame,
|
||||
model_shape,
|
||||
@@ -641,6 +659,7 @@ def process_frames(
|
||||
|
||||
# apply non-maxima suppression to suppress weak, overlapping bounding boxes
|
||||
# o[2] is the box of the object: xmin, ymin, xmax, ymax
|
||||
# apply max/min to ensure values do not exceed the known frame size
|
||||
boxes = [
|
||||
(
|
||||
o[2][0],
|
||||
@@ -672,6 +691,7 @@ def process_frames(
|
||||
|
||||
selected_objects.extend(
|
||||
detect(
|
||||
detect_config,
|
||||
object_detector,
|
||||
frame,
|
||||
model_shape,
|
||||
|
||||
@@ -68,6 +68,7 @@ export const handlers = [
|
||||
top_score: Math.random(),
|
||||
zones: ['front_patio'],
|
||||
thumbnail: '/9j/4aa...',
|
||||
camera: 'camera_name',
|
||||
}))
|
||||
)
|
||||
);
|
||||
|
||||
175
web/package-lock.json
generated
175
web/package-lock.json
generated
@@ -17,9 +17,9 @@
|
||||
"preact-async-route": "^2.2.1",
|
||||
"preact-router": "^4.0.1",
|
||||
"swr": "^1.2.2",
|
||||
"video.js": "^7.17.0",
|
||||
"video.js": "^7.20.2",
|
||||
"videojs-playlist": "^5.0.0",
|
||||
"videojs-seek-buttons": "^2.2.0"
|
||||
"videojs-seek-buttons": "^2.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/preset-env": "^7.16.11",
|
||||
@@ -30,6 +30,7 @@
|
||||
"@testing-library/preact": "^2.0.1",
|
||||
"@testing-library/preact-hooks": "^1.1.0",
|
||||
"@testing-library/user-event": "^13.5.0",
|
||||
"@types/video.js": "^7.3.44",
|
||||
"@typescript-eslint/eslint-plugin": "^5.18.0",
|
||||
"@typescript-eslint/parser": "^5.18.0",
|
||||
"autoprefixer": "^10.4.2",
|
||||
@@ -3234,6 +3235,12 @@
|
||||
"@types/jest": "*"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/video.js": {
|
||||
"version": "7.3.44",
|
||||
"resolved": "https://registry.npmjs.org/@types/video.js/-/video.js-7.3.44.tgz",
|
||||
"integrity": "sha512-ov1HXNOjUkt38al/ybw8cj1181I5P3sOXdrqBR8AkDCqQX6GYwxOCzdmsGn/LDwKHTZ/3veNC9Ad6BjR5wSq4g==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/yargs": {
|
||||
"version": "16.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz",
|
||||
@@ -3867,17 +3874,17 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@videojs/http-streaming": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-2.12.0.tgz",
|
||||
"integrity": "sha512-vdQA0lDYBXGJqV2T02AGqg1w4dcgyRoN+bYG+G8uF4DpCEMhEtUI0BA4tRu4/Njar8w/9D5k0a1KX40pcvM3fA==",
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-2.14.2.tgz",
|
||||
"integrity": "sha512-K1raSfO/pq5r8iUas3OSYni0kXOj91n8ealIpV02khghzGv9LQ6O3YUqYd/eAhJ1HIrmZWOnrYpK/P+mhUExXQ==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "3.0.4",
|
||||
"aes-decrypter": "3.1.2",
|
||||
"@videojs/vhs-utils": "3.0.5",
|
||||
"aes-decrypter": "3.1.3",
|
||||
"global": "^4.4.0",
|
||||
"m3u8-parser": "4.7.0",
|
||||
"mpd-parser": "0.19.2",
|
||||
"mux.js": "5.14.1",
|
||||
"m3u8-parser": "4.7.1",
|
||||
"mpd-parser": "0.21.1",
|
||||
"mux.js": "6.0.1",
|
||||
"video.js": "^6 || ^7"
|
||||
},
|
||||
"engines": {
|
||||
@@ -3889,9 +3896,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@videojs/vhs-utils": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-3.0.4.tgz",
|
||||
"integrity": "sha512-hui4zOj2I1kLzDgf8QDVxD3IzrwjS/43KiS8IHQO0OeeSsb4pB/lgNt1NG7Dv0wMQfCccUpMVLGcK618s890Yg==",
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-3.0.5.tgz",
|
||||
"integrity": "sha512-PKVgdo8/GReqdx512F+ombhS+Bzogiofy1LgAj4tN8PfdBx3HSS7V5WfJotKTqtOWGwVfSWsrYN/t09/DSryrw==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"global": "^4.4.0",
|
||||
@@ -3978,12 +3985,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/aes-decrypter": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/aes-decrypter/-/aes-decrypter-3.1.2.tgz",
|
||||
"integrity": "sha512-42nRwfQuPRj9R1zqZBdoxnaAmnIFyDi0MNyTVhjdFOd8fifXKKRfwIHIZ6AMn1or4x5WONzjwRTbTWcsIQ0O4A==",
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/aes-decrypter/-/aes-decrypter-3.1.3.tgz",
|
||||
"integrity": "sha512-VkG9g4BbhMBy+N5/XodDeV6F02chEk9IpgRTq/0bS80y4dzy79VH2Gtms02VXomf3HmyRe3yyJYkJ990ns+d6A==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.0",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"global": "^4.4.0",
|
||||
"pkcs7": "^1.0.4"
|
||||
}
|
||||
@@ -9409,12 +9416,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/m3u8-parser": {
|
||||
"version": "4.7.0",
|
||||
"resolved": "https://registry.npmjs.org/m3u8-parser/-/m3u8-parser-4.7.0.tgz",
|
||||
"integrity": "sha512-48l/OwRyjBm+QhNNigEEcRcgbRvnUjL7rxs597HmW9QSNbyNvt+RcZ9T/d9vxi9A9z7EZrB1POtZYhdRlwYQkQ==",
|
||||
"version": "4.7.1",
|
||||
"resolved": "https://registry.npmjs.org/m3u8-parser/-/m3u8-parser-4.7.1.tgz",
|
||||
"integrity": "sha512-pbrQwiMiq+MmI9bl7UjtPT3AK603PV9bogNlr83uC+X9IoxqL5E4k7kU7fMQ0dpRgxgeSMygqUa0IMLQNXLBNA==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.0",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"global": "^4.4.0"
|
||||
}
|
||||
},
|
||||
@@ -9545,12 +9552,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/mpd-parser": {
|
||||
"version": "0.19.2",
|
||||
"resolved": "https://registry.npmjs.org/mpd-parser/-/mpd-parser-0.19.2.tgz",
|
||||
"integrity": "sha512-M5tAIdtBM2TN+OSTz/37T7V+h9ZLvhyNqq4TNIdtjAQ/Hg8UnMRf5nJQDjffcXag3POXi31yUJQEKOXdcAM/nw==",
|
||||
"version": "0.21.1",
|
||||
"resolved": "https://registry.npmjs.org/mpd-parser/-/mpd-parser-0.21.1.tgz",
|
||||
"integrity": "sha512-BxlSXWbKE1n7eyEPBnTEkrzhS3PdmkkKdM1pgKbPnPOH0WFZIc0sPOWi7m0Uo3Wd2a4Or8Qf4ZbS7+ASqQ49fw==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.2",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"@xmldom/xmldom": "^0.7.2",
|
||||
"global": "^4.4.0"
|
||||
},
|
||||
@@ -9715,11 +9722,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/mux.js": {
|
||||
"version": "5.14.1",
|
||||
"resolved": "https://registry.npmjs.org/mux.js/-/mux.js-5.14.1.tgz",
|
||||
"integrity": "sha512-38kA/xjWRDzMbcpHQfhKbJAME8eTZVsb9U2Puk890oGvGqnyu8B/AkKdICKPHkigfqYX9MY20vje88TP14nhog==",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/mux.js/-/mux.js-6.0.1.tgz",
|
||||
"integrity": "sha512-22CHb59rH8pWGcPGW5Og7JngJ9s+z4XuSlYvnxhLuc58cA1WqGDQPzuG8I+sPm1/p0CdgpzVTaKW408k5DNn8w==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.11.2"
|
||||
"@babel/runtime": "^7.11.2",
|
||||
"global": "^4.4.0"
|
||||
},
|
||||
"bin": {
|
||||
"muxjs-transmux": "bin/transmux.js"
|
||||
@@ -11755,20 +11763,20 @@
|
||||
}
|
||||
},
|
||||
"node_modules/video.js": {
|
||||
"version": "7.17.0",
|
||||
"resolved": "https://registry.npmjs.org/video.js/-/video.js-7.17.0.tgz",
|
||||
"integrity": "sha512-8RbLu9+Pdpep9OTPncUHIvZXFgn/7hKdPnSTE/lGSnlFSucXtTUBp41R7NDwncscMLQ0WgazUbmFlvr4MNWMbA==",
|
||||
"version": "7.20.2",
|
||||
"resolved": "https://registry.npmjs.org/video.js/-/video.js-7.20.2.tgz",
|
||||
"integrity": "sha512-hdvAHKAyaL6bCDkeu0pPtFYKi1EDaOUovm7FN1xqBDolUxgH8FKy1WIgTS+Ouuaw7R54SCTcSeXjZEizhy9ouQ==",
|
||||
"dependencies": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/http-streaming": "2.12.0",
|
||||
"@videojs/vhs-utils": "^3.0.3",
|
||||
"@videojs/http-streaming": "2.14.2",
|
||||
"@videojs/vhs-utils": "^3.0.4",
|
||||
"@videojs/xhr": "2.6.0",
|
||||
"aes-decrypter": "3.1.2",
|
||||
"aes-decrypter": "3.1.3",
|
||||
"global": "^4.4.0",
|
||||
"keycode": "^2.2.0",
|
||||
"m3u8-parser": "4.7.0",
|
||||
"mpd-parser": "0.19.2",
|
||||
"mux.js": "5.14.1",
|
||||
"m3u8-parser": "4.7.1",
|
||||
"mpd-parser": "0.21.1",
|
||||
"mux.js": "6.0.1",
|
||||
"safe-json-parse": "4.0.0",
|
||||
"videojs-font": "3.2.0",
|
||||
"videojs-vtt.js": "^0.15.3"
|
||||
@@ -11792,9 +11800,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/videojs-seek-buttons": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/videojs-seek-buttons/-/videojs-seek-buttons-2.2.0.tgz",
|
||||
"integrity": "sha512-yjCA6ntq+8fRKgZi/H6QJlghQWgA1x9oSRl6wfLODAcujhynDXetwMgRKGgl4NlV5af2bKY6erNtJ0kOBko/nQ==",
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/videojs-seek-buttons/-/videojs-seek-buttons-2.2.1.tgz",
|
||||
"integrity": "sha512-tXInD8ElUddyc1vxkGsVNhxlFtz8JC4VagPOSyL8mj7LI7oXjLGxni2pjx2wqzsV3PaIBMMH47MBtsNWf+wbtw==",
|
||||
"dependencies": {
|
||||
"global": "^4.4.0",
|
||||
"video.js": "^6 || ^7"
|
||||
@@ -14504,6 +14512,12 @@
|
||||
"@types/jest": "*"
|
||||
}
|
||||
},
|
||||
"@types/video.js": {
|
||||
"version": "7.3.44",
|
||||
"resolved": "https://registry.npmjs.org/@types/video.js/-/video.js-7.3.44.tgz",
|
||||
"integrity": "sha512-ov1HXNOjUkt38al/ybw8cj1181I5P3sOXdrqBR8AkDCqQX6GYwxOCzdmsGn/LDwKHTZ/3veNC9Ad6BjR5wSq4g==",
|
||||
"dev": true
|
||||
},
|
||||
"@types/yargs": {
|
||||
"version": "16.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz",
|
||||
@@ -14881,24 +14895,24 @@
|
||||
}
|
||||
},
|
||||
"@videojs/http-streaming": {
|
||||
"version": "2.12.0",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-2.12.0.tgz",
|
||||
"integrity": "sha512-vdQA0lDYBXGJqV2T02AGqg1w4dcgyRoN+bYG+G8uF4DpCEMhEtUI0BA4tRu4/Njar8w/9D5k0a1KX40pcvM3fA==",
|
||||
"version": "2.14.2",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/http-streaming/-/http-streaming-2.14.2.tgz",
|
||||
"integrity": "sha512-K1raSfO/pq5r8iUas3OSYni0kXOj91n8ealIpV02khghzGv9LQ6O3YUqYd/eAhJ1HIrmZWOnrYpK/P+mhUExXQ==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "3.0.4",
|
||||
"aes-decrypter": "3.1.2",
|
||||
"@videojs/vhs-utils": "3.0.5",
|
||||
"aes-decrypter": "3.1.3",
|
||||
"global": "^4.4.0",
|
||||
"m3u8-parser": "4.7.0",
|
||||
"mpd-parser": "0.19.2",
|
||||
"mux.js": "5.14.1",
|
||||
"m3u8-parser": "4.7.1",
|
||||
"mpd-parser": "0.21.1",
|
||||
"mux.js": "6.0.1",
|
||||
"video.js": "^6 || ^7"
|
||||
}
|
||||
},
|
||||
"@videojs/vhs-utils": {
|
||||
"version": "3.0.4",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-3.0.4.tgz",
|
||||
"integrity": "sha512-hui4zOj2I1kLzDgf8QDVxD3IzrwjS/43KiS8IHQO0OeeSsb4pB/lgNt1NG7Dv0wMQfCccUpMVLGcK618s890Yg==",
|
||||
"version": "3.0.5",
|
||||
"resolved": "https://registry.npmjs.org/@videojs/vhs-utils/-/vhs-utils-3.0.5.tgz",
|
||||
"integrity": "sha512-PKVgdo8/GReqdx512F+ombhS+Bzogiofy1LgAj4tN8PfdBx3HSS7V5WfJotKTqtOWGwVfSWsrYN/t09/DSryrw==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"global": "^4.4.0",
|
||||
@@ -14967,12 +14981,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"aes-decrypter": {
|
||||
"version": "3.1.2",
|
||||
"resolved": "https://registry.npmjs.org/aes-decrypter/-/aes-decrypter-3.1.2.tgz",
|
||||
"integrity": "sha512-42nRwfQuPRj9R1zqZBdoxnaAmnIFyDi0MNyTVhjdFOd8fifXKKRfwIHIZ6AMn1or4x5WONzjwRTbTWcsIQ0O4A==",
|
||||
"version": "3.1.3",
|
||||
"resolved": "https://registry.npmjs.org/aes-decrypter/-/aes-decrypter-3.1.3.tgz",
|
||||
"integrity": "sha512-VkG9g4BbhMBy+N5/XodDeV6F02chEk9IpgRTq/0bS80y4dzy79VH2Gtms02VXomf3HmyRe3yyJYkJ990ns+d6A==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.0",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"global": "^4.4.0",
|
||||
"pkcs7": "^1.0.4"
|
||||
}
|
||||
@@ -18873,12 +18887,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"m3u8-parser": {
|
||||
"version": "4.7.0",
|
||||
"resolved": "https://registry.npmjs.org/m3u8-parser/-/m3u8-parser-4.7.0.tgz",
|
||||
"integrity": "sha512-48l/OwRyjBm+QhNNigEEcRcgbRvnUjL7rxs597HmW9QSNbyNvt+RcZ9T/d9vxi9A9z7EZrB1POtZYhdRlwYQkQ==",
|
||||
"version": "4.7.1",
|
||||
"resolved": "https://registry.npmjs.org/m3u8-parser/-/m3u8-parser-4.7.1.tgz",
|
||||
"integrity": "sha512-pbrQwiMiq+MmI9bl7UjtPT3AK603PV9bogNlr83uC+X9IoxqL5E4k7kU7fMQ0dpRgxgeSMygqUa0IMLQNXLBNA==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.0",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"global": "^4.4.0"
|
||||
}
|
||||
},
|
||||
@@ -18979,12 +18993,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"mpd-parser": {
|
||||
"version": "0.19.2",
|
||||
"resolved": "https://registry.npmjs.org/mpd-parser/-/mpd-parser-0.19.2.tgz",
|
||||
"integrity": "sha512-M5tAIdtBM2TN+OSTz/37T7V+h9ZLvhyNqq4TNIdtjAQ/Hg8UnMRf5nJQDjffcXag3POXi31yUJQEKOXdcAM/nw==",
|
||||
"version": "0.21.1",
|
||||
"resolved": "https://registry.npmjs.org/mpd-parser/-/mpd-parser-0.21.1.tgz",
|
||||
"integrity": "sha512-BxlSXWbKE1n7eyEPBnTEkrzhS3PdmkkKdM1pgKbPnPOH0WFZIc0sPOWi7m0Uo3Wd2a4Or8Qf4ZbS7+ASqQ49fw==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/vhs-utils": "^3.0.2",
|
||||
"@videojs/vhs-utils": "^3.0.5",
|
||||
"@xmldom/xmldom": "^0.7.2",
|
||||
"global": "^4.4.0"
|
||||
}
|
||||
@@ -19107,11 +19121,12 @@
|
||||
"dev": true
|
||||
},
|
||||
"mux.js": {
|
||||
"version": "5.14.1",
|
||||
"resolved": "https://registry.npmjs.org/mux.js/-/mux.js-5.14.1.tgz",
|
||||
"integrity": "sha512-38kA/xjWRDzMbcpHQfhKbJAME8eTZVsb9U2Puk890oGvGqnyu8B/AkKdICKPHkigfqYX9MY20vje88TP14nhog==",
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/mux.js/-/mux.js-6.0.1.tgz",
|
||||
"integrity": "sha512-22CHb59rH8pWGcPGW5Og7JngJ9s+z4XuSlYvnxhLuc58cA1WqGDQPzuG8I+sPm1/p0CdgpzVTaKW408k5DNn8w==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.11.2"
|
||||
"@babel/runtime": "^7.11.2",
|
||||
"global": "^4.4.0"
|
||||
}
|
||||
},
|
||||
"nanoid": {
|
||||
@@ -20599,20 +20614,20 @@
|
||||
}
|
||||
},
|
||||
"video.js": {
|
||||
"version": "7.17.0",
|
||||
"resolved": "https://registry.npmjs.org/video.js/-/video.js-7.17.0.tgz",
|
||||
"integrity": "sha512-8RbLu9+Pdpep9OTPncUHIvZXFgn/7hKdPnSTE/lGSnlFSucXtTUBp41R7NDwncscMLQ0WgazUbmFlvr4MNWMbA==",
|
||||
"version": "7.20.2",
|
||||
"resolved": "https://registry.npmjs.org/video.js/-/video.js-7.20.2.tgz",
|
||||
"integrity": "sha512-hdvAHKAyaL6bCDkeu0pPtFYKi1EDaOUovm7FN1xqBDolUxgH8FKy1WIgTS+Ouuaw7R54SCTcSeXjZEizhy9ouQ==",
|
||||
"requires": {
|
||||
"@babel/runtime": "^7.12.5",
|
||||
"@videojs/http-streaming": "2.12.0",
|
||||
"@videojs/vhs-utils": "^3.0.3",
|
||||
"@videojs/http-streaming": "2.14.2",
|
||||
"@videojs/vhs-utils": "^3.0.4",
|
||||
"@videojs/xhr": "2.6.0",
|
||||
"aes-decrypter": "3.1.2",
|
||||
"aes-decrypter": "3.1.3",
|
||||
"global": "^4.4.0",
|
||||
"keycode": "^2.2.0",
|
||||
"m3u8-parser": "4.7.0",
|
||||
"mpd-parser": "0.19.2",
|
||||
"mux.js": "5.14.1",
|
||||
"m3u8-parser": "4.7.1",
|
||||
"mpd-parser": "0.21.1",
|
||||
"mux.js": "6.0.1",
|
||||
"safe-json-parse": "4.0.0",
|
||||
"videojs-font": "3.2.0",
|
||||
"videojs-vtt.js": "^0.15.3"
|
||||
@@ -20633,9 +20648,9 @@
|
||||
}
|
||||
},
|
||||
"videojs-seek-buttons": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/videojs-seek-buttons/-/videojs-seek-buttons-2.2.0.tgz",
|
||||
"integrity": "sha512-yjCA6ntq+8fRKgZi/H6QJlghQWgA1x9oSRl6wfLODAcujhynDXetwMgRKGgl4NlV5af2bKY6erNtJ0kOBko/nQ==",
|
||||
"version": "2.2.1",
|
||||
"resolved": "https://registry.npmjs.org/videojs-seek-buttons/-/videojs-seek-buttons-2.2.1.tgz",
|
||||
"integrity": "sha512-tXInD8ElUddyc1vxkGsVNhxlFtz8JC4VagPOSyL8mj7LI7oXjLGxni2pjx2wqzsV3PaIBMMH47MBtsNWf+wbtw==",
|
||||
"requires": {
|
||||
"global": "^4.4.0",
|
||||
"video.js": "^6 || ^7"
|
||||
|
||||
@@ -19,9 +19,9 @@
|
||||
"preact-async-route": "^2.2.1",
|
||||
"preact-router": "^4.0.1",
|
||||
"swr": "^1.2.2",
|
||||
"video.js": "^7.17.0",
|
||||
"video.js": "^7.20.2",
|
||||
"videojs-playlist": "^5.0.0",
|
||||
"videojs-seek-buttons": "^2.2.0"
|
||||
"videojs-seek-buttons": "^2.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/preset-env": "^7.16.11",
|
||||
@@ -32,6 +32,7 @@
|
||||
"@testing-library/preact": "^2.0.1",
|
||||
"@testing-library/preact-hooks": "^1.1.0",
|
||||
"@testing-library/user-event": "^13.5.0",
|
||||
"@types/video.js": "^7.3.44",
|
||||
"@typescript-eslint/eslint-plugin": "^5.18.0",
|
||||
"@typescript-eslint/parser": "^5.18.0",
|
||||
"autoprefixer": "^10.4.2",
|
||||
|
||||
@@ -65,7 +65,7 @@ function CameraSection({ sortedCameras }) {
|
||||
<Fragment>
|
||||
<Separator />
|
||||
{sortedCameras.map(([camera]) => (
|
||||
<Destination key={camera} href={`/cameras/${camera}`} text={camera} />
|
||||
<Destination key={camera} href={`/cameras/${camera}`} text={camera.replaceAll('_', ' ')} />
|
||||
))}
|
||||
<Separator />
|
||||
</Fragment>
|
||||
@@ -83,7 +83,7 @@ function RecordingSection({ sortedCameras }) {
|
||||
key={camera}
|
||||
path={`/recording/${camera}/:date?/:hour?/:seconds?`}
|
||||
href={`/recording/${camera}`}
|
||||
text={camera}
|
||||
text={camera.replaceAll('_', ' ')}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -3,17 +3,17 @@ import { useCallback, useEffect, useRef, useState } from 'preact/hooks';
|
||||
import { useApiHost } from '../../api';
|
||||
import { isNullOrUndefined } from '../../utils/objectUtils';
|
||||
|
||||
import 'videojs-seek-buttons';
|
||||
import 'video.js/dist/video-js.css';
|
||||
import 'videojs-seek-buttons/dist/videojs-seek-buttons.css';
|
||||
|
||||
import videojs, { VideoJsPlayer } from 'video.js';
|
||||
|
||||
interface OnTimeUpdateEvent {
|
||||
timestamp: number;
|
||||
isPlaying: boolean;
|
||||
}
|
||||
|
||||
interface VideoProperties {
|
||||
posterUrl: string;
|
||||
videoUrl: string;
|
||||
height: number;
|
||||
}
|
||||
|
||||
interface HistoryVideoProps {
|
||||
id?: string;
|
||||
isPlaying: boolean;
|
||||
@@ -32,68 +32,39 @@ export const HistoryVideo = ({
|
||||
onPlay,
|
||||
}: HistoryVideoProps) => {
|
||||
const apiHost = useApiHost();
|
||||
const videoRef = useRef<HTMLVideoElement|null>(null);
|
||||
const [videoHeight, setVideoHeight] = useState<number>(0);
|
||||
const [videoProperties, setVideoProperties] = useState<VideoProperties>({
|
||||
posterUrl: '',
|
||||
videoUrl: '',
|
||||
height: 0,
|
||||
});
|
||||
const videoRef = useRef<HTMLVideoElement>(null);
|
||||
|
||||
const currentVideo = videoRef.current;
|
||||
if (currentVideo && !videoHeight) {
|
||||
const currentVideoHeight = currentVideo.offsetHeight;
|
||||
if (currentVideoHeight > 0) {
|
||||
setVideoHeight(currentVideoHeight);
|
||||
}
|
||||
}
|
||||
const [video, setVideo] = useState<VideoJsPlayer>();
|
||||
|
||||
useEffect(() => {
|
||||
const idExists = !isNullOrUndefined(id);
|
||||
if (idExists) {
|
||||
if (videoRef.current && !videoRef.current.paused) {
|
||||
videoRef.current = null;
|
||||
}
|
||||
|
||||
setVideoProperties({
|
||||
posterUrl: `${apiHost}/api/events/${id}/snapshot.jpg`,
|
||||
videoUrl: `${apiHost}/vod/event/${id}/index.m3u8`,
|
||||
height: videoHeight,
|
||||
});
|
||||
} else {
|
||||
setVideoProperties({
|
||||
posterUrl: '',
|
||||
videoUrl: '',
|
||||
height: 0,
|
||||
});
|
||||
let video: VideoJsPlayer
|
||||
if (videoRef.current) {
|
||||
video = videojs(videoRef.current, {})
|
||||
setVideo(video)
|
||||
}
|
||||
}, [id, videoHeight, videoRef, apiHost]);
|
||||
() => video?.dispose()
|
||||
}, [videoRef]);
|
||||
|
||||
useEffect(() => {
|
||||
const playVideo = (video: HTMLMediaElement) => video.play();
|
||||
|
||||
const attemptPlayVideo = (video: HTMLMediaElement) => {
|
||||
const videoHasNotLoaded = video.readyState <= 1;
|
||||
if (videoHasNotLoaded) {
|
||||
video.oncanplay = () => {
|
||||
playVideo(video);
|
||||
};
|
||||
video.load();
|
||||
} else {
|
||||
playVideo(video);
|
||||
}
|
||||
};
|
||||
|
||||
const video = videoRef.current;
|
||||
const videoExists = !isNullOrUndefined(video);
|
||||
if (video && videoExists) {
|
||||
if (videoIsPlaying) {
|
||||
attemptPlayVideo(video);
|
||||
} else {
|
||||
video.pause();
|
||||
}
|
||||
if (!video) {
|
||||
return
|
||||
}
|
||||
}, [videoIsPlaying, videoRef]);
|
||||
|
||||
|
||||
if (!id) {
|
||||
video.pause()
|
||||
return
|
||||
}
|
||||
|
||||
video.src({
|
||||
src: `${apiHost}/vod/event/${id}/master.m3u8`,
|
||||
type: 'application/vnd.apple.mpegurl',
|
||||
});
|
||||
video.poster(`${apiHost}/api/events/${id}/snapshot.jpg`);
|
||||
if (videoIsPlaying) {
|
||||
video.play();
|
||||
}
|
||||
}, [video, id]);
|
||||
|
||||
useEffect(() => {
|
||||
const video = videoRef.current;
|
||||
@@ -111,32 +82,38 @@ export const HistoryVideo = ({
|
||||
isPlaying: videoIsPlaying,
|
||||
timestamp: target.currentTime,
|
||||
};
|
||||
|
||||
onTimeUpdate && onTimeUpdate(timeUpdateEvent);
|
||||
},
|
||||
[videoIsPlaying, onTimeUpdate]
|
||||
);
|
||||
|
||||
const videoPropertiesIsUndefined = isNullOrUndefined(videoProperties);
|
||||
if (videoPropertiesIsUndefined) {
|
||||
return <div style={{ height: `${videoHeight}px`, width: '100%' }} />;
|
||||
}
|
||||
useEffect(() => {
|
||||
if (video && video.readyState() >= 1) {
|
||||
if (videoIsPlaying) {
|
||||
video.play()
|
||||
} else {
|
||||
video.pause()
|
||||
}
|
||||
}
|
||||
}, [video, videoIsPlaying])
|
||||
|
||||
const { posterUrl, videoUrl, height } = videoProperties;
|
||||
const onLoad = useCallback(() => {
|
||||
if (video && video.readyState() >= 1 && videoIsPlaying) {
|
||||
video.play()
|
||||
}
|
||||
}, [video, videoIsPlaying])
|
||||
|
||||
return (
|
||||
<video
|
||||
ref={videoRef}
|
||||
key={posterUrl}
|
||||
onTimeUpdate={onTimeUpdateHandler}
|
||||
onPause={onPause}
|
||||
onPlay={onPlay}
|
||||
poster={posterUrl}
|
||||
preload='metadata'
|
||||
controls
|
||||
style={height ? { minHeight: `${height}px` } : {}}
|
||||
playsInline
|
||||
>
|
||||
<source type='application/vnd.apple.mpegurl' src={videoUrl} />
|
||||
</video>
|
||||
<div data-vjs-player>
|
||||
<video
|
||||
ref={videoRef}
|
||||
onTimeUpdate={onTimeUpdateHandler}
|
||||
onLoadedMetadata={onLoad}
|
||||
onPause={onPause}
|
||||
onPlay={onPlay}
|
||||
className="video-js vjs-fluid"
|
||||
data-setup="{}"
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -44,12 +44,12 @@ export default function RecordingPlaylist({ camera, recordings, selectedDate })
|
||||
<div className="flex absolute inset-y-0 right-0 w-9/12 md:w-1/2 lg:w-3/5 max-w-md text-base text-white font-sans">
|
||||
<div
|
||||
onClick={toggle}
|
||||
className={`absolute ${openClass} cursor-pointer items-center self-center rounded-tl-lg rounded-bl-lg border border-r-0 w-6 h-20 py-7 bg-gray-800 bg-opacity-70`}
|
||||
className={`absolute ${openClass} cursor-pointer items-center self-center rounded-tl-lg rounded-bl-lg border border-r-0 w-6 h-20 py-7 bg-gray-800 bg-opacity-70 z-10`}
|
||||
>
|
||||
{active ? <Menu /> : <MenuOpen />}
|
||||
</div>
|
||||
<div
|
||||
className={`w-full h-full bg-gray-800 bg-opacity-70 border-l overflow-x-hidden overflow-y-auto${
|
||||
className={`w-full h-full bg-gray-800 bg-opacity-70 border-l overflow-x-hidden overflow-y-auto z-10${
|
||||
active ? '' : ' hidden'
|
||||
}`}
|
||||
>
|
||||
|
||||
@@ -27,12 +27,14 @@ export function Tabs({ children, selectedIndex: selectedIndexProp, onChange, cla
|
||||
);
|
||||
}
|
||||
|
||||
export function TextTab({ selected, text, onClick }) {
|
||||
const selectedStyle = selected
|
||||
? 'text-white bg-blue-500 dark:text-black dark:bg-white'
|
||||
: 'text-black dark:text-white bg-transparent';
|
||||
export function TextTab({ selected, text, onClick, disabled }) {
|
||||
const selectedStyle = disabled
|
||||
? 'text-gray-400 dark:text-gray-600 bg-transparent'
|
||||
: selected
|
||||
? 'text-white bg-blue-500 dark:text-black dark:bg-white'
|
||||
: 'text-black dark:text-white bg-transparent';
|
||||
return (
|
||||
<button onClick={onClick} className={`rounded-full px-4 py-2 ${selectedStyle}`}>
|
||||
<button onClick={onClick} disabled={disabled} className={`rounded-full px-4 py-2 ${selectedStyle}`}>
|
||||
<span>{text}</span>
|
||||
</button>
|
||||
);
|
||||
|
||||
@@ -20,11 +20,11 @@ export const TimelineBlocks = ({ timeline, firstBlockOffset, onEventClick }: Tim
|
||||
const timelineBlockOffset = (timelineContainerHeight - largestYOffsetInBlocks) / 2;
|
||||
return (
|
||||
<div
|
||||
className='relative'
|
||||
className="relative"
|
||||
style={{
|
||||
height: `${timelineContainerHeight}px`,
|
||||
width: `${timelineContainerWidth}px`,
|
||||
background: "url('/marker.png')",
|
||||
background: "url('/images/marker.png')",
|
||||
backgroundPosition: 'center',
|
||||
backgroundSize: '30px',
|
||||
backgroundRepeat: 'repeat',
|
||||
@@ -41,7 +41,7 @@ export const TimelineBlocks = ({ timeline, firstBlockOffset, onEventClick }: Tim
|
||||
</div>
|
||||
);
|
||||
}
|
||||
return <div />
|
||||
return <div />;
|
||||
}, [timeline, onEventClick, firstBlockOffset]);
|
||||
|
||||
return timelineEventBlocks;
|
||||
|
||||
@@ -120,7 +120,7 @@ export default function Camera({ camera }) {
|
||||
|
||||
return (
|
||||
<div className="space-y-4 p-2 px-4">
|
||||
<Heading size="2xl">{camera}</Heading>
|
||||
<Heading size="2xl">{camera.replaceAll('_', ' ')}</Heading>
|
||||
<ButtonsTabbed viewModes={['live', 'debug']} setViewMode={setViewMode} />
|
||||
|
||||
{player}
|
||||
@@ -133,8 +133,8 @@ export default function Camera({ camera }) {
|
||||
className="mb-4 mr-4"
|
||||
key={objectType}
|
||||
header={objectType}
|
||||
href={`/events?camera=${camera}&label=${objectType}`}
|
||||
media={<img src={`${apiHost}/api/${camera}/${objectType}/thumbnail.jpg`} />}
|
||||
href={`/events?camera=${camera}&label=${encodeURIComponent(objectType)}`}
|
||||
media={<img src={`${apiHost}/api/${camera}/${encodeURIComponent(objectType)}/thumbnail.jpg`} />}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
@@ -50,6 +50,10 @@ function Camera({ name }) {
|
||||
{ name: 'Recordings', href: `/recording/${name}` },
|
||||
];
|
||||
}, [name]);
|
||||
const cleanName = useMemo(
|
||||
() => { return `${name.replaceAll('_', ' ')}` },
|
||||
[name]
|
||||
);
|
||||
const icons = useMemo(
|
||||
() => [
|
||||
{
|
||||
@@ -81,6 +85,6 @@ function Camera({ name }) {
|
||||
);
|
||||
|
||||
return (
|
||||
<Card buttons={buttons} href={href} header={name} icons={icons} media={<CameraImage camera={name} stretch />} />
|
||||
<Card buttons={buttons} href={href} header={cleanName} icons={icons} media={<CameraImage camera={name} stretch />} />
|
||||
);
|
||||
}
|
||||
|
||||
@@ -81,7 +81,7 @@ export default function Debug() {
|
||||
{cameraNames.map((camera, i) => (
|
||||
<Tr key={i} index={i}>
|
||||
<Td>
|
||||
<Link href={`/cameras/${camera}`}>{camera}</Link>
|
||||
<Link href={`/cameras/${camera}`}>{camera.replaceAll('_', ' ')}</Link>
|
||||
</Td>
|
||||
{cameraDataKeys.map((name) => (
|
||||
<Td key={`${name}-${camera}`}>{cameras[camera][name]}</Td>
|
||||
|
||||
@@ -2,6 +2,7 @@ import { h, Fragment } from 'preact';
|
||||
import { route } from 'preact-router';
|
||||
import ActivityIndicator from '../components/ActivityIndicator';
|
||||
import Heading from '../components/Heading';
|
||||
import { Tabs, TextTab } from '../components/Tabs';
|
||||
import { useApiHost } from '../api';
|
||||
import useSWR from 'swr';
|
||||
import useSWRInfinite from 'swr/infinite';
|
||||
@@ -54,6 +55,7 @@ export default function Events({ path, ...props }) {
|
||||
});
|
||||
const [uploading, setUploading] = useState([]);
|
||||
const [viewEvent, setViewEvent] = useState();
|
||||
const [eventDetailType, setEventDetailType] = useState('clip');
|
||||
const [downloadEvent, setDownloadEvent] = useState({
|
||||
id: null,
|
||||
has_clip: false,
|
||||
@@ -235,6 +237,10 @@ export default function Events({ path, ...props }) {
|
||||
}
|
||||
};
|
||||
|
||||
const handleEventDetailTabChange = (index) => {
|
||||
setEventDetailType(index == 0 ? 'clip' : 'image');
|
||||
};
|
||||
|
||||
if (!config) {
|
||||
return <ActivityIndicator />;
|
||||
}
|
||||
@@ -251,7 +257,7 @@ export default function Events({ path, ...props }) {
|
||||
<option value="all">all cameras</option>
|
||||
{filterValues.cameras.map((item) => (
|
||||
<option key={item} value={item}>
|
||||
{item}
|
||||
{item.replaceAll('_', ' ')}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
@@ -262,7 +268,7 @@ export default function Events({ path, ...props }) {
|
||||
>
|
||||
<option value="all">all labels</option>
|
||||
{filterValues.labels.map((item) => (
|
||||
<option key={item} value={item}>
|
||||
<option key={item.replaceAll('_', ' ')} value={item}>
|
||||
{item}
|
||||
</option>
|
||||
))}
|
||||
@@ -275,7 +281,7 @@ export default function Events({ path, ...props }) {
|
||||
<option value="all">all zones</option>
|
||||
{filterValues.zones.map((item) => (
|
||||
<option key={item} value={item}>
|
||||
{item}
|
||||
{item.replaceAll('_', ' ')}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
@@ -457,11 +463,11 @@ export default function Events({ path, ...props }) {
|
||||
</div>
|
||||
<div className="capitalize text-sm flex align-center mt-1">
|
||||
<Camera className="h-5 w-5 mr-2 inline" />
|
||||
{event.camera}
|
||||
{event.camera.replaceAll('_', ' ')}
|
||||
</div>
|
||||
<div className="capitalize text-sm flex align-center">
|
||||
<Zone className="w-5 h-5 mr-2 inline" />
|
||||
{event.zones.join(',')}
|
||||
{event.zones.join(', ').replaceAll('_', ' ')}
|
||||
</div>
|
||||
</div>
|
||||
<div class="hidden sm:flex flex-col justify-end mr-2">
|
||||
@@ -495,16 +501,22 @@ export default function Events({ path, ...props }) {
|
||||
{viewEvent !== event.id ? null : (
|
||||
<div className="space-y-4">
|
||||
<div className="mx-auto max-w-7xl">
|
||||
{event.has_clip ? (
|
||||
<>
|
||||
<Heading size="lg">Clip</Heading>
|
||||
<div className='flex justify-center w-full py-2'>
|
||||
<Tabs selectedIndex={event.has_clip && eventDetailType == 'clip' ? 0 : 1} onChange={handleEventDetailTabChange} className='justify'>
|
||||
<TextTab text='Clip' disabled={!event.has_clip} />
|
||||
<TextTab text={event.has_snapshot ? 'Snapshot' : 'Thumbnail'} />
|
||||
</Tabs>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
{((eventDetailType == 'clip') && event.has_clip) ? (
|
||||
<VideoPlayer
|
||||
options={{
|
||||
preload: 'auto',
|
||||
autoplay: true,
|
||||
sources: [
|
||||
{
|
||||
src: `${apiHost}/vod/event/${event.id}/index.m3u8`,
|
||||
src: `${apiHost}/vod/event/${event.id}/master.m3u8`,
|
||||
type: 'application/vnd.apple.mpegurl',
|
||||
},
|
||||
],
|
||||
@@ -512,23 +524,22 @@ export default function Events({ path, ...props }) {
|
||||
seekOptions={{ forward: 10, back: 5 }}
|
||||
onReady={() => {}}
|
||||
/>
|
||||
</>
|
||||
) : (
|
||||
<div className="flex justify-center">
|
||||
<div>
|
||||
<Heading size="sm">{event.has_snapshot ? 'Best Image' : 'Thumbnail'}</Heading>
|
||||
) : null }
|
||||
|
||||
{((eventDetailType == 'image') || !event.has_clip) ? (
|
||||
<div className="flex justify-center">
|
||||
<img
|
||||
className="flex-grow-0"
|
||||
src={
|
||||
event.has_snapshot
|
||||
? `${apiHost}/api/events/${event.id}/snapshot.jpg`
|
||||
: `data:image/jpeg;base64,${event.thumbnail}`
|
||||
: `${apiHost}/api/events/${event.id}/thumbnail.jpg`
|
||||
}
|
||||
alt={`${event.label} at ${(event.top_score * 100).toFixed(0)}% confidence`}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
) : null }
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -15,13 +15,13 @@ export default function Recording({ camera, date, hour = '00', minute = '00', se
|
||||
);
|
||||
|
||||
const apiHost = useApiHost();
|
||||
const { data: recordingsSummary } = useSWR(`${camera}/recordings/summary`);
|
||||
const { data: recordingsSummary } = useSWR(`${camera}/recordings/summary`, { revalidateOnFocus: false });
|
||||
|
||||
const recordingParams = {
|
||||
before: getUnixTime(endOfHour(currentDate)),
|
||||
after: getUnixTime(startOfHour(currentDate)),
|
||||
};
|
||||
const { data: recordings } = useSWR([`${camera}/recordings`, recordingParams]);
|
||||
const { data: recordings } = useSWR([`${camera}/recordings`, recordingParams], { revalidateOnFocus: false });
|
||||
|
||||
// calculates the seek seconds by adding up all the seconds in the segments prior to the playback time
|
||||
const seekSeconds = useMemo(() => {
|
||||
@@ -62,7 +62,7 @@ export default function Recording({ camera, date, hour = '00', minute = '00', se
|
||||
description: `${camera} recording @ ${h.hour}:00.`,
|
||||
sources: [
|
||||
{
|
||||
src: `${apiHost}/vod/${year}-${month}/${day}/${h.hour}/${camera}/index.m3u8`,
|
||||
src: `${apiHost}/vod/${year}-${month}/${day}/${h.hour}/${camera}/master.m3u8`,
|
||||
type: 'application/vnd.apple.mpegurl',
|
||||
},
|
||||
],
|
||||
@@ -103,7 +103,7 @@ export default function Recording({ camera, date, hour = '00', minute = '00', se
|
||||
}
|
||||
}, [seekSeconds, playlistIndex]);
|
||||
|
||||
if (!recordingsSummary) {
|
||||
if (!recordingsSummary || !recordings) {
|
||||
return <ActivityIndicator />;
|
||||
}
|
||||
|
||||
@@ -121,7 +121,7 @@ export default function Recording({ camera, date, hour = '00', minute = '00', se
|
||||
|
||||
return (
|
||||
<div className="space-y-4 p-2 px-4">
|
||||
<Heading>{camera} Recordings</Heading>
|
||||
<Heading>{camera.replaceAll('_', ' ')} Recordings</Heading>
|
||||
|
||||
<VideoPlayer
|
||||
onReady={(player) => {
|
||||
|
||||
Reference in New Issue
Block a user