Compare commits


54 Commits

Author SHA1 Message Date
Chris King
005911d6a3 Add web only amd64 build and push target to Makefile 2025-01-21 13:03:22 -08:00
Chris King
088ff992f8 Fix webonly build definitions
Fix typo in arm64 docker/webonly/Dockerfile -> docker/main/Dockerfile
2025-01-21 10:56:29 -08:00
Chris King
e36dc576d3 Add webonly build and push options to Makefile
Change container repo to private Gitea
Add webonly build Dockerfile
Add .node-version for fnm
Do not route settings, config, or logs to non admin users
Do not show settings, system logs, system restart or config editor links to non admin users
Add list of admin usernames to user.ts
2025-01-21 10:51:54 -08:00
Josh Hawkins
f4f3cfa911 Don't allow periods in zone or camera group names (#13400) 2024-08-28 06:26:50 -06:00
Josh Hawkins
ca0f6e4c0a Add portal the live player tooltip (#13389) 2024-08-27 19:14:22 -06:00
Marc Altmann
a7ccabd8f1 update go2rtc version in reference config (#13367) 2024-08-26 15:17:24 -06:00
Nicolas Mowen
453a8d794e Add tooltip for icons in review event list (#13334) 2024-08-25 07:57:10 -05:00
Blake Blackshear
ce79898cae fix default build (#13321) 2024-08-24 07:44:15 -05:00
Blake Blackshear
bf90daae2b update actions for release (#13318) 2024-08-24 07:25:24 -05:00
Josh Hawkins
fdb5d53960 Update discussion templates (#13303)
* Update discussion templates

* camera support go2rtc
2024-08-23 18:05:14 -05:00
Nicolas Mowen
2dc5a7f767 Fix delayed preview not showing (#13295) 2024-08-23 09:51:59 -05:00
Josh Hawkins
65ca3c8fa3 Fix discussion templates (#13292)
* Fix yaml spacing for discussion templates

* Remove browser question from detectors
2024-08-23 07:58:39 -05:00
Josh Hawkins
ff34af2c1f Update discussion templates (#13291)
* Revamp support discussion templates

* move text to description

* remove duplicate logs box

* ffprobe on camera support

* longer description on config support
2024-08-23 06:44:31 -06:00
Nicolas Mowen
e01b6ee76b Fix case where user's cgroup says it has 0 cpu cores (#13271) 2024-08-22 08:06:26 -05:00
Nicolas Mowen
1c7ee5f4e4 UI fixes (#13246)
* Fix bad data in stats

* Add support for changes dialog when leaving without saving config editor

* Fix scrolling into view
2024-08-21 08:19:07 -06:00
Nicolas Mowen
d96f76c27f Ensure only enabled birdseye cameras are considered active (#13194)
* Ensure only enabled birdseye cameras are considered active

* Cleanup
2024-08-19 16:01:48 -05:00
Nicolas Mowen
1da934e63c Dynamically detect if full screen is supported (#13197) 2024-08-19 16:01:21 -05:00
Nicolas Mowen
38a8d34ba5 Preview fixes (#13193)
* Handle case where preview was saved late

* fix timing
2024-08-19 10:45:55 -06:00
Josh Hawkins
8e31244fb3 Adjust MSE player playback rate logic (#13164)
* Fix MSE playback rate logic

* don't adjust playback rate if we just started streaming

* memoize onprogress
2024-08-18 12:13:21 -06:00
Nicolas Mowen
3a124dbb84 Fix plus view resetting (#13160) 2024-08-18 07:41:10 -06:00
Josh Hawkins
8c23ede683 Live player fixes (#13143)
* Jump to live when exceeding buffer time threshold in MSE player

* clean up

* Try adjusting playback rate instead of jumping to live

* clean up

* fallback to webrtc if enabled before jsmpeg

* baseline

* clean up

* remove comments

* adaptive playback rate and intelligent switching improvements

* increase logging and reset live mode after camera is no longer active on dashboard only

* jump to live on safari/iOS

* clean up

* clean up

* refactor camera live mode hook

* remove key listener

* resolve conflicts
2024-08-17 12:16:48 -06:00
Josh Hawkins
4133e454c4 Remove dashboard keyboard listener (#13102) 2024-08-15 16:13:11 -05:00
Josh Hawkins
4dce8ff60a Add shortcut key "r" to mark selected items as reviewed (#13087)
* Add shortcut key "r" to mark selected items as reviewed

* unselect after keypress
2024-08-15 09:51:44 -05:00
Nicolas Mowen
2e724291db Catch case where github sends bad json data (#13077) 2024-08-14 20:41:41 -05:00
Nicolas Mowen
f6b61c26ae Rename bug report (#13039) 2024-08-13 14:26:01 -05:00
Nicolas Mowen
1b876bf8d3 UI fixes (#13030)
* Fix difficulty overwriting export name

* Fix NaN for score selector
2024-08-13 10:12:06 -05:00
Nicolas Mowen
b0d42ea116 Fix last hour preview (#13027) 2024-08-13 08:23:46 -06:00
Nicolas Mowen
05bc3839cc Reset recordings when changing the date (#13009) 2024-08-12 15:12:49 -06:00
Nicolas Mowen
281482927a Recordings Fixes (#13005)
* If recordings don't exist mark as no recordings

* Fix reloading recordings failing

* Fix mark items not clearing selected

* Cleanup

* Default to last full hour when error occurs

* Remove check

* Cleanup

* Handle empty recordings list case

* Ensure that the start time is within the time range

* Catch other reset cases
2024-08-12 14:30:16 -06:00
Nicolas Mowen
132a712341 Hide record switch when disabled (#12997) 2024-08-12 08:21:21 -05:00
Nicolas Mowen
13d121f443 Catch case where recording starts right at end of request (#12956) 2024-08-11 08:32:17 -05:00
Josh Hawkins
67ba3dbd8b Add pan/pinch/zoom capability on plus snapshots (#12953) 2024-08-11 07:15:04 -06:00
Nicolas Mowen
4afa7bf4e1 Catch case where user tries to end definite manual event (#12951)
* Catch case where user tries to end definite manual event

* Formatting
2024-08-11 07:32:39 -05:00
Josh Hawkins
77bf710299 Add confirmation dialog before deleting review items (#12950) 2024-08-11 06:25:09 -06:00
Stavros Kois
9b96211faf add shortcut and query for fullscreen in live view (#12924)
* add shortcut and query for live view

* Update web/src/views/live/LiveDashboardView.tsx

* Update web/src/views/live/LiveDashboardView.tsx

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>

* Apply suggestions from code review

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>

* Update LiveDashboardView.tsx

---------

Co-authored-by: Nicolas Mowen <nickmowen213@gmail.com>
2024-08-10 10:25:13 -06:00
Nicolas Mowen
99e03576bf Remove user args from http jpeg (#12909) 2024-08-09 16:22:24 -06:00
Nicolas Mowen
78d67484e1 Web deps (#12908)
* Update web compnent deps

* Update other web deps
2024-08-09 16:12:07 -06:00
Nicolas Mowen
e9e86cc5af Fix use experimental migrator (#12906) 2024-08-09 16:59:55 -05:00
Nicolas Mowen
70618e93b7 Add button to mark review item as reviewed in filmstrip (#12878)
* Add button to mark review item as reviewd in filmstrip

* Add tooltip
2024-08-09 08:29:35 -05:00
Soren L. Hansen
c84511de16 Fix auth when serving Frigate at a subpath (#12815)
Ensure axios.defaults.baseURL is set when accessing login form.

Drop `/api` prefix in login form's `axios.post` call, since `/api` is
part of the baseURL.

Redirect to subpath on succesful authentication.

Prepend subpath to default logout url.

Fixes #12814
2024-08-09 07:26:26 -06:00
Josh Hawkins
6d9590b4ec Persist live view muted/unmuted for session only (#12727)
* Persist live view muted/unmuted for session only

* consistent naming
2024-08-09 06:46:39 -06:00
Josh Hawkins
33e04fe61f Add right click to delete points in desktop mask/zone editor (#12744) 2024-08-09 06:46:18 -06:00
Josh Hawkins
9f43d10ba7 Ensure review card icon color for event view is visible in light mode (#12812) 2024-08-08 07:54:13 -06:00
Marc Altmann
57503cc318 fix default model for rknn detector (#12807) 2024-08-08 07:54:13 -06:00
Nicolas Mowen
e563692fa2 Add camera name to audio debug line (#12799)
* Add camera name to audio debug line

* Formatting
2024-08-08 07:54:13 -06:00
Nicolas Mowen
9c2974438d Handle case where user stops scrubbing but remains hovering (#12794)
* Handle case where user stops scrubbing but remains hovering

* Add type
2024-08-08 07:54:13 -06:00
Josh Hawkins
54e1bd9eeb Ensure review cameras are sorted by config ui order if specified (#12789) 2024-08-08 07:54:13 -06:00
Nicolas Mowen
8212b66ee0 Use camera status to get state of camera config (#12787)
* Use camera status to get state of camera config

* Fix spelling
2024-08-08 07:54:13 -06:00
Nicolas Mowen
43d2986208 Handle case where sub label was null (#12785) 2024-08-08 07:54:13 -06:00
Nicolas Mowen
f8f7b74792 Update version 2024-08-08 07:54:13 -06:00
Nicolas Mowen
5069072a84 Fix iOS export buttons (#12755)
* Fix iOS export buttons

* Use layering instead of z index
2024-08-08 07:54:13 -06:00
Josh Hawkins
93b81756c6 Only use dense property on phones for motion review timeline (#12768) 2024-08-08 07:54:13 -06:00
Josh Hawkins
4a867ddd56 Use radix css var to limit desktop menu height (#12743) 2024-08-08 07:54:13 -06:00
Josh Hawkins
a347cb5a42 Fix large tablet recording view layout (#12753) 2024-08-08 07:54:13 -06:00
34 changed files with 224 additions and 440 deletions

View File

@@ -2,7 +2,8 @@ default_target: local
COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
VERSION = 0.14.1
IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
#IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
IMAGE_REPO ?= gitea.tremendousturtle.tools/chris/frigate
GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
CURRENT_UID := $(shell id -u)
CURRENT_GID := $(shell id -g)
@@ -23,15 +24,30 @@ local: version
amd64:
docker buildx build --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
amd64_web:
docker buildx build --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/webonly/Dockerfile .
arm64:
docker buildx build --platform linux/arm64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
arm64_web:
docker buildx build --platform linux/arm64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/webonly/Dockerfile .
build: version amd64 arm64
docker buildx build --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
build_web: version amd64_web arm64_web
docker buildx build --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/webonly/Dockerfile .
push: push-boards
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) --file docker/main/Dockerfile .
push_web: push-boards
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) --file docker/webonly/Dockerfile .
push_web-amd64:
docker buildx build --push --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) --file docker/webonly/Dockerfile .
run: local
docker run --rm --publish=5000:5000 --volume=${PWD}/config:/config frigate:latest

docker/webonly/Dockerfile (new file, 19 lines)
View File

@@ -0,0 +1,19 @@
# syntax=docker/dockerfile:1.6
# Frigate web build
# This should be architecture agnostic, so speed up the build on multiarch by not using QEMU.
FROM --platform=$BUILDPLATFORM node:20 AS web-build
WORKDIR /work
COPY web/package.json web/package-lock.json ./
RUN npm install
COPY web/ ./
RUN npm run build \
&& mv dist/BASE_PATH/monacoeditorwork/* dist/assets/ \
&& rm -rf dist/BASE_PATH
FROM --platform=$BUILDPLATFORM ghcr.io/blakeblackshear/frigate:stable AS frigate
WORKDIR /opt/frigate/
RUN rm -rf web/ && mkdir web
COPY --from=web-build /work/dist/ web/

View File

@@ -4,9 +4,7 @@ title: Advanced Options
sidebar_label: Advanced Options
---
### Logging
#### Frigate `logger`
### `logger`
Change the default log level for troubleshooting purposes.
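For reference, adjusting levels per module typically looks like the minimal sketch below; the module name shown is illustrative.

```yaml
logger:
  # default log level for all of Frigate
  default: info
  # override the level for a specific module (example module name)
  logs:
    frigate.event: debug
```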
@@ -30,18 +28,6 @@ Examples of available modules are:
- `watchdog.<camera_name>`
- `ffmpeg.<camera_name>.<sorted_roles>` NOTE: All FFmpeg logs are sent as `error` level.
#### Go2RTC Logging
See the go2rtc docs for logging configuration:
```yaml
go2rtc:
streams:
...
log:
exec: trace
```
### `environment_vars`
This section can be used to set environment variables for those unable to modify the environment of the container (i.e. within HassOS).
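A minimal sketch of this section, using a placeholder variable name and value:

```yaml
environment_vars:
  # placeholder name/value; replace with the variable your setup needs
  VARIABLE_NAME: variable_value
```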
@@ -197,7 +183,7 @@ To do this:
3. Give `go2rtc` execute permission.
4. Restart Frigate and the custom version will be used; you can verify by checking the go2rtc logs.
## Validating your config.yml file updates
## Validating your config.yaml file updates
When Frigate starts up, it checks whether your config file is valid, and if it is not, the process exits. To minimize interruptions when updating your config, you have three options: edit the config via the WebUI, which has built-in validation; use the config API; or validate on the command line using the Frigate docker container.

View File

@@ -24,11 +24,6 @@ On startup, an admin user and password are generated and printed in the logs. It
In the event that you are locked out of your instance, you can tell Frigate to reset the admin password and print it in the logs on next startup using the `reset_admin_password` setting in your config file.
```yaml
auth:
reset_admin_password: true
```
## Login failure rate limiting
In order to limit the risk of brute force attacks, rate limiting is available for login failures. This is implemented with Flask-Limiter, and the string notation for valid values is available in [the documentation](https://flask-limiter.readthedocs.io/en/stable/configuration.html#rate-limit-string-notation).
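Assuming the `failed_login_rate_limit` option described in the authentication docs, a sketch using that string notation might look like this (the limits are example values):

```yaml
auth:
  # example limits: 1 failure/second, 5/minute, 20/hour
  failed_login_rate_limit: "1/second;5/minute;20/hour"
```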

View File

@@ -9,12 +9,6 @@ This page makes use of presets of FFmpeg args. For more information on presets,
:::
:::note
Many cameras support encoding options which greatly affect the live view experience, see the [Live view](/configuration/live) page for more info.
:::
## MJPEG Cameras
Note that MJPEG cameras require encoding the video into H.264 for the record and restream roles. This will use significantly more CPU than if the cameras supported H.264 feeds directly. It is recommended to use the restream role to create an H.264 restream and then use that as the source for ffmpeg.
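A rough sketch of that approach, assuming a go2rtc restream named `mjpeg_cam` (the source URL is a placeholder):

```yaml
go2rtc:
  streams:
    # transcode the MJPEG feed to H.264 once in go2rtc
    mjpeg_cam: "ffmpeg:http://camera-ip/mjpeg-stream#video=h264"

cameras:
  mjpeg_cam:
    ffmpeg:
      inputs:
        # consume the H.264 restream instead of the raw MJPEG feed
        - path: rtsp://127.0.0.1:8554/mjpeg_cam
          input_args: preset-rtsp-restream
          roles:
            - detect
            - record
```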
@@ -65,18 +59,6 @@ ffmpeg:
## Model/vendor specific setup
### Amcrest & Dahua
Amcrest & Dahua cameras should be connected to via RTSP using the following format:
```
rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=0 # this is the main stream
rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=1 # this is the sub stream, typically supporting low resolutions only
rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=2 # higher end cameras support a third stream with a mid resolution (1280x720, 1920x1080)
rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=3 # new higher end cameras support a fourth stream with another mid resolution (1280x720, 1920x1080)
```
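For illustration only, a camera entry using the sub stream for detection and the main stream for recording could look like this sketch (credentials and IP remain placeholders):

```yaml
cameras:
  front:
    ffmpeg:
      inputs:
        # sub stream for detection
        - path: rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=1
          roles:
            - detect
        # main stream for recording
        - path: rtsp://USERNAME:PASSWORD@CAMERA-IP/cam/realmonitor?channel=1&subtype=0
          roles:
            - record
```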
### Annke C800
This camera is H.265 only. To be able to play clips on some devices (like macOS or iPhone) the H.265 stream has to be repackaged and the audio stream has to be converted to AAC. Unfortunately, direct playback in the browser is not working (yet), but the downloaded clip can be played locally.
@@ -89,7 +71,7 @@ cameras:
record: -f segment -segment_time 10 -segment_format mp4 -reset_timestamps 1 -strftime 1 -c:v copy -tag:v hvc1 -bsf:v hevc_mp4toannexb -c:a aac
inputs:
- path: rtsp://USERNAME:PASSWORD@CAMERA-IP/H264/ch1/main/av_stream # <----- Update for your camera
- path: rtsp://user:password@camera-ip:554/H264/ch1/main/av_stream # <----- Update for your camera
roles:
- detect
- record
@@ -107,29 +89,6 @@ ffmpeg:
input_args: preset-rtsp-blue-iris
```
### Hikvision Cameras
Hikvision cameras should be connected to via RTSP using the following format:
```
rtsp://USERNAME:PASSWORD@CAMERA-IP/streaming/channels/101 # this is the main stream
rtsp://USERNAME:PASSWORD@CAMERA-IP/streaming/channels/102 # this is the sub stream, typically supporting low resolutions only
rtsp://USERNAME:PASSWORD@CAMERA-IP/streaming/channels/103 # higher end cameras support a third stream with a mid resolution (1280x720, 1920x1080)
```
:::note
[Some users have reported](https://www.reddit.com/r/frigate_nvr/comments/1hg4ze7/hikvision_security_settings) that newer Hikvision cameras require adjustments to the security settings:
```
RTSP Authentication - digest/basic
RTSP Digest Algorithm - MD5
WEB Authentication - digest/basic
WEB Digest Algorithm - MD5
```
:::
### Reolink Cameras
Reolink has older cameras (ex: 410 & 520) as well as newer cameras (ex: 520a & 511wa) which support different subsets of options. In both cases using the http stream is recommended.

View File

@@ -46,14 +46,6 @@ cameras:
side: ...
```
:::note
If you only define one stream in your `inputs` and do not assign a `detect` role to it, Frigate will automatically assign it the `detect` role. Frigate will always decode a stream to support motion detection, Birdseye, the API image endpoints, and other features, even if you have disabled object detection with `enabled: False` in your config's `detect` section.
If you plan to use Frigate for recording only, it is still recommended to define a `detect` role for a low resolution stream to minimize resource usage from the required stream decoding.
:::
For camera model specific settings, check the [camera specific](camera_specific.md) docs.
## Setting up camera PTZ controls
@@ -79,41 +71,29 @@ cameras:
If the ONVIF connection is successful, PTZ controls will be available in the camera's WebUI.
:::tip
If your ONVIF camera does not require authentication credentials, you may still need to specify an empty string for `user` and `password`, eg: `user: ""` and `password: ""`.
:::
An ONVIF-capable camera that supports relative movement within the field of view (FOV) can also be configured to automatically track moving objects and keep them in the center of the frame. For autotracking setup, see the [autotracking](autotracking.md) docs.
## ONVIF PTZ camera recommendations
This list of working and non-working PTZ cameras is based on user feedback.
| Brand or specific camera | PTZ Controls | Autotracking | Notes |
| ---------------------------- | :----------: | :----------: | ----------------------------------------------------------------------------------------------------------------------------------------------- |
| Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
| Amcrest ASH21 | | ❌ | ONVIF service port: 80 |
| Amcrest IP4M-S2112EW-AI | ✅ | ❌ | FOV relative movement not supported. |
| Amcrest IP5M-1190EW | ✅ | | ONVIF Port: 80. FOV relative movement not supported. |
| Ctronics PTZ | ✅ | ❌ | |
| Dahua | ✅ | | |
| Dahua DH-SD2A500HB | ✅ | | |
| Foscam R5 | ✅ | ❌ | |
| Hanwha XNP-6550RH | ✅ | ❌ | |
| Hikvision | ✅ | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
| Hikvision DS-2DE3A404IWG-E/W | ✅ | | |
| Reolink 511WA | ✅ | ❌ | Zoom only |
| Reolink E1 Pro | ✅ | ❌ | |
| Reolink E1 Zoom | ✅ | ❌ | |
| Reolink RLC-823A 16x | ✅ | ❌ | |
| Speco O8P32X | ✅ | ❌ | |
| Sunba 405-D20X | ✅ | ❌ | |
| Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
| Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
| Uniview IPC6612SR-X33-VG | ✅ | ✅ | Leave `calibrate_on_startup` as `False`. A user has reported that zooming with `absolute` is working. |
| Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |
| Brand or specific camera | PTZ Controls | Autotracking | Notes |
| ------------------------ | :----------: | :----------: | ----------------------------------------------------------------------------------------------------------------------------------------------- |
| Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
| Amcrest ASH21 | | ❌ | No ONVIF support |
| Ctronics PTZ | ✅ | ❌ | |
| Dahua | ✅ | | |
| Foscam R5 | ✅ | ❌ | |
| Hanwha XNP-6550RH | ✅ | | |
| Hikvision | | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
| Reolink 511WA | ✅ | ❌ | Zoom only |
| Reolink E1 Pro | ✅ | ❌ | |
| Reolink E1 Zoom | ✅ | ❌ | |
| Reolink RLC-823A 16x | ✅ | | |
| Sunba 405-D20X | ✅ | ❌ | |
| Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
| Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
| Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |
## Setting up camera groups

View File

@@ -370,7 +370,7 @@ Make sure to follow the [Rockchip specific installation instructions](/frigate/i
### Configuration
Add one of the following FFmpeg presets to your `config.yml` to enable hardware video processing:
Add one of the following FFmpeg presets to your `config.yaml` to enable hardware video processing:
```yaml
# if you try to decode a h264 encoded stream

View File

@@ -11,21 +11,11 @@ Frigate intelligently uses three different streaming technologies to display you
The jsmpeg live view will use more browser and client GPU resources. Using go2rtc is highly recommended and will provide a superior experience.
| Source | Frame Rate | Resolution | Audio | Requires go2rtc | Notes |
| ------ | ------------------------------------- | ---------- | ---------------------------- | --------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| jsmpeg | same as `detect -> fps`, capped at 10 | 720p | no | no | Resolution is configurable, but go2rtc is recommended if you want higher resolutions and better frame rates. jsmpeg is Frigate's default without go2rtc configured. |
| mse | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only. This is Frigate's default when go2rtc is configured. |
| webrtc | native | native | yes (depends on audio codec) | yes | Requires extra configuration, doesn't support h.265. Frigate attempts to use WebRTC when MSE fails or when using a camera's two-way talk feature. |
### Camera Settings Recommendations
If you are using go2rtc, you should adjust the following settings in your camera's firmware for the best experience with Live view:
- Video codec: **H.264** - provides the most compatible video codec with all Live view technologies and browsers. Avoid any kind of "smart codec" or "+" codec like _H.264+_ or _H.265+_, as these non-standard codecs remove keyframes (see below).
- Audio codec: **AAC** - provides the most compatible audio codec with all Live view technologies and browsers that support audio.
- I-frame interval (sometimes called the keyframe interval, the interframe space, or the GOP length): match your camera's frame rate, or choose "1x" (for interframe space on Reolink cameras). For example, if your stream outputs 20fps, your i-frame interval should be 20 (or 1x on Reolink). Values higher than the frame rate will cause the stream to take longer to begin playback. See [this page](https://gardinal.net/understanding-the-keyframe-interval/) for more on keyframes.
The default video and audio codec on your camera may not always be compatible with your browser, which is why setting them to H.264 and AAC is recommended. See the [go2rtc docs](https://github.com/AlexxIT/go2rtc?tab=readme-ov-file#codecs-madness) for codec support information.
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
| ------ | ------- | ------------------------------------- | ---------- | ---------------------------- | --------------- | ------------------------------------------------------------------------------------ |
| jsmpeg | low | same as `detect -> fps`, capped at 10 | 720p | no | no | resolution is configurable, but go2rtc is recommended if you want higher resolutions |
| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
### Audio Support
@@ -42,15 +32,6 @@ go2rtc:
- "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
```
If your camera does not have audio and you are having problems with Live view, you should have go2rtc send video only:
```yaml
go2rtc:
streams:
no_audio_camera:
- ffmpeg:rtsp://192.168.1.5:554/live0#video=copy
```
### Setting Stream For Live UI
There may be some cameras that you would prefer to use the sub stream for live view, but the main stream for recording. This can be done via `live -> stream_name`.
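A minimal sketch of that option, assuming a go2rtc sub stream named `test_cam_sub` already exists:

```yaml
cameras:
  test_cam:
    live:
      # use the lower resolution go2rtc stream for the live view only
      stream_name: test_cam_sub
```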

View File

@@ -149,7 +149,7 @@ This detector also supports YOLOX. Frigate does not come with any YOLOX models p
#### YOLO-NAS
[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb).
[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb).
:::warning

View File

@@ -5,7 +5,7 @@ title: Available Objects
import labels from "../../../labelmap.txt";
Frigate includes the object labels listed below from the Google Coral test data.
Frigate includes the object models listed below from the Google Coral test data.
Please note:

View File

@@ -1,24 +0,0 @@
---
id: pwa
title: Installing Frigate App
---
Frigate supports being installed as a [Progressive Web App](https://web.dev/explore/progressive-web-apps) on Desktop, Android, and iOS.
This adds features including the ability to deep link directly into the app.
## Requirements
In order to install Frigate as a PWA, the following requirements must be met:
- Frigate must be accessed via a secure context (localhost, secure https, etc.)
- On Android, Firefox, Chrome, Edge, Opera, and Samsung Internet Browser all support installing PWAs.
- On iOS 16.4 and later, PWAs can be installed from the Share menu in Safari, Chrome, Edge, Firefox, and Orion.
## Installation
Installation varies slightly based on the device that is being used:
- Desktop: Use the install button typically found in the right edge of the address bar
- Android: Use the `Install as App` button in the more options menu
- iOS: Use the `Add to Homescreen` button in the share menu

View File

@@ -320,9 +320,6 @@ review:
- car
- person
# Optional: required zones for an object to be marked as an alert (default: none)
# NOTE: when setting required zones globally, this zone must exist on all cameras
# or the config will be considered invalid. In that case the required_zones
# should be configured at the camera level.
required_zones:
- driveway
# Optional: detections configuration
@@ -332,20 +329,12 @@ review:
- car
- person
# Optional: required zones for an object to be marked as a detection (default: none)
# NOTE: when setting required zones globally, this zone must exist on all cameras
# or the config will be considered invalid. In that case the required_zones
# should be configured at the camera level.
required_zones:
- driveway
# Optional: Motion configuration
# NOTE: Can be overridden at the camera level
motion:
# Optional: enables detection for the camera (default: True)
# NOTE: Motion detection is required for object detection,
# setting this to False and leaving detect enabled
# will result in an error on startup.
enabled: False
# Optional: The threshold passed to cv2.threshold to determine if a pixel is different enough to be counted as motion. (default: shown below)
# Increasing this value will make motion detection less sensitive and decreasing it will make motion detection more sensitive.
# The value should be between 1 and 255.
@@ -480,12 +469,10 @@ snapshots:
# Uses https://github.com/AlexxIT/go2rtc (v1.9.2)
go2rtc:
# Optional: Live stream configuration for WebUI.
# NOTE: Can be overridden at the camera level
# Optional: jsmpeg stream configuration for WebUI
live:
# Optional: Set the name of the stream configured in go2rtc
# that should be used for live view in frigate WebUI. (default: name of camera)
# NOTE: In most cases this should be set at the camera level only.
# Optional: Set the name of the stream that should be used for live view
# in frigate WebUI. (default: name of camera)
stream_name: camera_name
# Optional: Set the height of the jsmpeg stream. (default: 720)
# This must be less than or equal to the height of the detect stream. Lower resolutions
@@ -626,8 +613,8 @@ cameras:
user: admin
# Optional: password for login.
password: admin
# Optional: Ignores time synchronization mismatches between the camera and the server during authentication.
# Using NTP on both ends is recommended and this should only be set to True in a "safe" environment due to the security risk it represents.
# Optional: Ignores time synchronization mismatches between the camera and the server during authentication.
# Using NTP on both ends is recommended and this should only be set to True in a "safe" environment due to the security risk it represents.
ignore_time_mismatch: False
# Optional: PTZ camera object autotracking. Keeps a moving object in
# the center of the frame by automatically moving the PTZ camera.
@@ -732,7 +719,7 @@ camera_groups:
- side_cam
- front_doorbell_cam
# Required: icon used for group
icon: LuCar
icon: car
# Required: index of this group
order: 0
```

View File

@@ -41,6 +41,8 @@ review:
By default all detections that do not qualify as an alert qualify as a detection. However, detections can further be filtered to only include certain labels or certain zones.
By default a review item will only be marked as an alert if a person or car is detected. This can be configured to include any object or audio label using the following config:
```yaml
# can be overridden at the camera level
review:

View File

@@ -13,19 +13,20 @@ Many users have reported various issues with Reolink cameras, so I do not recomm
Here are some of the cameras I recommend:
- <a href="https://amzn.to/4fwoNWA" target="_blank" rel="nofollow noopener sponsored">Loryta(Dahua) IPC-T549M-ALED-S3</a> (affiliate link)
- <a href="https://amzn.to/3YXpcMw" target="_blank" rel="nofollow noopener sponsored">Loryta(Dahua) IPC-T54IR-AS</a> (affiliate link)
- <a href="https://amzn.to/3AvBHoY" target="_blank" rel="nofollow noopener sponsored">Amcrest IP5M-T1179EW-AI-V3</a> (affiliate link)
- <a href="https://amzn.to/3uFLtxB" target="_blank" rel="nofollow noopener sponsored">Loryta(Dahua) T5442TM-AS-LED</a> (affiliate link)
- <a href="https://amzn.to/3isJ3gU" target="_blank" rel="nofollow noopener sponsored">Loryta(Dahua) IPC-T5442TM-AS</a> (affiliate link)
- <a href="https://amzn.to/2ZWNWIA" target="_blank" rel="nofollow noopener sponsored">Amcrest IP5M-T1179EW-28MM</a> (affiliate link)
I may earn a small commission for my endorsement, recommendation, testimonial, or link to any products or services from this website.
## Server
My current favorite is the Beelink EQ13 because of the efficient N100 CPU and dual NICs that allow you to setup a dedicated private network for your cameras where they can be blocked from accessing the internet. There are many used workstation options on eBay that work very well. Anything with an Intel CPU and capable of running Debian should work fine. As a bonus, you may want to look for devices with a M.2 or PCIe express slot that is compatible with the Google Coral. I may earn a small commission for my endorsement, recommendation, testimonial, or link to any products or services from this website.
My current favorite is the Beelink EQ12 because of the efficient N100 CPU and dual NICs that allow you to setup a dedicated private network for your cameras where they can be blocked from accessing the internet. There are many used workstation options on eBay that work very well. Anything with an Intel CPU and capable of running Debian should work fine. As a bonus, you may want to look for devices with a M.2 or PCIe express slot that is compatible with the Google Coral. I may earn a small commission for my endorsement, recommendation, testimonial, or link to any products or services from this website.
| Name | Coral Inference Speed | Coral Compatibility | Notes |
| ------------------------------------------------------------------------------------------------------------- | --------------------- | ------------------- | ----------------------------------------------------------------------------------------- |
| Beelink EQ13 (<a href="https://amzn.to/4iQaBKu" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 5-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| Name | Coral Inference Speed | Coral Compatibility | Notes |
| ------------------------------------------------------------------------------------------------------------- | --------------------- | ------------------- | --------------------------------------------------------------------------------------------------------------------------------------- |
| Beelink EQ12 (<a href="https://amzn.to/3OlTMJY" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 5-10ms | USB | Dual gigabit NICs for easy isolated camera network. Easily handles several 1080p cameras. |
| Intel NUC (<a href="https://amzn.to/3psFlHi" target="_blank" rel="nofollow noopener sponsored">Amazon</a>) | 5-10ms | USB | Overkill for most, but great performance. Can handle many cameras at 5fps depending on typical amounts of motion. Requires extra parts. |
## Detectors
@@ -68,7 +69,6 @@ Inference speeds vary greatly depending on the CPU, GPU, or VPU used, some known
| Intel i5 7500 | ~ 15 ms | Inference speeds on CPU were ~ 260 ms |
| Intel i5 1135G7 | 10 - 15 ms | |
| Intel i5 12600K | ~ 15 ms | Inference speeds on CPU were ~ 35 ms |
| Intel Arc A750 | ~ 4 ms | |
### TensorRT - Nvidia GPU

View File

@@ -13,15 +13,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect
# Setup a go2rtc stream
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.9.4#module-streams), not just rtsp.
:::tip
For the best experience, you should set the stream name under `go2rtc` to match the name of your camera so that Frigate will automatically map it and be able to use better live view options for the camera.
See [the live view docs](../configuration/live.md#setting-stream-for-live-ui) for more information.
:::
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.9.4#module-streams), not just rtsp.
```yaml
go2rtc:
@@ -30,7 +22,7 @@ go2rtc:
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
```
After adding this to the config, restart Frigate and try to watch the live stream for a single camera by clicking on it from the dashboard. It should look much clearer and more fluent than the original jsmpeg stream.
The easiest live view to get working is MSE. After adding this to the config, restart Frigate and try to watch the live stream by selecting MSE in the dropdown after clicking on the camera.
### What if my video doesn't play?
@@ -54,7 +46,7 @@ After adding this to the config, restart Frigate and try to watch the live strea
streams:
back:
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
- "ffmpeg:back#video=h264#hardware"
- "ffmpeg:back#video=h264"
```
- Switch to FFmpeg if needed:
@@ -66,8 +58,9 @@ After adding this to the config, restart Frigate and try to watch the live strea
- ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
```
- If you can see the video but do not have audio, this is most likely because your camera's audio stream codec is not AAC.
- If possible, update your camera's audio settings to AAC in your camera's firmware.
- If you can see the video but do not have audio, this is most likely because your
camera's audio stream is not AAC.
- If possible, update your camera's audio settings to AAC.
- If your cameras do not support AAC audio, you will need to tell go2rtc to re-encode the audio to AAC on demand if you want audio. This will use additional CPU and add some latency. To add AAC audio on demand, you can update your go2rtc config as follows:
```yaml
go2rtc:
@@ -84,7 +77,7 @@ After adding this to the config, restart Frigate and try to watch the live strea
streams:
back:
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
- "ffmpeg:back#video=h264#audio=aac#hardware"
- "ffmpeg:back#video=h264#audio=aac"
```
When using the ffmpeg module, you would add AAC audio like this:
@@ -93,7 +86,7 @@ After adding this to the config, restart Frigate and try to watch the live strea
go2rtc:
streams:
back:
- "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac#hardware"
- "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac"
```
:::warning
@@ -109,4 +102,4 @@ section.
## Next steps
1. If the stream you added to go2rtc is also used by Frigate for the `record` or `detect` role, you can migrate your config to pull from the RTSP restream to reduce the number of connections to your camera as shown [here](/configuration/restream#reduce-connections-to-camera).
2. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats and may require opening ports on your router.
1. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats.

View File

@@ -294,21 +294,11 @@ cameras:
If you don't have separate streams for detect and record, you would just add the record role to the list on the first input.
:::note
If you only define one stream in your `inputs` and do not assign a `detect` role to it, Frigate will automatically assign it the `detect` role. Frigate will always decode a stream to support motion detection, Birdseye, the API image endpoints, and other features, even if you have disabled object detection with `enabled: False` in your config's `detect` section.
If you plan to use Frigate for recording only, it is still recommended to define a `detect` role for a low resolution stream to minimize resource usage from the required stream decoding.
:::
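Putting that together, a single-input sketch carrying both roles (the stream URL is a placeholder):

```yaml
cameras:
  name_of_your_camera:
    ffmpeg:
      inputs:
        # one stream used for both detection and recording
        - path: rtsp://10.0.10.10:554/rtsp
          roles:
            - detect
            - record
```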
By default, Frigate will retain video of all events for 10 days. The full set of options for recording can be found [here](../configuration/reference.md).
### Step 7: Complete config
At this point you have a complete config with basic functionality.
- View [common configuration examples](../configuration/index.md#common-configuration-examples) for a list of common configuration examples.
- View [full config reference](../configuration/reference.md) for a complete list of configuration options.
At this point you have a complete config with basic functionality. You can see the [full config reference](../configuration/reference.md) for a complete list of configuration options.
### Follow up
@@ -319,3 +309,4 @@ Now that you have a working install, you can use the following documentation for
3. [Review](../configuration/review.md)
4. [Masks](../configuration/masks.md)
5. [Home Assistant Integration](../integrations/home-assistant.md) - Integrate with Home Assistant

View File

@@ -3,38 +3,25 @@ id: reverse_proxy
title: Setting up a reverse proxy
---
This guide outlines the basic configuration steps needed to set up a reverse proxy in front of your Frigate instance.
This guide outlines the basic configuration steps needed to expose your Frigate UI to the internet.
A common way of accomplishing this is to use a reverse proxy webserver between your router and your Frigate instance.
A reverse proxy accepts HTTP requests from the public internet and redirects them transparently to internal webserver(s) on your network.
A reverse proxy is typically needed if you want to set up Frigate on a custom URL, on a subdomain, or on a host serving multiple sites. It could also be used to set up your own authentication provider or for more advanced HTTP routing.
The suggested steps are:
Before setting up a reverse proxy, check if any of the built-in functionality in Frigate suits your needs:
|Topic|Docs|
|-|-|
|TLS|Please see the `tls` [configuration option](../configuration/tls.md)|
|Authentication|Please see the [authentication](../configuration/authentication.md) documentation|
|IPv6|[Enabling IPv6](../configuration/advanced.md#enabling-ipv6)
**Note about TLS**
When using a reverse proxy, the TLS session is usually terminated at the proxy, sending the internal request over plain HTTP. If this is the desired behavior, TLS must first be disabled in Frigate, or you will encounter an HTTP 400 error: "The plain HTTP request was sent to HTTPS port."
To disable TLS, set the following in your Frigate configuration:
```yml
tls:
enabled: false
```
- **Configure** a 'proxy' HTTP webserver (such as [Apache2](https://httpd.apache.org/docs/current/) or [NPM](https://github.com/NginxProxyManager/nginx-proxy-manager)) and only expose ports 80/443 from this webserver to the internet
- **Encrypt** content from the proxy webserver by installing SSL (such as with [Let's Encrypt](https://letsencrypt.org/)). Note that SSL is then not required on your Frigate webserver as the proxy encrypts all requests for you
- **Restrict** access to your Frigate instance at the proxy using, for example, password authentication
:::warning
A reverse proxy can be used to secure access to an internal web server, but the user will be entirely reliant on the steps they have taken. You must ensure you are following security best practices.
This page does not attempt to outline the specific steps needed to secure your internal website.
A reverse proxy can be used to secure access to an internal webserver but the user will be entirely reliant
on the steps they have taken. You must ensure you are following security best practices.
This page does not attempt to outline the specific steps needed to secure your internal website.
Please use your own knowledge to assess and vet the reverse proxy software before you install anything on your system.
:::
## Proxies
There are many solutions available to implement reverse proxies and the community is invited to help out documenting others through a contribution to this page.
* [Apache2](#apache2-reverse-proxy)
* [Nginx](#nginx-reverse-proxy)
* [Traefik](#traefik-reverse-proxy)
There are several technologies available to implement reverse proxies. This document currently suggests one, using Apache2,
and the community is invited to document others through a contribution to this page.
## Apache2 Reverse Proxy
@@ -154,26 +141,3 @@ The settings below enabled connection upgrade, sets up logging (optional) and pr
}
```
## Traefik Reverse Proxy
This example shows how to add a `label` to the Frigate Docker compose file, enabling Traefik to automatically discover your Frigate instance.
Before using the example below, you must first set up Traefik with the [Docker provider](https://doc.traefik.io/traefik/providers/docker/)
```yml
services:
frigate:
container_name: frigate
image: ghcr.io/blakeblackshear/frigate:stable
...
...
labels:
- "traefik.enable=true"
- "traefik.http.services.frigate.loadbalancer.server.port=8971"
- "traefik.http.routers.frigate.rule=Host(`traefik.example.com`)"
```
The above configuration will create a "service" in Traefik, automatically adding your container's IP on port 8971 as a backend.
It will also add a router, routing requests to "traefik.example.com" to your local container.
Note that with this approach, you don't need to expose any ports for the Frigate instance since all traffic will be routed over the internal Docker network.

View File

@@ -373,7 +373,7 @@ Metadata about previews for this time range.
Metadata about previews for this hour
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>/frames`
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>`
List of frames in the preview cache for the time range. Previews are only kept in the cache until they are combined into an mp4 at the end of the hour.
@@ -381,14 +381,6 @@ List of frames in the preview cache for the time range. Previews are only kept i
Specific preview frame from preview cache.
### `GET /review/<review_id>/preview`
Looping image made from preview video / frames during this review item.
| param | Type | Description |
| --------- | ---- | -------------------------------- |
| `format` | str | Format of preview [`gif`, `mp4`] |
### `GET /<camera>/start/<start-timestamp>/end/<end-timestamp>/preview`
Looping image made from preview video / frames during this time range.
@@ -411,37 +403,17 @@ HTTP Live Streaming Video on Demand URL for the specified event. Can be viewed i
HTTP Live Streaming Video on Demand URL for the camera with the specified time range. Can be viewed in an application like VLC.
### `GET /api/exports`
Fetch a list of all export recordings
Sample response:
```json
[
{
"camera": "doorbell",
"date": 12800057,
"id": "doorbell_pjis54",
"in_progress": false,
"name": "2024-10-04 fox visit",
"thumb_path": "/media/frigate/clips/export/doorbell_pjis54.webp",
"video_path": "/media/frigate/exports/doorbell_pjis54.mp4"
}
]
```
### `POST /api/export/<camera>/start/<start-timestamp>/end/<end-timestamp>`
Export recordings from `start-timestamp` to `end-timestamp` for `camera` as a single mp4 file. These recordings will be exported to the `/media/frigate/exports` folder.
It is also possible to export this recording as a time-lapse using the "playback" key in the json body, or specify a custom export filename, using the "name" key.
It is also possible to export this recording as a time-lapse.
**Optional Body:**
```json
{
"playback": "realtime", // playback factor: realtime or timelapse_25x
"name": "custom export name" // override the default export filename with a custom name
"playback": "realtime" // playback factor: realtime or timelapse_25x
}
```

View File

@@ -25,7 +25,7 @@ Available via HACS as a default repository. To install:
- Use [HACS](https://hacs.xyz/) to install the integration:
```
Home Assistant > HACS > Click in the Search bar and type "Frigate" > Frigate
Home Assistant > HACS > Integrations > "Explore & Add Integrations" > Frigate
```
- Restart Home Assistant.

View File

@@ -11,7 +11,7 @@ These are the MQTT messages generated by Frigate. The default topic_prefix is `f
Designed to be used as an availability topic with Home Assistant. Possible message are:
"online": published when Frigate is running (on startup)
"offline": published after Frigate has stopped
"offline": published right before Frigate stops
### `frigate/restart`

View File

@@ -19,17 +19,17 @@ Once logged in, you can generate an API key for Frigate in Settings.
### Set your API key
In Frigate, you can use an environment variable or a docker secret named `PLUS_API_KEY` to enable the Frigate+ page. Home Assistant Addon users can set it under Settings > Addons > Frigate NVR > Configuration > Options (be sure to toggle the "Show unused optional configuration options" switch).
In Frigate, you can use an environment variable or a docker secret named `PLUS_API_KEY` to enable the `SEND TO FRIGATE+` buttons on the events page. Home Assistant Addon users can set it under Settings > Addons > Frigate NVR > Configuration > Options (be sure to toggle the "Show unused optional configuration options" switch).
:::warning
You cannot use the `environment_vars` section of your Frigate configuration file to set this environment variable. It must be defined as an environment variable in the docker config or HA addon config.
You cannot use the `environment_vars` section of your configuration file to set this environment variable.
:::
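For docker compose users, a sketch of passing that variable (the key value is a placeholder):

```yaml
services:
  frigate:
    image: ghcr.io/blakeblackshear/frigate:stable
    environment:
      # placeholder; use the API key generated in Frigate+ settings
      PLUS_API_KEY: "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
```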
## Submit examples
Once your API key is configured, you can submit examples directly from the Frigate+ page.
Once your API key is configured, you can submit examples directly from the events page in Frigate using the `SEND TO FRIGATE+` button.
:::note

View File

@@ -18,7 +18,3 @@ Please use your own knowledge to assess and vet them before you install anything
[Double Take](https://github.com/skrashevich/double-take) provides a unified UI and API for processing and training images for facial recognition.
It supports automatically setting the sub labels in Frigate for person objects that are detected and recognized.
This is a fork (with fixed errors and new features) of the [original Double Take](https://github.com/jakowenko/double-take) project, which, unfortunately, is no longer maintained by its author.
## [Frigate telegram](https://github.com/OldTyT/frigate-telegram)
[Frigate telegram](https://github.com/OldTyT/frigate-telegram) makes it possible to send events from Frigate to Telegram. Events are sent as a message with a text description, video, and thumbnail.

View File

@@ -5,7 +5,7 @@ title: Requesting your first model
## Step 1: Upload and annotate your images
Before requesting your first model, you will need to upload and verify at least 10 images to Frigate+. The more images you upload, annotate, and verify the better your results will be. Most users start to see very good results once they have at least 100 verified images per camera. Keep in mind that varying conditions should be included. You will want images from cloudy days, sunny days, dawn, dusk, and night. Refer to the [integration docs](../integrations/plus.md#generate-an-api-key) for instructions on how to easily submit images to Frigate+ directly from Frigate.
Before requesting your first model, you will need to upload at least 10 images to Frigate+. But for the best results, you should provide at least 100 verified images per camera. Keep in mind that varying conditions should be included. You will want images from cloudy days, sunny days, dawn, dusk, and night. Refer to the [integration docs](../integrations/plus.md#generate-an-api-key) for instructions on how to easily submit images to Frigate+ directly from Frigate.
It is recommended to submit **both** true positives and false positives. This will help the model differentiate between what is and isn't correct. You should aim for a target of 80% true positive submissions and 20% false positives across all of your images. If you are experiencing false positives in a specific area, submitting true positives for any object type near that area in similar lighting conditions will help teach the model what that area looks like when no objects are present.
@@ -13,7 +13,7 @@ For more detailed recommendations, you can refer to the docs on [improving your
## Step 2: Submit a model request
Once you have an initial set of verified images, you can request a model on the Models page. For guidance on choosing a model type, refer to [this part of the documentation](./index.md#available-model-types). Each model request requires 1 of the 12 trainings that you receive with your annual subscription. This model will support all [label types available](./index.md#available-label-types) even if you do not submit any examples for those labels. Model creation can take up to 36 hours.
Once you have an initial set of verified images, you can request a model on the Models page. Each model request requires 1 of the 12 trainings that you receive with your annual subscription. This model will support all [label types available](./index.md#available-label-types) even if you do not submit any examples for those labels. Model creation can take up to 36 hours.
![Plus Models Page](/img/plus/plus-models.jpg)
## Step 3: Set your model id in the config

View File

@@ -3,7 +3,7 @@ id: improving_model
title: Improving your model
---
You may find that Frigate+ models result in more false positives initially, but by submitting true and false positives, the model will improve. With all the new images now being submitted by subscribers, future base models will improve as more and more examples are incorporated. Note that only images with at least one verified label will be used when training your model. Submitting an image from Frigate as a true or false positive will not verify the image. You still must verify the image in Frigate+ in order for it to be used in training.
You may find that Frigate+ models result in more false positives initially, but by submitting true and false positives, the model will improve. Because a limited number of users submitted images to Frigate+ prior to this launch, you may need to submit several hundred images per camera to see good results. With all the new images now being submitted, future base models will improve as more and more users (including you) submit examples to Frigate+. Note that only verified images will be used when training your model. Submitting an image from Frigate as a true or false positive will not verify the image. You still must verify the image in Frigate+ in order for it to be used in training.
- **Submit both true positives and false positives**. This will help the model differentiate between what is and isn't correct. You should aim for a target of 80% true positive submissions and 20% false positives across all of your images. If you are experiencing false positives in a specific area, submitting true positives for any object type near that area in similar lighting conditions will help teach the model what that area looks like when no objects are present.
- **Lower your thresholds a little in order to generate more false/true positives near the threshold value**. For example, if you have some false positives that are scoring at 68% and some true positives scoring at 72%, you can try lowering your threshold to 65% and submitting both true and false positives within that range. This will help the model learn and widen the gap between true and false positive scores.
@@ -13,7 +13,7 @@ You may find that Frigate+ models result in more false positives initially, but
For the best results, follow the following guidelines.
**Label every object in the image**: It is important that you label all objects in each image before verifying. If you don't label a car for example, the model will be taught that part of the image is _not_ a car and it will start to get confused. You can exclude labels that you don't want detected on any of your cameras.
**Label every object in the image**: It is important that you label all objects in each image before verifying. If you don't label a car for example, the model will be taught that part of the image is _not_ a car and it will start to get confused.
**Make tight bounding boxes**: Tighter bounding boxes improve the recognition and ensure that accurate bounding boxes are predicted at runtime.
@@ -21,7 +21,7 @@ For the best results, follow the following guidelines.
**Label objects hard to identify as difficult**: When objects are truly difficult to make out, such as a car barely visible through a bush, or a dog that is hard to distinguish from the background at night, flag them as 'difficult'. This is not used in the model training as of now, but will be in the future.
**Delivery logos such as `amazon`, `ups`, and `fedex` should label the logo**: For a Fedex truck, label the truck as a `car` and make a different bounding box just for the Fedex logo. If there are multiple logos, label each of them.
**`amazon`, `ups`, and `fedex` should label the logo**: For a Fedex truck, label the truck as a `car` and make a different bounding box just for the Fedex logo. If there are multiple logos, label each of them.
![Fedex Logo](/img/plus/fedex-logo.jpg)
@@ -36,17 +36,18 @@ Misidentified objects should have a correct label added. For example, if a perso
## Shortcuts for a faster workflow
| Shortcut Key | Description |
| ----------------- | ----------------------------- |
| `?` | Show all keyboard shortcuts |
| `w` | Add box |
| `d` | Toggle difficult |
| `s` | Switch to the next label |
| `tab` | Select next largest box |
| `del` | Delete current box |
| `esc` | Deselect/Cancel |
| `← ↑ → ↓` | Move box |
| `Shift + ← ↑ → ↓` | Resize box |
| `scrollwheel` | Zoom in/out |
| `f` | Hide/show all but current box |
| `spacebar` | Verify and save |
| Shortcut Key      | Description                   |
| ----------------- | ----------------------------- |
| `?`               | Show all keyboard shortcuts   |
| `w`               | Add box                       |
| `d`               | Toggle difficult              |
| `s`               | Switch to the next label      |
| `tab`             | Select next largest box       |
| `del`             | Delete current box            |
| `esc`             | Deselect/Cancel               |
| `← ↑ → ↓`         | Move box                      |
| `Shift + ← ↑ → ↓` | Resize box                    |
| `-`               | Zoom out                      |
| `=`               | Zoom in                       |
| `f`               | Hide/show all but current box |
| `spacebar`        | Verify and save               |

View File

@@ -15,52 +15,25 @@ With a subscription, 12 model trainings per year are included. If you cancel you
Information on how to integrate Frigate+ with Frigate can be found in the [integration docs](../integrations/plus.md).
## Available model types
There are two model types offered in Frigate+: `mobiledet` and `yolonas`. Both are object detection models trained to detect the same set of labels [listed below](#available-label-types).
Not all model types are supported by all detectors, so it's important to choose a model type to match your detector as shown in the table under [supported detector types](#supported-detector-types).
| Model Type | Description |
| ----------- | -------------------------------------------------------------------------------------------------------------------------------------------- |
| `mobiledet` | Based on the same architecture as the default model included with Frigate. Runs on Google Coral devices and CPUs. |
| `yolonas` | A newer architecture that offers slightly higher accuracy and improved detection of small objects. Runs on Intel, NVidia GPUs, and AMD GPUs. |
## Supported detector types
Currently, Frigate+ models support CPU (`cpu`), Google Coral (`edgetpu`), OpenVino (`openvino`), ONNX (`onnx`), and ROCm (`rocm`) detectors.
:::warning
Using Frigate+ models with `onnx` and `rocm` is only available with Frigate 0.15, which is still under development.
Frigate+ models are not supported for TensorRT or OpenVino yet.
:::
| Hardware | Recommended Detector Type | Recommended Model Type |
| ---------------------------------------------------------------------------------------------------------------------------- | ------------------------- | ---------------------- |
| [CPU](/configuration/object_detectors.md#cpu-detector-not-recommended) | `cpu` | `mobiledet` |
| [Coral (all form factors)](/configuration/object_detectors.md#edge-tpu-detector) | `edgetpu` | `mobiledet` |
| [Intel](/configuration/object_detectors.md#openvino-detector) | `openvino` | `yolonas` |
| [NVidia GPU](https://deploy-preview-13787--frigate-docs.netlify.app/configuration/object_detectors#onnx)\* | `onnx` | `yolonas` |
| [AMD ROCm GPU](https://deploy-preview-13787--frigate-docs.netlify.app/configuration/object_detectors#amdrocm-gpu-detector)\* | `rocm` | `yolonas` |
Currently, Frigate+ models only support CPU (`cpu`) and Coral (`edgetpu`) detectors. OpenVino is next in line to gain support.
_\* Requires Frigate 0.15_
The models are created using the same MobileDet architecture as the default model. Additional architectures will be added in future releases as needed.
## Available label types
Frigate+ models support a more relevant set of objects for security cameras. Currently, the following objects are supported:
- **People**: `person`, `face`
- **Vehicles**: `car`, `motorcycle`, `bicycle`, `boat`, `license_plate`
- **Delivery Logos**: `amazon`, `usps`, `ups`, `fedex`, `dhl`, `an_post`, `purolator`, `postnl`, `nzpost`, `postnord`, `gls`, `dpd`
- **Animals**: `dog`, `cat`, `deer`, `horse`, `bird`, `raccoon`, `fox`, `bear`, `cow`, `squirrel`, `goat`, `rabbit`
- **Other**: `package`, `waste_bin`, `bbq_grill`, `robot_lawnmower`, `umbrella`
Other object types available in the default Frigate model are not available. Additional object types will be added in future releases.
Frigate+ models support a more relevant set of objects for security cameras. Currently, only the following objects are supported: `person`, `face`, `car`, `license_plate`, `amazon`, `ups`, `fedex`, `package`, `dog`, `cat`, `deer`. Other object types available in the default Frigate model are not available. Additional object types will be added in future releases.
### Label attributes
Frigate has special handling for some labels when using Frigate+ models. `face`, `license_plate`, and delivery logos such as `amazon`, `ups`, and `fedex` are considered attribute labels which are not tracked like regular objects and do not generate events. In addition, the `threshold` filter will have no effect on these labels. You should adjust the `min_score` and other filter values as needed.
Frigate has special handling for some labels when using Frigate+ models. `face`, `license_plate`, `amazon`, `ups`, and `fedex` are considered attribute labels which are not tracked like regular objects and do not generate events. In addition, the `threshold` filter will have no effect on these labels. You should adjust the `min_score` and other filter values as needed.
In order to have Frigate start using these attribute labels, you will need to add them to the list of objects to track:
@@ -83,6 +56,6 @@ When using Frigate+ models, Frigate will choose the snapshot of a person object
![Face Attribute](/img/plus/attribute-example-face.jpg)
Delivery logos such as `amazon`, `ups`, and `fedex` labels are used to automatically assign a sub label to car objects.
`amazon`, `ups`, and `fedex` labels are used to automatically assign a sub label to car objects.
![Fedex Attribute](/img/plus/attribute-example-fedex.jpg)

View File

@@ -28,18 +28,6 @@ The USB coral has different IDs when it is uninitialized and initialized.
- When running Frigate in a VM, Proxmox LXC, etc., you must ensure both device IDs are mapped.
- When running HA OS, you may need to run the Full Access version of the Frigate addon with the `Protected Mode` switch disabled so that the Coral can be accessed.
### Synology 716+II running DSM 7.2.1-69057 Update 5
Some users have reported that this older device runs an older kernel, causing issues with the Coral not being detected. The following steps allowed it to be detected correctly:
1. Plug the Coral TPU into any of the USB ports on the NAS.
2. Open the control panel - info screen. The Coral TPU will be shown as a generic device.
3. Start the Docker container with the Coral TPU enabled in the config.
4. The TPU will be detected, but a few moments later it will disconnect.
5. While leaving the TPU plugged in, restart the NAS using the reboot command in the UI. Do NOT unplug the NAS or power it off.
6. Open the control panel - info screen. The Coral TPU will now be recognized as a USB Device - Google Inc.
7. Start the Frigate container. Everything should work now!
## USB Coral Detection Appears to be Stuck
The USB Coral can become stuck and need to be restarted. This can happen for a number of reasons depending on hardware and software setup. Some common reasons are:
@@ -49,21 +37,7 @@ The USB Coral can become stuck and need to be restarted, this can happen for a n
## PCIe Coral Not Detected
The most common reason for the PCIe Coral not being detected is that the driver has not been installed. This process varies based on which OS and kernel are being run.
- In most cases [the Coral docs](https://coral.ai/docs/m2/get-started/#2-install-the-pcie-driver-and-edge-tpu-runtime) show how to install the driver for the PCIe based Coral.
- For Ubuntu 22.04+, https://github.com/jnicolson/gasket-builder can be used to build and install the latest version of the driver.
### Not detected on Raspberry Pi 5
A kernel update to the RPi5 means an update to config.txt is required; see [the Raspberry Pi forum for more info](https://forums.raspberrypi.com/viewtopic.php?t=363682&sid=cb59b026a412f0dc041595951273a9ca&start=25).
Specifically, add the following to config.txt:
```
dtoverlay=pciex1-compat-pi5,no-mip
dtoverlay=pcie-32bit-dma-pi5
```
The most common reason for the PCIe coral not being detected is that the driver has not been installed. See [the coral docs](https://coral.ai/docs/m2/get-started/#2-install-the-pcie-driver-and-edge-tpu-runtime) for how to install the driver for the PCIe based coral.
## Only One PCIe Coral Is Detected With Coral Dual EdgeTPU

View File

@@ -52,7 +52,6 @@ module.exports = {
"configuration/authentication",
"configuration/hardware_acceleration",
"configuration/ffmpeg_presets",
"configuration/pwa",
"configuration/tls",
"configuration/advanced",
],

Binary image file changed (not shown). Before: 62 KiB → After: 57 KiB

Binary image file changed (not shown). Before: 49 KiB → After: 63 KiB

View File

@@ -11,18 +11,6 @@
"! pip install -q super_gradients==3.7.1"
]
},
{
"cell_type": "code",
"source": [
"! sed -i 's/sghub.deci.ai/sg-hub-nv.s3.amazonaws.com/' /usr/local/lib/python3.10/dist-packages/super_gradients/training/pretrained_models.py\n",
"! sed -i 's/sghub.deci.ai/sg-hub-nv.s3.amazonaws.com/' /usr/local/lib/python3.10/dist-packages/super_gradients/training/utils/checkpoint_utils.py"
],
"metadata": {
"id": "NiRCt917KKcL"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"execution_count": null,
@@ -84,4 +72,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}

web/.node-version Normal file
View File

@@ -0,0 +1 @@
20

View File

@@ -10,6 +10,8 @@ import { Suspense, lazy } from "react";
import { Redirect } from "./components/navigation/Redirect";
import { cn } from "./lib/utils";
import { isPWA } from "./utils/isPWA";
import { ADMIN_USERS } from "@/types/user";
import useSWR from "swr";
const Live = lazy(() => import("@/pages/Live"));
const Events = lazy(() => import("@/pages/Events"));
@@ -22,6 +24,7 @@ const UIPlayground = lazy(() => import("@/pages/UIPlayground"));
const Logs = lazy(() => import("@/pages/Logs"));
function App() {
const { data: profile } = useSWR("profile");
return (
<Providers>
<BrowserRouter basename={window.baseUrl}>
@@ -47,9 +50,13 @@ function App() {
<Route path="/export" element={<Exports />} />
<Route path="/plus" element={<SubmitPlus />} />
<Route path="/system" element={<System />} />
<Route path="/settings" element={<Settings />} />
<Route path="/config" element={<ConfigEditor />} />
<Route path="/logs" element={<Logs />} />
{ADMIN_USERS.includes(profile?.username) && (
<>
<Route path="/settings" element={<Settings />} />
<Route path="/config" element={<ConfigEditor />} />
<Route path="/logs" element={<Logs />} />
</>
)}
<Route path="/playground" element={<UIPlayground />} />
<Route path="*" element={<Redirect to="/" />} />
</Routes>
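The routes above gate `/settings`, `/config`, and `/logs` by checking the `useSWR("profile")` response against `ADMIN_USERS` inline. As a rough illustration of the same idea, here is a minimal sketch of a pathless layout-route guard; the `RequireAdmin` name and the redirect-to-dashboard fallback are hypothetical and not part of this changeset (it assumes react-router-dom v6, which the surrounding `Routes`/`Route` usage suggests):

```tsx
// RequireAdmin.tsx -- hypothetical sketch, not part of this changeset.
// Renders nested admin-only routes when the logged-in profile is an admin;
// otherwise redirects back to the dashboard.
import { Navigate, Outlet } from "react-router-dom";
import useSWR from "swr";
import { ADMIN_USERS } from "@/types/user";

type Profile = { username?: string };

export default function RequireAdmin() {
  const { data: profile } = useSWR<Profile>("profile");

  // Profile is still loading: render nothing rather than redirecting early.
  if (!profile) {
    return null;
  }

  return ADMIN_USERS.includes(profile.username ?? "") ? (
    <Outlet />
  ) : (
    <Navigate to="/" replace />
  );
}
```

Used as a wrapper route, e.g. `<Route element={<RequireAdmin />}>` around the `/settings`, `/config`, and `/logs` routes, this would keep the admin check in one place instead of repeating it per route.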

View File

@@ -68,6 +68,8 @@ import {
import { TooltipPortal } from "@radix-ui/react-tooltip";
import { cn } from "@/lib/utils";
import { baseUrl } from "@/api/baseUrl";
import useSWR from "swr";
import { ADMIN_USERS } from "@/types/user";
type GeneralSettingsProps = {
className?: string;
@@ -80,6 +82,8 @@ export default function GeneralSettings({ className }: GeneralSettingsProps) {
const { send: sendRestart } = useRestart();
const { data: profile } = useSWR("profile");
useEffect(() => {
let countdownInterval: NodeJS.Timeout;
@@ -169,48 +173,58 @@ export default function GeneralSettings({ className }: GeneralSettingsProps) {
<span>System metrics</span>
</MenuItem>
</Link>
<Link to="/logs">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuList className="mr-2 size-4" />
<span>System logs</span>
</MenuItem>
</Link>
{ADMIN_USERS.includes(profile?.username) && (
<Link to="/logs">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuList className="mr-2 size-4" />
<span>System logs</span>
</MenuItem>
</Link>
)}
</DropdownMenuGroup>
<DropdownMenuLabel className={isDesktop ? "mt-3" : "mt-1"}>
Configuration
</DropdownMenuLabel>
<DropdownMenuSeparator />
{ADMIN_USERS.includes(profile?.username) && (
<>
<DropdownMenuLabel className={isDesktop ? "mt-3" : "mt-1"}>
Configuration
</DropdownMenuLabel>
<DropdownMenuSeparator />
</>
)}
<DropdownMenuGroup>
<Link to="/settings">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuSettings className="mr-2 size-4" />
<span>Settings</span>
</MenuItem>
</Link>
<Link to="/config">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuPenSquare className="mr-2 size-4" />
<span>Configuration editor</span>
</MenuItem>
</Link>
{ADMIN_USERS.includes(profile?.username) && (
<>
<Link to="/settings">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuSettings className="mr-2 size-4" />
<span>Settings</span>
</MenuItem>
</Link>
<Link to="/config">
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex w-full items-center p-2 text-sm"
}
>
<LuPenSquare className="mr-2 size-4" />
<span>Configuration editor</span>
</MenuItem>
</Link>
</>
)}
<DropdownMenuLabel className={isDesktop ? "mt-3" : "mt-1"}>
Appearance
</DropdownMenuLabel>
@@ -358,16 +372,24 @@ export default function GeneralSettings({ className }: GeneralSettingsProps) {
<span>GitHub</span>
</MenuItem>
</a>
<DropdownMenuSeparator className={isDesktop ? "mt-3" : "mt-1"} />
<MenuItem
className={
isDesktop ? "cursor-pointer" : "flex items-center p-2 text-sm"
}
onClick={() => setRestartDialogOpen(true)}
>
<LuRotateCw className="mr-2 size-4" />
<span>Restart Frigate</span>
</MenuItem>
{ADMIN_USERS.includes(profile?.username) && (
<>
<DropdownMenuSeparator
className={isDesktop ? "mt-3" : "mt-1"}
/>
<MenuItem
className={
isDesktop
? "cursor-pointer"
: "flex items-center p-2 text-sm"
}
onClick={() => setRestartDialogOpen(true)}
>
<LuRotateCw className="mr-2 size-4" />
<span>Restart Frigate</span>
</MenuItem>
</>
)}
</div>
</Content>
</Container>

View File

@@ -1,3 +1,5 @@
export type User = {
username: string;
};
export const ADMIN_USERS: string[] = ["admin", "cking91977", "akadmin"];
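Both `App.tsx` and `GeneralSettings` repeat the `ADMIN_USERS.includes(profile?.username)` expression. A small helper next to this constant could centralize that check; the following is a minimal sketch, and the `isAdminUser` name is hypothetical, not part of this changeset:

```ts
// Hypothetical addition to web/src/types/user.ts, alongside ADMIN_USERS above.
// Centralizes the admin check so call sites do not repeat the includes() logic.
export function isAdminUser(username?: string): boolean {
  // useSWR("profile") yields undefined while the profile is loading, so a
  // missing username is treated as non-admin rather than passed to includes().
  return username !== undefined && ADMIN_USERS.includes(username);
}
```

Call sites would then read `{isAdminUser(profile?.username) && (...)}` in both components.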