forked from Github/frigate

Compare commits: 260 commits (dependabot ... v0.15.0-be)
| Author | SHA1 | Date |
| --- | --- | --- |
|  | 33825f6d96 |  |
|  | eca504cb07 |  |
|  | 4c75440af4 |  |
|  | 94f7528885 |  |
|  | 4dadf6d353 |  |
|  | 2d27e72ed9 |  |
|  | 4ff0c8a8d1 |  |
|  | f9fba94863 |  |
|  | f9b246dbd0 |  |
|  | 8fefded8dc |  |
|  | 18824830fd |  |
|  | fa81d87dc0 |  |
|  | 8bc145472a |  |
|  | 7afc1e9762 |  |
|  | fc59c83e16 |  |
|  | e4048be088 |  |
|  | d715a8c290 |  |
|  | ad308252a1 |  |
|  | c7d9f83638 |  |
|  | 828fdbfd2d |  |
|  | 40c6fda19d |  |
|  | b69816c2f9 |  |
|  | 46f5234bd9 |  |
|  | 81b8d7a66b |  |
|  | b1285a16c1 |  |
|  | 90140e7710 |  |
|  | 8364e68667 |  |
|  | 4bb420d049 |  |
|  | 560dc68120 |  |
|  | 8fcb8e54f7 |  |
|  | 6c70e56059 |  |
|  | b24d292ade |  |
|  | 2137de37b9 |  |
|  | 3c591ad8a9 |  |
|  | b56f4c4558 |  |
|  | 5d8bcb42c6 |  |
|  | b299652e86 |  |
|  | 8ac4b001a2 |  |
|  | 6294ce7807 |  |
|  | 8173cd7776 |  |
|  | edaccd86d6 |  |
|  | 5f77408956 |  |
|  | e836523bc3 |  |
|  | 9f866be110 |  |
|  | f6879f40b0 |  |
|  | 06f47f262f |  |
|  | eda52a3b82 |  |
|  | 3f1ab66899 |  |
|  | b75efcbca2 |  |
|  | 25043278ab |  |
|  | 644069fb23 |  |
|  | 0eccb6a610 |  |
|  | 0abd514064 |  |
|  | 3879fde06d |  |
|  | 887433fc6a |  |
|  | dd7a07bd0d |  |
|  | 0ee32cf110 |  |
|  | 72aa68cedc |  |
|  | 9adffa1ef5 |  |
|  | 4ca267ea17 |  |
|  | 833768172d |  |
|  | 1ec459ea3a |  |
|  | 66d0ad5803 |  |
|  | 92ac025e43 |  |
|  | e8b2fde753 |  |
|  | 0fc7999780 |  |
|  | 3a403392e7 |  |
|  | acccc6fd93 |  |
|  | 40bb4765d4 |  |
|  | 48c60621b6 |  |
|  | 1e1610671e |  |
|  | de86c37687 |  |
|  | 6e332bbdf8 |  |
|  | 8a8a0c7dec |  |
|  | d4b9b5a7dd |  |
|  | 6df541e1fd |  |
|  | 748087483c |  |
|  | ae91fa6a39 |  |
|  | 2897afce41 |  |
|  | ee8091ba91 |  |
|  | 30b5faebae |  |
|  | 8d753f821d |  |
|  | 54eb03d2a1 |  |
|  | dd6276e706 |  |
|  | f67ec241d4 |  |
|  | 8ade85edec |  |
|  | a2ca18a714 |  |
|  | 6a83ff2511 |  |
|  | bc3a06178b |  |
|  | 9fda259c0c |  |
|  | d4925622f9 |  |
|  | dbeaf43b8f |  |
|  | a2f42d51fd |  |
|  | 0b71cfaf06 |  |
|  | d558ac83b6 |  |
|  | 74efc94649 |  |
|  | 2541a345d0 |  |
|  | 23ce1e930d |  |
|  | 6ebad84160 |  |
|  | 24ac9f3e5a |  |
|  | 757150dec1 |  |
|  | ddcec82b61 |  |
|  | 74047453ef |  |
|  | dcaed0e90f |  |
|  | cae304e07f |  |
|  | 47ad0ca993 |  |
|  | 9c751230a1 |  |
|  | a468ed316d |  |
|  | e725730982 |  |
|  | 21c12d118b |  |
|  | b9e74ee9ab |  |
|  | 0f2cff5078 |  |
|  | e5e196bd7f |  |
|  | 077402406b |  |
|  | 54900ae318 |  |
|  | 3c015bf822 |  |
|  | a1efcfb2d0 |  |
|  | 75d531285a |  |
|  | 0aad7db2d2 |  |
|  | 20c3b890ae |  |
|  | 0126960d79 |  |
|  | 849d441c5c |  |
|  | b5f5627ca6 |  |
|  | 5b0c1e5b9e |  |
|  | 3cff0df0ce |  |
|  | 15fa55c223 |  |
|  | 594ca3a04b |  |
|  | fafe5623d1 |  |
|  | 141cf39368 |  |
|  | 1fa050fd7a |  |
|  | f36e7430ae |  |
|  | 94fd75e014 |  |
|  | 95d6da3111 |  |
|  | 4dc4704bb4 |  |
|  | 18bf7f93fa |  |
|  | c73f694c63 |  |
|  | 3688a3bc67 |  |
|  | 775a3a1c22 |  |
|  | bbbb3b4a06 |  |
|  | 576191cd4e |  |
|  | 38d398c967 |  |
|  | 7da44115d3 |  |
|  | b54032bdc7 |  |
|  | cab497e81e |  |
|  | 50e9c67609 |  |
|  | bd57ea0110 |  |
|  | 05fe7f8a48 |  |
|  | c0bd3b362c |  |
|  | 6381028fd6 |  |
|  | 1f328be1bd |  |
|  | ddfdb71783 |  |
|  | da1478c0c1 |  |
|  | 40fe3b4358 |  |
|  | 20fd1db0f4 |  |
|  | a65aaab849 |  |
|  | a5595189ed |  |
|  | 4a1da3ebc5 |  |
|  | 35a4460334 |  |
|  | a6ccb37683 |  |
|  | 68465aed49 |  |
|  | fc3aac96f2 |  |
|  | 32c7669b28 |  |
|  | fef30bc671 |  |
|  | ae547d27e4 |  |
|  | be3e1831d4 |  |
|  | 45aceea53b |  |
|  | 25819584bd |  |
|  | 4c24b70d47 |  |
|  | 4c12673fbb |  |
|  | e935db5075 |  |
|  | a7f1f8d327 |  |
|  | 8c540d7210 |  |
|  | 4c4b884f8e |  |
|  | a3d3fe07ce |  |
|  | 1ae521f560 |  |
|  | 7854e1c2c1 |  |
|  | a9ff795948 |  |
|  | a8e2f97260 |  |
|  | d17253b023 |  |
|  | ecbf0410eb |  |
|  | cffc431bf0 |  |
|  | dc54981784 |  |
|  | 811da2e159 |  |
|  | c4e2f3bc70 |  |
|  | bd906a7915 |  |
|  | 3df33199bc |  |
|  | 7ad30f15d5 |  |
|  | 2f38d960d4 |  |
|  | 2fc58fea81 |  |
|  | e7dfbf76bb |  |
|  | 94de29187a |  |
|  | a82c1f303b |  |
|  | 55e1f865d8 |  |
|  | 3f996cd62c |  |
|  | 58a8028485 |  |
|  | 190ce5ee31 |  |
|  | 70aab068fd |  |
|  | 617d279419 |  |
|  | 4de088d725 |  |
|  | f8fd746678 |  |
|  | 1529ee59fe |  |
|  | 19c253b429 |  |
|  | 13bb9dd715 |  |
|  | 9b4602acb3 |  |
|  | e5448110fc |  |
|  | 4974defe6f |  |
|  | 65ceadda2b |  |
|  | 8b2adb55ed |  |
|  | 58ca44bd15 |  |
|  | ef46451b80 |  |
|  | 758b0f9734 |  |
|  | 3650000b31 |  |
|  | dbd042ca3e |  |
|  | 6b9082bdd9 |  |
|  | f9baa3bf20 |  |
|  | a75feb7f8f |  |
|  | 009900b29b |  |
|  | dc04cf82d8 |  |
|  | b2c23a367d |  |
|  | 338b59a32e |  |
|  | 07ffd76437 |  |
|  | 3eaf9f4011 |  |
|  | 9832831c5e |  |
|  | d3259c4782 |  |
|  | 940c12d9d8 |  |
|  | 8f2cbe261b |  |
|  | e86788034d |  |
|  | 4ecc0e15ce |  |
|  | b01ce31903 |  |
|  | 87b69c373a |  |
|  | 07b3160dff |  |
|  | 096e2791f5 |  |
|  | 9d456ccfcf |  |
|  | ad5c3741e9 |  |
|  | fe188bd646 |  |
|  | f47984818f |  |
|  | 7b274b6974 |  |
|  | b1806b0a7c |  |
|  | ff2e46650c |  |
|  | 69fe6cdc05 |  |
|  | b7e0d14b83 |  |
|  | 7db6ed9ad5 |  |
|  | da0f63f095 |  |
|  | 90221e8c94 |  |
|  | 37680c317c |  |
|  | 70ea6fc9a1 |  |
|  | 67e692a7f3 |  |
|  | 34382ac38e |  |
|  | b94b08a33c |  |
|  | 540d66af57 |  |
|  | a2deeb0d12 |  |
|  | 22fe261dd6 |  |
|  | b44354ad29 |  |
|  | 3ffbdb35a2 |  |
|  | 0504e9ef79 |  |
|  | b309287087 |  |
|  | e891f2ad6d |  |
|  | 9b1fb33ac6 |  |
|  | 8a099b4ae5 |  |
|  | 2cdd483126 |  |
@@ -42,6 +42,7 @@ codeproject
colormap
colorspace
comms
coro
ctypeslib
CUDA
Cuvid
@@ -59,6 +60,7 @@ dsize
dtype
ECONNRESET
edgetpu
fastapi
faststart
fflags
ffprobe
@@ -212,6 +214,7 @@ rcond
RDONLY
rebranded
referer
reindex
Reolink
restream
restreamed
@@ -236,6 +239,7 @@ sleeptime
SNDMORE
socs
sqliteq
sqlitevecq
ssdlite
statm
stimeout
@@ -270,6 +274,7 @@ unraid
unreviewed
userdata
usermod
uvicorn
vaapi
vainfo
variations
@@ -90,6 +90,9 @@ body:
  - HassOS Addon
  - Docker Compose
  - Docker CLI
+ - Proxmox via Docker
+ - Proxmox via TTeck Script
+ - Windows WSL2
  validations:
    required: true
  - type: dropdown
@@ -102,7 +105,7 @@ body:
  - TensorRT
  - RKNN
  - Other
- - CPU (no Coral)
+ - CPU (no coral)
  validations:
    required: true
  - type: dropdown
.github/DISCUSSION_TEMPLATE/config-support.yml (11 changes)

@@ -76,6 +76,17 @@ body:
  - HassOS Addon
  - Docker Compose
  - Docker CLI
+ - Proxmox via Docker
+ - Proxmox via TTeck Script
+ - Windows WSL2
  validations:
    required: true
+ - type: textarea
+   id: docker
+   attributes:
+     label: docker-compose file or Docker CLI command
+     description: This will be automatically formatted into code, so no need for backticks.
+     render: yaml
+   validations:
+     required: true
  - type: dropdown
.github/DISCUSSION_TEMPLATE/detector-support.yml (25 changes)

@@ -48,28 +48,6 @@ body:
      render: shell
    validations:
      required: true
- - type: textarea
-   id: go2rtclogs
-   attributes:
-     label: Relevant go2rtc log output
-     description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
-     render: shell
-   validations:
-     required: true
- - type: dropdown
-   id: os
-   attributes:
-     label: Operating system
-     options:
-       - HassOS
-       - Debian
-       - Other Linux
-       - Proxmox
-       - UNRAID
-       - Windows
-       - Other
-   validations:
-     required: true
  - type: dropdown
    id: install-method
    attributes:
@@ -78,6 +56,9 @@ body:
  - HassOS Addon
  - Docker Compose
  - Docker CLI
+ - Proxmox via Docker
+ - Proxmox via TTeck Script
+ - Windows WSL2
  validations:
    required: true
  - type: dropdown
.github/DISCUSSION_TEMPLATE/general-support.yml (25 changes)

@@ -68,20 +68,6 @@ body:
      label: Frigate stats
      description: Output from frigate's /api/stats endpoint
      render: json
- - type: dropdown
-   id: os
-   attributes:
-     label: Operating system
-     options:
-       - HassOS
-       - Debian
-       - Other Linux
-       - Proxmox
-       - UNRAID
-       - Windows
-       - Other
-   validations:
-     required: true
  - type: dropdown
    id: install-method
    attributes:
@@ -90,6 +76,17 @@ body:
  - HassOS Addon
  - Docker Compose
  - Docker CLI
+ - Proxmox via Docker
+ - Proxmox via TTeck Script
+ - Windows WSL2
  validations:
    required: true
+ - type: textarea
+   id: docker
+   attributes:
+     label: docker-compose file or Docker CLI command
+     description: This will be automatically formatted into code, so no need for backticks.
+     render: yaml
+   validations:
+     required: true
  - type: dropdown
@@ -24,12 +24,6 @@ body:
      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
    validations:
      required: true
- - type: input
-   attributes:
-     label: In which browser(s) are you experiencing the issue with?
-     placeholder: Google Chrome 88.0.4324.150
-     description: >
-       Provide the full name and don't forget to add the version!
  - type: textarea
    id: config
    attributes:
@@ -70,20 +64,6 @@ body:
      render: shell
    validations:
      required: true
- - type: dropdown
-   id: os
-   attributes:
-     label: Operating system
-     options:
-       - HassOS
-       - Debian
-       - Other Linux
-       - Proxmox
-       - UNRAID
-       - Windows
-       - Other
-   validations:
-     required: true
  - type: dropdown
    id: install-method
    attributes:
@@ -92,6 +72,22 @@ body:
  - HassOS Addon
  - Docker Compose
  - Docker CLI
+ - Proxmox via Docker
+ - Proxmox via TTeck Script
+ - Windows WSL2
  validations:
    required: true
+ - type: dropdown
+   id: object-detector
+   attributes:
+     label: Object Detector
+     options:
+       - Coral
+       - OpenVino
+       - TensorRT
+       - RKNN
+       - Other
+       - CPU (no coral)
+   validations:
+     required: true
  - type: dropdown
.github/pull_request_template.md (new file, 31 lines)

@@ -0,0 +1,31 @@
## Proposed change

<!--
Describe what this pull request does and how it will benefit users of Frigate.
Please describe in detail any considerations, breaking changes, etc. that are
made in this pull request.
-->

## Type of change

- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code

## Additional information

- This PR fixes or closes issue: fixes #
- This PR is related to issue:

## Checklist

<!--
Put an `x` in the boxes that apply.
-->

- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] The code has been formatted using Ruff (`ruff format frigate`)
.github/workflows/ci.yml (26 changes)

@@ -6,6 +6,8 @@ on:
    branches:
      - dev
      - master
+   paths-ignore:
+     - 'docs/**'

  # only run the latest commit to avoid cache overwrites
  concurrency:
@@ -155,6 +157,28 @@ jobs:
          tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
          *.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
          *.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
+ arm64_extra_builds:
+   runs-on: ubuntu-latest
+   name: ARM Extra Build
+   needs:
+     - arm64_build
+   steps:
+     - name: Check out code
+       uses: actions/checkout@v4
+     - name: Set up QEMU and Buildx
+       id: setup
+       uses: ./.github/actions/setup
+       with:
+         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+     - name: Build and push Rockchip build
+       uses: docker/bake-action@v3
+       with:
+         push: true
+         targets: rk
+         files: docker/rockchip/rk.hcl
+         set: |
+           rk.tags=${{ steps.setup.outputs.image-name }}-rk
+           *.cache-from=type=gha
  combined_extra_builds:
    runs-on: ubuntu-latest
    name: Combined Extra Builds
@@ -205,7 +229,7 @@ jobs:
        with:
          string: ${{ github.repository }}
      - name: Log in to the Container registry
-       uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
+       uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
.github/workflows/pull_request.yml (5 changes)

@@ -1,6 +1,9 @@
  name: On pull request

- on: pull_request
+ on:
+   pull_request:
+     paths-ignore:
+       - 'docs/**'

  env:
    DEFAULT_PYTHON: 3.9
.github/workflows/release.yml (6 changes)

@@ -16,7 +16,7 @@ jobs:
        with:
          string: ${{ github.repository }}
      - name: Log in to the Container registry
-       uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
+       uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
@@ -34,14 +34,14 @@ jobs:
          STABLE_TAG=${BASE}:stable
          PULL_TAG=${BASE}:${BUILD_TAG}
          docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${VERSION_TAG}
-         for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
+         for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
            docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${VERSION_TAG}-${variant}
          done

          # stable tag
          if [[ "${BUILD_TYPE}" == "stable" ]]; then
            docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${STABLE_TAG}
-           for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
+           for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
              docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${STABLE_TAG}-${variant}
            done
          fi
@@ -4,6 +4,7 @@ from statistics import mean

  import numpy as np

+ import frigate.util as util
  from frigate.config import DetectorTypeEnum
  from frigate.object_detection import (
      ObjectDetectProcess,
@@ -90,7 +91,7 @@ edgetpu_process_2 = ObjectDetectProcess(
  )

  for x in range(0, 10):
-     camera_process = mp.Process(
+     camera_process = util.Process(
          target=start, args=(x, 300, detection_queue, events[str(x)])
      )
      camera_process.daemon = True
@@ -50,7 +50,7 @@ RUN PYTHON_VERSION=$(python3 --version 2>&1 | awk '{print $2}' | cut -d. -f1,2)
  RUN . /etc/environment && \
      git clone https://github.com/hailo-ai/hailort.git /opt/hailort && \
      cd /opt/hailort && \
-     git checkout v4.17.0 && \
+     git checkout v4.18.0 && \
      cmake -H. -Bbuild -DCMAKE_BUILD_TYPE=Release -DHAILO_BUILD_PYBIND=1 -DPYBIND11_PYTHON_VERSION=${PYTHON_VERSION} && \
      cmake --build build --config release --target libhailort && \
      cmake --build build --config release --target _pyhailort && \
@@ -91,7 +91,7 @@ RUN pip3 install -U /deps/hailo-wheels/*.whl
  RUN . /etc/environment && \
      mv /usr/local/lib/python${PYTHON_VERSION}/dist-packages/hailo_platform/pyhailort/libhailort.so /usr/lib/${CC} && \
      cd /usr/lib/${CC}/ && \
-     ln -s libhailort.so libhailort.so.4.17.0
+     ln -s libhailort.so libhailort.so.4.18.0

  # Copy base files from the rootfs stage
  COPY --from=rootfs / /
@@ -2,7 +2,7 @@

  # Update package list and install dependencies
  sudo apt-get update
- sudo apt-get install -y build-essential cmake git wget linux-modules-extra-$(uname -r)
+ sudo apt-get install -y build-essential cmake git wget

  arch=$(uname -m)
@@ -13,7 +13,7 @@ else
  fi

  # Clone the HailoRT driver repository
- git clone --depth 1 --branch v4.17.0 https://github.com/hailo-ai/hailort-drivers.git
+ git clone --depth 1 --branch v4.18.0 https://github.com/hailo-ai/hailort-drivers.git

  # Build and install the HailoRT driver
  cd hailort-drivers/linux/pcie
@@ -23,13 +23,26 @@ sudo make install
  # Load the Hailo PCI driver
  sudo modprobe hailo_pci

+ if [ $? -ne 0 ]; then
+     echo "Unable to load hailo_pci module, common reasons for this are:"
+     echo "- Key was rejected by service: Secure Boot is enabling disallowing install."
+     echo "- Permissions are not setup correctly."
+     exit 1
+ fi
+
  # Download and install the firmware
  cd ../../
  ./download_firmware.sh
- sudo mv hailo8_fw.4.17.0.bin /lib/firmware/hailo/hailo8_fw.bin
+
+ # verify the firmware folder is present
+ if [ ! -d /lib/firmware/hailo ]; then
+     sudo mkdir /lib/firmware/hailo
+ fi
+
+ sudo mv hailo8_fw.4.18.0.bin /lib/firmware/hailo/hailo8_fw.bin

  # Install udev rules
  sudo cp ./linux/pcie/51-hailo-udev.rules /etc/udev/rules.d/
  sudo udevadm control --reload-rules && sudo udevadm trigger

  echo "HailoRT driver installation complete."
  echo "reboot your system to load the firmware!"
@@ -30,6 +30,16 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
      --mount=type=cache,target=/root/.ccache \
      /deps/build_nginx.sh

+ FROM wget AS sqlite-vec
+ ARG DEBIAN_FRONTEND
+
+ # Build sqlite_vec from source
+ COPY docker/main/build_sqlite_vec.sh /deps/build_sqlite_vec.sh
+ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
+     --mount=type=bind,source=docker/main/build_sqlite_vec.sh,target=/deps/build_sqlite_vec.sh \
+     --mount=type=cache,target=/root/.ccache \
+     /deps/build_sqlite_vec.sh
+
  FROM scratch AS go2rtc
  ARG TARGETARCH
  WORKDIR /rootfs/usr/local/go2rtc/bin
@@ -163,20 +173,18 @@ RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
  COPY docker/main/requirements.txt /requirements.txt
  RUN pip3 install -r /requirements.txt

- # Build pysqlite3 from source to support ChromaDB
+ # Build pysqlite3 from source
  COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh
  RUN /build_pysqlite3.sh

  COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
  RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt

- COPY docker/main/requirements-wheels-post.txt /requirements-wheels-post.txt
- RUN pip3 wheel --no-deps --wheel-dir=/wheels-post -r /requirements-wheels-post.txt

  # Collect deps in a single layer
  FROM scratch AS deps-rootfs
  COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
+ COPY --from=sqlite-vec /usr/local/lib/ /usr/local/lib/
  COPY --from=go2rtc /rootfs/ /
  COPY --from=libusb-build /usr/local/lib /usr/local/lib
  COPY --from=tempio /rootfs/ /
@@ -197,12 +205,11 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
  ENV NVIDIA_VISIBLE_DEVICES=all
  ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"

- # Turn off Chroma Telemetry: https://docs.trychroma.com/telemetry#opting-out
- ENV ANONYMIZED_TELEMETRY=False
- # Allow resetting the chroma database
- ENV ALLOW_RESET=True
  # Disable tokenizer parallelism warning
  # https://stackoverflow.com/questions/62691279/how-to-disable-tokenizers-parallelism-true-false-warning/72926996#72926996
  ENV TOKENIZERS_PARALLELISM=true
+ # https://github.com/huggingface/transformers/issues/27214
+ ENV TRANSFORMERS_NO_ADVISORY_WARNINGS=1

  ENV PATH="/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
  ENV LIBAVFORMAT_VERSION_MAJOR=60
@@ -215,14 +222,6 @@ RUN --mount=type=bind,from=wheels,source=/wheels,target=/deps/wheels \
      python3 -m pip install --upgrade pip && \
      pip3 install -U /deps/wheels/*.whl

- # We have to uninstall this dependency specifically
- # as it will break onnxruntime-openvino
- RUN pip3 uninstall -y onnxruntime
-
- RUN --mount=type=bind,from=wheels,source=/wheels-post,target=/deps/wheels \
-     python3 -m pip install --upgrade pip && \
-     pip3 install -U /deps/wheels/*.whl

  COPY --from=deps-rootfs / /

  RUN ldconfig
@@ -239,7 +238,7 @@ ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
  ENTRYPOINT ["/init"]
  CMD []

- HEALTHCHECK --start-period=120s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
+ HEALTHCHECK --start-period=300s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
      CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1

  # Frigate deps with Node.js and NPM for devcontainer
docker/main/build_sqlite_vec.sh (new executable file, 31 lines)

@@ -0,0 +1,31 @@
#!/bin/bash

set -euxo pipefail

SQLITE_VEC_VERSION="0.1.3"

cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list
sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list
apt-get update
apt-get -yqq build-dep sqlite3 gettext git

mkdir /tmp/sqlite_vec
# Grab the sqlite_vec source code.
wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VERSION}.tar.gz
tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec

cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION}

mkdir -p vendor
wget -O sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip
unzip sqlite-amalgamation.zip
mv sqlite-amalgamation-3450300/* vendor/
rmdir sqlite-amalgamation-3450300
rm sqlite-amalgamation.zip

# build loadable module
make loadable

# install it
cp dist/vec0.* /usr/local/lib
@@ -8,11 +8,13 @@ apt-get -qq install --no-install-recommends -y \
  apt-transport-https \
  gnupg \
  wget \
+ lbzip2 \
  procps vainfo \
  unzip locales tzdata libxml2 xz-utils \
  python3.9 \
  python3-pip \
  curl \
+ lsof \
  jq \
  nethogs
@@ -44,7 +46,7 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
  wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux64-gpl-5.1.tar.xz"
  tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
  rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
- wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linux64-gpl-7.0.tar.xz"
+ wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linux64-gpl-7.0.tar.xz"
  tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
  rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
  fi
@@ -56,7 +58,7 @@ if [[ "${TARGETARCH}" == "arm64" ]]; then
  wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
  tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
  rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
- wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linuxarm64-gpl-7.0.tar.xz"
+ wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linuxarm64-gpl-7.0.tar.xz"
  tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
  rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
  fi
@@ -75,6 +77,9 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
  apt-get -qq install --no-install-recommends --no-install-suggests -y \
      i965-va-driver-shaders

+ # intel packages use zst compression so we need to update dpkg
+ apt-get install -y dpkg
+
  rm -f /etc/apt/sources.list.d/debian-bookworm.list

  # use intel apt intel packages
@@ -1,3 +0,0 @@
# ONNX
onnxruntime-openvino == 1.19.* ; platform_machine == 'x86_64'
onnxruntime == 1.19.* ; platform_machine == 'aarch64'

@@ -1,8 +1,12 @@
  click == 8.1.*
- Flask == 3.0.*
- Flask_Limiter == 3.8.*
+ # FastAPI
+ starlette-context == 0.3.6
+ fastapi == 0.115.*
+ uvicorn == 0.30.*
+ slowapi == 0.1.*
  imutils == 0.5.*
  joserfc == 1.0.*
  pathvalidate == 3.2.*
  markupsafe == 2.1.*
  mypy == 1.6.1
  numpy == 1.26.*
@@ -12,10 +16,10 @@ paho-mqtt == 2.1.*
  pandas == 2.2.*
  peewee == 3.17.*
  peewee_migrate == 1.13.*
- psutil == 5.9.*
+ psutil == 6.1.*
  pydantic == 2.8.*
  git+https://github.com/fbcotter/py3nvml#egg=py3nvml
- pytz == 2024.1
+ pytz == 2024.*
  pyzmq == 26.2.*
  ruamel.yaml == 0.18.*
  tzlocal == 5.2
@@ -26,15 +30,16 @@ norfair == 2.2.*
  setproctitle == 1.3.*
  ws4py == 0.5.*
  unidecode == 1.3.*
- # OpenVino (ONNX installed in wheels-post)
+ # OpenVino & ONNX
  openvino == 2024.3.*
+ onnxruntime-openvino == 1.19.* ; platform_machine == 'x86_64'
+ onnxruntime == 1.19.* ; platform_machine == 'aarch64'
  # Embeddings
- chromadb == 0.5.0
- onnx_clip == 4.0.*
+ transformers == 4.45.*
  # Generative AI
- google-generativeai == 0.6.*
- ollama == 0.2.*
- openai == 1.30.*
+ google-generativeai == 0.8.*
+ ollama == 0.3.*
+ openai == 1.51.*
  # push notifications
  py-vapid == 1.9.*
  pywebpush == 2.0.*
@@ -1 +0,0 @@
chroma

@@ -1 +0,0 @@
chroma-pipeline

@@ -1,4 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash

exec logutil-service /dev/shm/logs/chroma

@@ -1 +0,0 @@
longrun

@@ -1,28 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Take down the S6 supervision tree when the service exits

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

declare exit_code_container
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
readonly exit_code_container
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="ChromaDB"

echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
    fi
elif [[ "${exit_code_service}" -ne 0 ]]; then
    if [[ "${exit_code_container}" -eq 0 ]]; then
        echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
    fi
fi

exec /run/s6/basedir/bin/halt

@@ -1 +0,0 @@
chroma-log

@@ -1,27 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Start the Frigate service

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

# Tell S6-Overlay not to restart this service
s6-svc -O .

search_enabled=`python3 /usr/local/semantic_search/get_search_settings.py | jq -r .enabled`

# Replace the bash process with the Frigate process, redirecting stderr to stdout
exec 2>&1

if [[ "$search_enabled" == 'true' ]]; then
    echo "[INFO] Starting ChromaDB..."
    exec /usr/local/chroma run --path /config/chroma --host 127.0.0.1
else
    while true
    do
        sleep 9999
        continue
    done
    exit 0
fi

@@ -1 +0,0 @@
120000

@@ -1 +0,0 @@
longrun

@@ -4,7 +4,7 @@

  set -o errexit -o nounset -o pipefail

- dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync /dev/shm/logs/chroma)
+ dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync)

  mkdir -p "${dirs[@]}"
  chown nobody:nogroup "${dirs[@]}"

@@ -1,14 +0,0 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-s
__import__("pysqlite3")

import re
import sys

sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")

from chromadb.cli.cli import app

if __name__ == "__main__":
    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
    sys.exit(app())
@@ -226,7 +226,7 @@ http {

  location ~* /api/.*\.(jpg|jpeg|png|webp|gif)$ {
      include auth_request.conf;
-     rewrite ^/api/(.*)$ $1 break;
+     rewrite ^/api/(.*)$ /$1 break;
      proxy_pass http://frigate_api;
      include proxy.conf;
  }
@@ -1,30 +0,0 @@
"""Prints the semantic_search config as json to stdout."""

import json
import os

from ruamel.yaml import YAML

yaml = YAML()

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

# Check if we can use .yaml instead of .yml
config_file_yaml = config_file.replace(".yml", ".yaml")
if os.path.isfile(config_file_yaml):
    config_file = config_file_yaml

try:
    with open(config_file) as f:
        raw_config = f.read()

    if config_file.endswith((".yaml", ".yml")):
        config: dict[str, any] = yaml.load(raw_config)
    elif config_file.endswith(".json"):
        config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
    config: dict[str, any] = {}

search_config: dict[str, any] = config.get("semantic_search", {"enabled": False})

print(json.dumps(search_config))
@@ -83,6 +83,7 @@ ARG AMDGPU

  COPY --from=rocm /opt/rocm-$ROCM/bin/rocminfo /opt/rocm-$ROCM/bin/migraphx-driver /opt/rocm-$ROCM/bin/
  COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*$AMDGPU* /opt/rocm-$ROCM/share/miopen/db/
+ COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*gfx908* /opt/rocm-$ROCM/share/miopen/db/
  COPY --from=rocm /opt/rocm-$ROCM/lib/rocblas/library/*$AMDGPU* /opt/rocm-$ROCM/lib/rocblas/library/
  COPY --from=rocm /opt/rocm-dist/ /
  COPY --from=debian-build /opt/rocm/lib/migraphx.cpython-39-x86_64-linux-gnu.so /opt/rocm-$ROCM/lib/
@@ -9,6 +9,6 @@ nvidia-cuda-runtime-cu11 == 11.8.*; platform_machine == 'x86_64'
  nvidia-cublas-cu11 == 11.11.3.6; platform_machine == 'x86_64'
  nvidia-cudnn-cu11 == 8.6.0.*; platform_machine == 'x86_64'
  nvidia-cufft-cu11==10.*; platform_machine == 'x86_64'
- onnx==1.14.0; platform_machine == 'x86_64'
- onnxruntime-gpu==1.17.*; platform_machine == 'x86_64'
+ onnx==1.16.*; platform_machine == 'x86_64'
+ onnxruntime-gpu==1.18.*; platform_machine == 'x86_64'
  protobuf==3.20.3; platform_machine == 'x86_64'
@@ -1,5 +1,10 @@
  # Website

- This website is built using [Docusaurus 2](https://v2.docusaurus.io/), a modern static website generator.
+ This website is built using [Docusaurus 3.5](https://docusaurus.io/docs), a modern static website generator.

  For installation and contributing instructions, please follow the [Contributing Docs](https://docs.frigate.video/development/contributing).
+
+ # Development
+
+ 1. Run `npm i` to install dependencies
+ 2. Run `npm run start` to start the website
@@ -183,7 +183,7 @@ To do this:
  3. Give `go2rtc` execute permission.
  4. Restart Frigate and the custom version will be used, you can verify by checking go2rtc logs.

- ## Validating your config.yaml file updates
+ ## Validating your config.yml file updates

  When frigate starts up, it checks whether your config file is valid, and if it is not, the process exits. To minimize interruptions when updating your config, you have three options -- you can edit the config via the WebUI which has built in validation, use the config API, or you can validate on the command line using the frigate docker container.
@@ -211,5 +211,5 @@ docker run \
    --entrypoint python3 \
    ghcr.io/blakeblackshear/frigate:stable \
    -u -m frigate \
-   --validate_config
+   --validate-config
  ```
@@ -26,7 +26,7 @@ In the event that you are locked out of your instance, you can tell Frigate to r
|
||||
|
||||
## Login failure rate limiting
|
||||
|
||||
In order to limit the risk of brute force attacks, rate limiting is available for login failures. This is implemented with Flask-Limiter, and the string notation for valid values is available in [the documentation](https://flask-limiter.readthedocs.io/en/stable/configuration.html#rate-limit-string-notation).
|
||||
In order to limit the risk of brute force attacks, rate limiting is available for login failures. This is implemented with SlowApi, and the string notation for valid values is available in [the documentation](https://limits.readthedocs.io/en/stable/quickstart.html#examples).
|
||||
|
||||
For example, `1/second;5/minute;20/hour` will rate limit the login endpoint when failures occur more than:
|
||||
|
||||
|
||||
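For context, a minimal sketch of where a rate-limit string like the one above lives in Frigate's config (not part of the changeset above; assumes the documented `auth` section and its `failed_login_rate_limit` option):

```yaml
auth:
  # limit failed logins to 1/second, 5/minute, and 20/hour
  failed_login_rate_limit: "1/second;5/minute;20/hour"
```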
@@ -9,6 +9,12 @@ This page makes use of presets of FFmpeg args. For more information on presets,

  :::

+ :::note
+
+ Many cameras support encoding options which greatly affect the live view experience, see the [Live view](/configuration/live) page for more info.
+
+ :::
+
  ## MJPEG Cameras

  Note that mjpeg cameras require encoding the video into h264 for recording, and restream roles. This will use significantly more CPU than if the cameras supported h264 feeds directly. It is recommended to use the restream role to create an h264 restream and then use that as the source for ffmpeg.
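To illustrate the restream approach described above (a sketch, not part of the changeset; the camera name and URL are hypothetical), go2rtc can transcode the MJPEG feed to h264 once and Frigate can then consume that restream:

```yaml
go2rtc:
  streams:
    # transcode the MJPEG feed to h264 in go2rtc
    mjpeg_cam: "ffmpeg:http://192.168.1.6/video.mjpg#video=h264"
cameras:
  mjpeg_cam:
    ffmpeg:
      inputs:
        # consume the h264 restream instead of the raw MJPEG feed
        - path: rtsp://127.0.0.1:8554/mjpeg_cam
          roles:
            - detect
            - record
```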
@@ -79,29 +79,41 @@ cameras:

  If the ONVIF connection is successful, PTZ controls will be available in the camera's WebUI.

+ :::tip
+
+ If your ONVIF camera does not require authentication credentials, you may still need to specify an empty string for `user` and `password`, eg: `user: ""` and `password: ""`.
+
+ :::
+
  An ONVIF-capable camera that supports relative movement within the field of view (FOV) can also be configured to automatically track moving objects and keep them in the center of the frame. For autotracking setup, see the [autotracking](autotracking.md) docs.

  ## ONVIF PTZ camera recommendations

  This list of working and non-working PTZ cameras is based on user feedback.

- | Brand or specific camera | PTZ Controls | Autotracking | Notes |
- | ------------------------ | :----------: | :----------: | ----- |
- | Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
- | Amcrest ASH21 | ❌ | ❌ | No ONVIF support |
- | Ctronics PTZ | ✅ | ❌ | |
- | Dahua | ✅ | ✅ | |
- | Foscam R5 | ✅ | ❌ | |
- | Hanwha XNP-6550RH | ✅ | ❌ | |
- | Hikvision | ✅ | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
- | Reolink 511WA | ✅ | ❌ | Zoom only |
- | Reolink E1 Pro | ✅ | ❌ | |
- | Reolink E1 Zoom | ✅ | ❌ | |
- | Reolink RLC-823A 16x | ✅ | ❌ | |
- | Sunba 405-D20X | ✅ | ❌ | |
- | Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
- | Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
- | Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |
+ | Brand or specific camera | PTZ Controls | Autotracking | Notes |
+ | ---------------------------- | :----------: | :----------: | ----- |
+ | Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
+ | Amcrest ASH21 | ✅ | ❌ | ONVIF service port: 80 |
+ | Amcrest IP4M-S2112EW-AI | ✅ | ❌ | FOV relative movement not supported. |
+ | Amcrest IP5M-1190EW | ✅ | ❌ | ONVIF Port: 80. FOV relative movement not supported. |
+ | Ctronics PTZ | ✅ | ❌ | |
+ | Dahua | ✅ | ✅ | |
+ | Dahua DH-SD2A500HB | ✅ | ❌ | |
+ | Foscam R5 | ✅ | ❌ | |
+ | Hanwha XNP-6550RH | ✅ | ❌ | |
+ | Hikvision | ✅ | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
+ | Hikvision DS-2DE3A404IWG-E/W | ✅ | ✅ | |
+ | Reolink 511WA | ✅ | ❌ | Zoom only |
+ | Reolink E1 Pro | ✅ | ❌ | |
+ | Reolink E1 Zoom | ✅ | ❌ | |
+ | Reolink RLC-823A 16x | ✅ | ❌ | |
+ | Speco O8P32X | ✅ | ❌ | |
+ | Sunba 405-D20X | ✅ | ❌ | |
+ | Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
+ | Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
+ | Uniview IPC6612SR-X33-VG | ✅ | ✅ | Leave `calibrate_on_startup` as `False`. A user has reported that zooming with `absolute` is working. |
+ | Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |

  ## Setting up camera groups
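To illustrate the empty-credentials tip in the hunk above (a sketch, not part of the changeset; the camera name, host, and port are hypothetical):

```yaml
cameras:
  ptz_cam:
    onvif:
      host: 192.168.1.7
      port: 8000
      # some cameras reject the connection unless explicit empty strings are sent
      user: ""
      password: ""
```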
@@ -3,7 +3,7 @@ id: genai
  title: Generative AI
  ---

- Generative AI can be used to automatically generate descriptions based on the thumbnails of your tracked objects. This helps with [Semantic Search](/configuration/semantic_search) in Frigate by providing detailed text descriptions as a basis of the search query.
+ Generative AI can be used to automatically generate descriptive text based on the thumbnails of your tracked objects. This helps with [Semantic Search](/configuration/semantic_search) in Frigate to provide more context about your tracked objects.

  Semantic Search must be enabled to use Generative AI. Descriptions are accessed via the _Explore_ view in the Frigate UI by clicking on a tracked object's thumbnail.
@@ -29,11 +29,21 @@ cameras:

  ## Ollama

- [Ollama](https://ollama.com/) allows you to self-host large language models and keep everything running locally. It provides a nice API over [llama.cpp](https://github.com/ggerganov/llama.cpp). It is highly recommended to host this server on a machine with an Nvidia graphics card, or on a Apple silicon Mac for best performance. Most of the 7b parameter 4-bit vision models will fit inside 8GB of VRAM. There is also a [docker container](https://hub.docker.com/r/ollama/ollama) available.
+ :::warning
+
+ Using Ollama on CPU is not recommended, high inference times make using generative AI impractical.
+
+ :::
+
+ [Ollama](https://ollama.com/) allows you to self-host large language models and keep everything running locally. It provides a nice API over [llama.cpp](https://github.com/ggerganov/llama.cpp). It is highly recommended to host this server on a machine with an Nvidia graphics card, or on a Apple silicon Mac for best performance.
+
+ Most of the 7b parameter 4-bit vision models will fit inside 8GB of VRAM. There is also a [docker container](https://hub.docker.com/r/ollama/ollama) available.
+
+ Parallel requests also come with some caveats. See the [Ollama documentation](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-does-ollama-handle-concurrent-requests).

  ### Supported Models

- You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`.
+ You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`. Note that Frigate will not automatically download the model you specify in your config, you must download the model to your local instance of Ollama first i.e. by running `ollama pull llava:7b` on your Ollama server/Docker container. Note that the model specified in Frigate's config must match the downloaded model tag.

  :::note
@@ -48,7 +58,7 @@ genai:
    enabled: True
    provider: ollama
    base_url: http://localhost:11434
-   model: llava
+   model: llava:7b
  ```

  ## Google Gemini
@@ -100,12 +110,40 @@ genai:
    model: gpt-4o
  ```

+ ## Azure OpenAI
+
+ Microsoft offers several vision models through Azure OpenAI. A subscription is required.
+
+ ### Supported Models
+
+ You must use a vision capable model with Frigate. Current model variants can be found [in their documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models). At the time of writing, this includes `gpt-4o` and `gpt-4-turbo`.
+
+ ### Create Resource and Get API Key
+
+ To start using Azure OpenAI, you must first [create a resource](https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource). You'll need your API key and resource URL, which must include the `api-version` parameter (see the example below). The model field is not required in your configuration as the model is part of the deployment name you chose when deploying the resource.
+
+ ### Configuration
+
+ ```yaml
+ genai:
+   enabled: True
+   provider: azure_openai
+   base_url: https://example-endpoint.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2023-03-15-preview
+   api_key: "{FRIGATE_OPENAI_API_KEY}"
+ ```
+
+ ## Usage and Best Practices
+
+ Frigate's thumbnail search excels at identifying specific details about tracked objects – for example, using an "image caption" approach to find a "person wearing a yellow vest," "a white dog running across the lawn," or "a red car on a residential street." To enhance this further, Frigate's default prompts are designed to ask your AI provider about the intent behind the object's actions, rather than just describing its appearance.
+
+ While generating simple descriptions of detected objects is useful, understanding intent provides a deeper layer of insight. Instead of just recognizing "what" is in a scene, Frigate's default prompts aim to infer "why" it might be there or "what" it could do next. Descriptions tell you what's happening, but intent gives context. For instance, a person walking toward a door might seem like a visitor, but if they're moving quickly after hours, you can infer a potential break-in attempt. Detecting a person loitering near a door at night can trigger an alert sooner than simply noting "a person standing by the door," helping you respond based on the situation's context.
+
  ## Custom Prompts

  Frigate sends multiple frames from the tracked object along with a prompt to your Generative AI provider asking it to generate a description. The default prompt is as follows:

  ```
- Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background.
+ Analyze the sequence of images containing the {label}. Focus on the likely intent or behavior of the {label} based on its actions and movement, rather than describing its appearance or the surroundings. Consider what the {label} is doing, why, and what it might do next.
  ```

  :::tip
@@ -122,22 +160,30 @@ genai:
    provider: ollama
    base_url: http://localhost:11434
    model: llava
-   prompt: "Describe the {label} in these images from the {camera} security camera."
+   prompt: "Analyze the {label} in these images from the {camera} security camera. Focus on the actions, behavior, and potential intent of the {label}, rather than just describing its appearance."
    object_prompts:
-     person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc)."
-     car: "Label the primary vehicle in these images with just the name of the company if it is a delivery vehicle, or the color make and model."
+     person: "Examine the main person in these images. What are they doing and what might their actions suggest about their intent (e.g., approaching a door, leaving an area, standing still)? Do not describe the surroundings or static details."
+     car: "Observe the primary vehicle in these images. Focus on its movement, direction, or purpose (e.g., parking, approaching, circling). If it's a delivery vehicle, mention the company."
  ```

- Prompts can also be overriden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire.
+ Prompts can also be overriden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire. By default, descriptions will be generated for all tracked objects and all zones. But you can also optionally specify `objects` and `required_zones` to only generate descriptions for certain tracked objects or zones.
+
+ Optionally, you can generate the description using a snapshot (if enabled) by setting `use_snapshot` to `True`. By default, this is set to `False`, which sends the thumbnails collected over the object's lifetime to the model. Using a snapshot provides the AI with a higher-resolution image (typically downscaled by the AI itself), but the trade-off is that only a single image is used, which might limit the model's ability to determine object movement or direction.

  ```yaml
  cameras:
    front_door:
      genai:
-       prompt: "Describe the {label} in these images from the {camera} security camera at the front door of a house, aimed outward toward the street."
+       use_snapshot: True
+       prompt: "Analyze the {label} in these images from the {camera} security camera at the front door. Focus on the actions and potential intent of the {label}."
        object_prompts:
-         person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc). If delivering a package, include the company the package is from."
-         cat: "Describe the cat in these images (color, size, tail). Indicate whether or not the cat is by the flower pots. If the cat is chasing a mouse, make up a name for the mouse."
+         person: "Examine the person in these images. What are they doing, and how might their actions suggest their purpose (e.g., delivering something, approaching, leaving)? If they are carrying or interacting with a package, include details about its source or destination."
+         cat: "Observe the cat in these images. Focus on its movement and intent (e.g., wandering, hunting, interacting with objects). If the cat is near the flower pots or engaging in any specific actions, mention it."
+       objects:
+         - person
+         - cat
+       required_zones:
+         - steps
  ```

  ### Experiment with prompts
@@ -65,6 +65,8 @@ Or map in all the `/dev/video*` devices.

  ## Intel-based CPUs

+ :::info
+
  **Recommended hwaccel Preset**

  | CPU Generation | Intel Driver | Recommended Preset | Notes |
@@ -74,11 +76,13 @@ Or map in all the `/dev/video*` devices.
  | gen13+ | iHD / Xe | preset-intel-qsv-* | |
  | Intel Arc GPU | iHD / Xe | preset-intel-qsv-* | |

+ :::
+
  :::note

  The default driver is `iHD`. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the `frigate.yaml` for HA OS users](advanced.md#environment_vars).

- See [The Intel Docs](https://www.intel.com/content/www/us/en/support/articles/000005505/processors.html to figure out what generation your CPU is.)
+ See [The Intel Docs](https://www.intel.com/content/www/us/en/support/articles/000005505/processors.html) to figure out what generation your CPU is.

  :::
@@ -379,7 +383,7 @@ Make sure to follow the [Rockchip specific installation instructions](/frigate/i

  ### Configuration

- Add one of the following FFmpeg presets to your `config.yaml` to enable hardware video processing:
+ Add one of the following FFmpeg presets to your `config.yml` to enable hardware video processing:

  ```yaml
  # if you try to decode a h264 encoded stream
@@ -11,11 +11,21 @@ Frigate intelligently uses three different streaming technologies to display you

  The jsmpeg live view will use more browser and client GPU resources. Using go2rtc is highly recommended and will provide a superior experience.

- | Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
- | ------ | ------- | ------------------------------------- | ---------- | ---------------------------- | --------------- | ----- |
- | jsmpeg | low | same as `detect -> fps`, capped at 10 | 720p | no | no | resolution is configurable, but go2rtc is recommended if you want higher resolutions |
- | mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
- | webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
+ | Source | Frame Rate | Resolution | Audio | Requires go2rtc | Notes |
+ | ------ | ------------------------------------- | ---------- | ---------------------------- | --------------- | ----- |
+ | jsmpeg | same as `detect -> fps`, capped at 10 | 720p | no | no | Resolution is configurable, but go2rtc is recommended if you want higher resolutions and better frame rates. jsmpeg is Frigate's default without go2rtc configured. |
+ | mse | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only. This is Frigate's default when go2rtc is configured. |
+ | webrtc | native | native | yes (depends on audio codec) | yes | Requires extra configuration, doesn't support h.265. Frigate attempts to use WebRTC when MSE fails or when using a camera's two-way talk feature. |
+
+ ### Camera Settings Recommendations
+
+ If you are using go2rtc, you should adjust the following settings in your camera's firmware for the best experience with Live view:
+
+ - Video codec: **H.264** - provides the most compatible video codec with all Live view technologies and browsers. Avoid any kind of "smart codec" or "+" codec like _H.264+_ or _H.265+_. as these non-standard codecs remove keyframes (see below).
+ - Audio codec: **AAC** - provides the most compatible audio codec with all Live view technologies and browsers that support audio.
+ - I-frame interval (sometimes called the keyframe interval, the interframe space, or the GOP length): match your camera's frame rate, or choose "1x" (for interframe space on Reolink cameras). For example, if your stream outputs 20fps, your i-frame interval should be 20 (or 1x on Reolink). Values higher than the frame rate will cause the stream to take longer to begin playback. See [this page](https://gardinal.net/understanding-the-keyframe-interval/) for more on keyframes.
+
+ The default video and audio codec on your camera may not always be compatible with your browser, which is why setting them to H.264 and AAC is recommended. See the [go2rtc docs](https://github.com/AlexxIT/go2rtc?tab=readme-ov-file#codecs-madness) for codec support information.
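If a camera's firmware cannot be set to H.264/AAC as recommended above, go2rtc can transcode instead. A minimal sketch (not part of the changeset; the stream name and URL are hypothetical):

```yaml
go2rtc:
  streams:
    incompatible_cam:
      # transcode video to H.264 and audio to AAC for broad browser support
      - "ffmpeg:rtsp://192.168.1.5:554/live0#video=h264#audio=aac"
```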
### Audio Support

@@ -32,6 +42,15 @@ go2rtc:
        - "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
  ```

+ If your camera does not have audio and you are having problems with Live view, you should have go2rtc send video only:
+
+ ```yaml
+ go2rtc:
+   streams:
+     no_audio_camera:
+       - ffmpeg:rtsp://192.168.1.5:554/live0#video=copy
+ ```
+
  ### Setting Stream For Live UI

  There may be some cameras that you would prefer to use the sub stream for live view, but the main stream for recording. This can be done via `live -> stream_name`.
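A minimal sketch of that `live -> stream_name` option (not part of the changeset; assumes a go2rtc stream named `front_door_sub` is already defined):

```yaml
cameras:
  front_door:
    live:
      # show the lower-bandwidth sub stream in the Live UI
      stream_name: front_door_sub
```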
@@ -92,10 +92,16 @@ motion:

```yaml
  lightning_threshold: 0.8
```

:::warning

Some cameras like doorbell cameras may have missed detections when someone walks directly in front of the camera and the `lightning_threshold` causes motion detection to be re-calibrated. In this case, it may be desirable to increase the `lightning_threshold` to ensure these objects are not missed.

:::

:::note

Lightning threshold does not stop motion based recordings from being saved.

:::

Large changes in motion, like PTZ moves and camera switches between color and IR mode, should result in no motion detection. This is done via the `lightning_threshold` configuration. It is defined as the percentage of the image used to detect lightning or other substantial changes where motion detection needs to recalibrate. Increasing this value will make motion detection more likely to consider lightning or IR mode changes as valid motion. Decreasing this value will make motion detection more likely to ignore large amounts of motion such as a person approaching a doorbell camera.

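As a hedged illustration of tuning this for a single camera (the camera name and raised value are only examples; `0.8` is the value shown in the snippet above):

```yaml
cameras:
  doorbell: # hypothetical camera name
    motion:
      # raised from 0.8 so a person filling the frame is less likely
      # to trigger a recalibration and be missed
      lightning_threshold: 0.9
```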
@@ -5,6 +5,8 @@ title: Object Detectors

# Supported Hardware

:::info

Frigate supports multiple different detectors that work on different types of hardware:

**Most Hardware**

@@ -26,37 +28,14 @@ Frigate supports multiple different detectors that work on different types of ha

**Rockchip**

- [RKNN](#rockchip-platform): RKNN models can run on Rockchip devices with included NPUs.

**For Testing**

- [CPU Detector (not recommended for actual use)](#cpu-detector-not-recommended): Use a CPU to run a tflite model; this is not recommended, and in most cases OpenVINO can be used in CPU mode with better results.

:::

# Officially Supported Detectors

Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `hailo8l`, `onnx`, `openvino`, `rknn`, `rocm`, and `tensorrt`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras. An example with two detectors is sketched below.

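As a hedged illustration of the multiple-detector behavior described above, a sketch with two detectors (the `device` values assume two USB Corals are attached; see the Edge TPU section below for the real device options):

```yaml
detectors:
  coral1:
    type: edgetpu
    device: usb:0 # first USB Coral
  coral2:
    type: edgetpu
    device: usb:1 # second USB Coral
```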
## Edge TPU Detector

@@ -188,7 +167,7 @@ This detector also supports YOLOX. Frigate does not come with any YOLOX models p

#### YOLO-NAS

[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) ([open it in Google Colab](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb)).

:::warning

@@ -418,7 +397,7 @@ After placing the downloaded onnx model in your config folder, you can use the f

```yaml
detectors:
  rocm:
    type: rocm

model:
```

@@ -484,6 +463,34 @@ model:

Note that the labelmap uses a subset of the complete COCO label set that has only 80 objects.

## CPU Detector (not recommended)

The CPU detector type runs a TensorFlow Lite model utilizing the CPU without hardware acceleration. It is recommended to use a hardware accelerated detector type instead for better performance. To configure a CPU based detector, set the `"type"` attribute to `"cpu"`.

:::danger

The CPU detector is not recommended for general use. If you do not have GPU or Edge TPU hardware, using the [OpenVINO Detector](#openvino-detector) in CPU mode is often more efficient than using the CPU detector.

:::

The number of threads used by the interpreter can be specified using the `"num_threads"` attribute, and defaults to `3`.

A TensorFlow Lite model is provided in the container at `/cpu_model.tflite` and is used by this detector type by default. To provide your own model, bind mount the file into the container and provide the path with `model.path`.

```yaml
detectors:
  cpu1:
    type: cpu
    num_threads: 3
    model:
      path: "/custom_model.tflite"
  cpu2:
    type: cpu
    num_threads: 3
```

When using CPU detectors, you can add one CPU detector per camera. Adding more detectors than the number of cameras should not improve performance.

## Deepstack / CodeProject.AI Server Detector

The Deepstack / CodeProject.AI Server detector for Frigate allows you to integrate Deepstack and CodeProject.AI object detection capabilities into Frigate. CodeProject.AI and DeepStack are open-source AI platforms that can be run on various devices such as the Raspberry Pi, Nvidia Jetson, and other compatible hardware. It is important to note that the integration is performed over the network, so inference times may not be as fast as with native Frigate detectors, but it still provides an efficient and reliable solution for object detection and tracking.

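The configuration for this detector is cut off at this point in the diff; a minimal sketch of what such a network detector configuration looks like, assuming a DeepStack server reachable at a hypothetical `deepstack_host`:

```yaml
detectors:
  deepstack:
    type: deepstack
    api_url: http://deepstack_host:80/v1/vision/detection # hypothetical host and port
    api_timeout: 0.1 # seconds to wait before giving up on a request
```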
docs/docs/configuration/pwa.md (new file, 24 lines)
@@ -0,0 +1,24 @@

---
id: pwa
title: Installing Frigate App
---

Frigate supports being installed as a [Progressive Web App](https://web.dev/explore/progressive-web-apps) on Desktop, Android, and iOS.

This adds features including the ability to deep link directly into the app.

## Requirements

In order to install Frigate as a PWA, the following requirements must be met:

- Frigate must be accessed via a secure context (localhost, secure https, etc.)
- On Android, Firefox, Chrome, Edge, Opera, and Samsung Internet Browser all support installing PWAs.
- On iOS 16.4 and later, PWAs can be installed from the Share menu in Safari, Chrome, Edge, Firefox, and Orion.

## Installation

Installation varies slightly based on the device that is being used:

- Desktop: Use the install button typically found in the right edge of the address bar
- Android: Use the `Install as App` button in the more options menu
- iOS: Use the `Add to Homescreen` button in the share menu

@@ -154,7 +154,7 @@ Footage can be exported from Frigate by right-clicking (desktop) or long pressin

### Time-lapse export

Time lapse exporting is available only via the [HTTP API](../integrations/api/export-recording-export-camera-name-start-start-time-end-end-time-post.api.mdx).

When exporting a time-lapse the default speed-up is 25x with 30 FPS. This means that every 25 seconds of (real-time) recording is condensed into 1 second of time-lapse video (always without audio) with a smoothness of 30 FPS.

@@ -138,6 +138,16 @@ model:

  # Optional: Label name modifications. These are merged into the standard labelmap.
  labelmap:
    2: vehicle
  # Optional: Map of object labels to their attribute labels (default: depends on model)
  attributes_map:
    person:
      - amazon
      - face
    car:
      - amazon
      - fedex
      - license_plate
      - ups

# Optional: Audio Events Configuration
# NOTE: Can be overridden at the camera level

@@ -324,6 +334,9 @@ review:

      - car
      - person
    # Optional: required zones for an object to be marked as an alert (default: none)
    # NOTE: when setting required zones globally, this zone must exist on all cameras
    # or the config will be considered invalid. In that case the required_zones
    # should be configured at the camera level.
    required_zones:
      - driveway
  # Optional: detections configuration

@@ -333,12 +346,20 @@ review:

      - car
      - person
    # Optional: required zones for an object to be marked as a detection (default: none)
    # NOTE: when setting required zones globally, this zone must exist on all cameras
    # or the config will be considered invalid. In that case the required_zones
    # should be configured at the camera level.
    required_zones:
      - driveway

# Optional: Motion configuration
# NOTE: Can be overridden at the camera level
motion:
  # Optional: enables detection for the camera (default: True)
  # NOTE: Motion detection is required for object detection,
  # setting this to False and leaving detect enabled
  # will result in an error on startup.
  enabled: False
  # Optional: The threshold passed to cv2.threshold to determine if a pixel is different enough to be counted as motion. (default: shown below)
  # Increasing this value will make motion detection less sensitive and decreasing it will make motion detection more sensitive.
  # The value should be between 1 and 255.

@@ -497,6 +518,9 @@ semantic_search:

  enabled: False
  # Optional: Re-index embeddings database from historical tracked objects (default: shown below)
  reindex: False
  # Optional: Set the model size used for embeddings. (default: shown below)
  # NOTE: small model runs on CPU and large model runs on GPU
  model_size: "small"

# Optional: Configuration for AI generated tracked object descriptions
# NOTE: Semantic Search must be enabled for this to do anything.

@@ -716,6 +740,8 @@ cameras:

    genai:
      # Optional: Enable AI description generation (default: shown below)
      enabled: False
      # Optional: Use the object snapshot instead of thumbnails for description generation (default: shown below)
      use_snapshot: False
      # Optional: The default prompt for generating descriptions. Can use replacement
      # variables like "label", "sub_label", "camera" to make more dynamic. (default: shown below)
      prompt: "Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background."

@@ -723,6 +749,12 @@ cameras:

      # Format: {label}: {prompt}
      object_prompts:
        person: "My special person prompt."
      # Optional: objects to generate descriptions for (default: all objects that are tracked)
      objects:
        - person
        - cat
      # Optional: Restrict generation to objects that entered any of the listed zones (default: none, all zones qualify)
      required_zones: []

# Optional
ui:

@@ -786,7 +818,7 @@ camera_groups:

      - side_cam
      - front_doorbell_cam
    # Required: icon used for group
    icon: LuCar
    # Required: index of this group
    order: 0
```

@@ -41,8 +41,6 @@ review:

By default all detections that do not qualify as an alert qualify as a detection. However, detections can further be filtered to only include certain labels or certain zones.

By default a review item will only be marked as an alert if a person or car is detected. This can be configured to include any object or audio label using the following config:

```yaml
# can be overridden at the camera level
review:
  alerts:
    labels:
      - car
      - person
```

@@ -5,10 +5,18 @@ title: Using Semantic Search

Semantic Search in Frigate allows you to find tracked objects within your review items using either the image itself, a user-defined text description, or an automatically generated one. This feature works by creating _embeddings_ — numerical vector representations — for both the images and text descriptions of your tracked objects. By comparing these embeddings, Frigate assesses their similarities to deliver relevant search results.

Frigate has support for [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create embeddings, which runs locally. Embeddings are then saved to Frigate's database.

Semantic Search is accessed via the _Explore_ view in the Frigate UI.

## Minimum System Requirements

## Minimum System Requirements

Semantic Search works by running a large AI model locally on your system. Small or underpowered systems like a Raspberry Pi will not run Semantic Search reliably, or at all.

A minimum of 8GB of RAM is required to use Semantic Search. A GPU is not strictly required but will provide a significant performance increase over CPU-only systems.

For best performance, 16GB or more of RAM and a dedicated GPU are recommended.

## Configuration

Semantic Search is disabled by default and must be enabled in your config file before it can be used. Semantic Search is a global configuration setting, as in the sketch below.

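A minimal sketch of enabling it, mirroring the reference options shown earlier in this diff:

```yaml
semantic_search:
  enabled: True
  reindex: False
  model_size: "small"
```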
@@ -27,18 +35,34 @@ If you are enabling the Search feature for the first time, be advised that Friga

:::

### Jina AI CLIP

The vision model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in the database. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.

The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Search page when clicking on the gray tracked object chip at the top left of each review item. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.

Differently weighted CLIP models are available and can be selected by setting the `model_size` config option:

:::tip

The CLIP models are downloaded in ONNX format, which means they will be accelerated using GPU hardware when available. This depends on the Docker build that is used. See [the object detector docs](../configuration/object_detectors.md) for more information.

:::

```yaml
semantic_search:
  enabled: True
  model_size: small
```

- Configuring the `large` model employs the full Jina model and will automatically run on the GPU if applicable.
- Configuring the `small` model employs a quantized version of the model that uses much less RAM and runs faster on CPU with a very negligible difference in embedding quality.

## Usage and Best Practices

1. Semantic search is used in conjunction with the other filters available on the Search page. Use a combination of traditional filtering and semantic search for the best results.
2. Use the thumbnail search type when searching for particular objects in the scene. Use the description search type when attempting to discern the intent of your object.
3. Because of how the AI models Frigate uses have been trained, the comparison between text and image embedding distances generally means that with multi-modal (`thumbnail` and `description`) searches, results matching `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" setting to help find what you are looking for. Note that if you are generating descriptions for specific objects or zones only, this may cause search results to prioritize the objects with descriptions even if the ones without them are more relevant.
4. Make your search language and tone closely match exactly what you're looking for. If you are using thumbnail search, **phrase your query as an image caption**. Searching for "red car" may not work as well as "red sedan driving down a residential street on a sunny day".
5. Semantic search on thumbnails tends to return better results when matching large subjects that take up most of the frame. Small things like "cat" tend to not work well.
6. Experiment! Find a tracked object you want to test and start typing keywords and phrases to see what works for you.

@@ -3,7 +3,7 @@ id: snapshots

title: Snapshots
---

Frigate can save a snapshot image to `/media/frigate/clips` for each object that is detected, named as `<camera>-<id>.jpg`. They are also accessible [via the api](../integrations/api/event-snapshot-events-event-id-snapshot-jpg-get.api.mdx).

For users with Frigate+ enabled, snapshots are accessible in the UI in the Frigate+ pane to allow for quick submission to the Frigate+ service.

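A hedged sketch of enabling snapshots with a clean copy for Frigate+ submissions (the retention value is illustrative):

```yaml
snapshots:
  enabled: True
  # also save a clean .png copy without annotations
  clean_copy: True
  retain:
    default: 10 # days, illustrative value
```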
@@ -193,7 +193,7 @@ npm run test

#### 1. Installation

```console
npm install
cd docs && npm install
```

#### 2. Local Development
|
||||
|
||||
@@ -69,6 +69,7 @@ Inference speeds vary greatly depending on the CPU, GPU, or VPU used, some known

| Intel i5 7500   | ~ 15 ms    | Inference speeds on CPU were ~ 260 ms |
| Intel i5 1135G7 | 10 - 15 ms |                                       |
| Intel i5 12600K | ~ 15 ms    | Inference speeds on CPU were ~ 35 ms  |
| Intel Arc A750  | ~ 4 ms     |                                       |

### TensorRT - Nvidia GPU

@@ -250,10 +250,7 @@ The community supported docker image tags for the current stable version are:

- `stable-tensorrt-jp5` - Frigate build optimized for nvidia Jetson devices running Jetpack 5
- `stable-tensorrt-jp4` - Frigate build optimized for nvidia Jetson devices running Jetpack 4.6
- `stable-rk` - Frigate build for SBCs with Rockchip SoC
- `stable-rocm` - Frigate build for [AMD GPUs](../configuration/object_detectors.md#amdrocm-gpu-detector)
- `stable-h8l` - Frigate build for the Hailo-8L M.2 PCIe Raspberry Pi 5 hat

## Home Assistant Addon

@@ -13,7 +13,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect

# Setup a go2rtc stream

First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. For the best experience, you should set the stream name under go2rtc to match the name of your camera so that Frigate will automatically map it and be able to use better live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.9.4#module-streams), not just rtsp.

```yaml
go2rtc:
  streams:
    back:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
```

After adding this to the config, restart Frigate and try to watch the live stream for a single camera by clicking on it from the dashboard. It should look much clearer and more fluent than the original jsmpeg stream.

### What if my video doesn't play?

@@ -46,7 +46,7 @@ The easiest live view to get working is MSE. After adding this to the config, re

```yaml
  streams:
    back:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
      - "ffmpeg:back#video=h264#hardware"
```

- Switch to FFmpeg if needed:

@@ -58,9 +58,8 @@ The easiest live view to get working is MSE. After adding this to the config, re

```yaml
      - ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
```

- If you can see the video but do not have audio, this is most likely because your camera's audio stream codec is not AAC.
- If possible, update your camera's audio settings to AAC in your camera's firmware.
- If your cameras do not support AAC audio, you will need to tell go2rtc to re-encode the audio to AAC on demand if you want audio. This will use additional CPU and add some latency. To add AAC audio on demand, you can update your go2rtc config as follows:

```yaml
go2rtc:
  streams:
    back:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
      - "ffmpeg:back#video=h264#audio=aac#hardware"
```

When using the ffmpeg module, you would add AAC audio like this:

```yaml
go2rtc:
  streams:
    back:
      - "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac#hardware"
```

:::warning

@@ -102,4 +101,4 @@ section.

## Next steps

1. If the stream you added to go2rtc is also used by Frigate for the `record` or `detect` role, you can migrate your config to pull from the RTSP restream to reduce the number of connections to your camera as shown [here](/configuration/restream#reduce-connections-to-camera) and in the sketch after this list.
2. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats and may require opening ports on your router.

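A hedged sketch of that restream migration for the `back` camera used throughout this guide (the authoritative version is in the restream docs linked above):

```yaml
go2rtc:
  streams:
    back:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
cameras:
  back:
    ffmpeg:
      inputs:
        # pull from go2rtc's local RTSP restream instead of opening
        # a second connection to the camera
        - path: rtsp://127.0.0.1:8554/back
          input_args: preset-rtsp-restream
          roles:
            - record
            - detect
```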
@@ -3,25 +3,38 @@ id: reverse_proxy

title: Setting up a reverse proxy
---

This guide outlines the basic configuration steps needed to set up a reverse proxy in front of your Frigate instance.

A reverse proxy is typically needed if you want to set up Frigate on a custom URL, on a subdomain, or on a host serving multiple sites. It could also be used to set up your own authentication provider or for more advanced HTTP routing.

Before setting up a reverse proxy, check if any of the built-in functionality in Frigate suits your needs:

| Topic          | Docs                                                                              |
| -------------- | --------------------------------------------------------------------------------- |
| TLS            | Please see the `tls` [configuration option](../configuration/tls.md)              |
| Authentication | Please see the [authentication](../configuration/authentication.md) documentation |
| IPv6           | [Enabling IPv6](../configuration/advanced.md#enabling-ipv6)                       |

**Note about TLS**

When using a reverse proxy, the TLS session is usually terminated at the proxy, sending the internal request over plain HTTP. If this is the desired behavior, TLS must first be disabled in Frigate, or you will encounter an HTTP 400 error: "The plain HTTP request was sent to HTTPS port."
To disable TLS, set the following in your Frigate configuration:

```yml
tls:
  enabled: false
```

:::warning
A reverse proxy can be used to secure access to an internal web server, but the user will be entirely reliant on the steps they have taken. You must ensure you are following security best practices.
This page does not attempt to outline the specific steps needed to secure your internal website.
Please use your own knowledge to assess and vet the reverse proxy software before you install anything on your system.
:::

## Proxies

There are many solutions available to implement reverse proxies and the community is invited to help out documenting others through a contribution to this page.

* [Apache2](#apache2-reverse-proxy)
* [Nginx](#nginx-reverse-proxy)
* [Traefik](#traefik-reverse-proxy)

## Apache2 Reverse Proxy

@@ -141,3 +154,26 @@ The settings below enabled connection upgrade, sets up logging (optional) and pr

```
}
```

## Traefik Reverse Proxy

This example shows how to add a `label` to the Frigate Docker compose file, enabling Traefik to automatically discover your Frigate instance.
Before using the example below, you must first set up Traefik with the [Docker provider](https://doc.traefik.io/traefik/providers/docker/).

```yml
services:
  frigate:
    container_name: frigate
    image: ghcr.io/blakeblackshear/frigate:stable
    ...
    ...
    labels:
      - "traefik.enable=true"
      - "traefik.http.services.frigate.loadbalancer.server.port=8971"
      - "traefik.http.routers.frigate.rule=Host(`traefik.example.com`)"
```

The above configuration will create a "service" in Traefik, automatically adding your container's IP on port 8971 as a backend.
It will also add a router, routing requests to "traefik.example.com" to your local container.

Note that with this approach, you don't need to expose any ports for the Frigate instance since all traffic will be routed over the internal Docker network.

@@ -1,534 +0,0 @@
|
||||
---
|
||||
id: api
|
||||
title: HTTP API
|
||||
---
|
||||
|
||||
A web server is available on port 5000 with the following endpoints.
|
||||
|
||||
## Management & Information
|
||||
|
||||
### `GET /api/config`
|
||||
|
||||
A json representation of your configuration
|
||||
|
||||
### `POST /api/restart`
|
||||
|
||||
Restarts Frigate process.
|
||||
|
||||
### `GET /api/stats`
|
||||
|
||||
Contains some granular debug info that can be used for sensors in Home Assistant.
|
||||
|
||||
Sample response:
|
||||
|
||||
```json
|
||||
{
|
||||
/* Per Camera Stats */
|
||||
"cameras": {
|
||||
"back": {
|
||||
/***************
|
||||
* Frames per second being consumed from your camera. If this is higher
|
||||
* than it is supposed to be, you should set -r FPS in your input_args.
|
||||
* camera_fps = process_fps + skipped_fps
|
||||
***************/
|
||||
"camera_fps": 5.0,
|
||||
/***************
|
||||
* Number of times detection is run per second. This can be higher than
|
||||
* your camera FPS because Frigate often looks at the same frame multiple times
|
||||
* or in multiple locations
|
||||
***************/
|
||||
"detection_fps": 1.5,
|
||||
/***************
|
||||
* PID for the ffmpeg process that consumes this camera
|
||||
***************/
|
||||
"capture_pid": 27,
|
||||
/***************
|
||||
* PID for the process that runs detection for this camera
|
||||
***************/
|
||||
"pid": 34,
|
||||
/***************
|
||||
* Frames per second being processed by Frigate.
|
||||
***************/
|
||||
"process_fps": 5.1,
|
||||
/***************
|
||||
* Frames per second skipped for processing by Frigate.
|
||||
***************/
|
||||
"skipped_fps": 0.0
|
||||
}
|
||||
},
|
||||
/***************
|
||||
* Sum of detection_fps across all cameras and detectors.
|
||||
* This should be the sum of all detection_fps values from cameras.
|
||||
***************/
|
||||
"detection_fps": 5.0,
|
||||
/* Detectors Stats */
|
||||
"detectors": {
|
||||
"coral": {
|
||||
/***************
|
||||
* Timestamp when object detection started. If this value stays non-zero and constant
|
||||
* for a long time, that means the detection process is stuck.
|
||||
***************/
|
||||
"detection_start": 0.0,
|
||||
/***************
|
||||
* Time spent running object detection in milliseconds.
|
||||
***************/
|
||||
"inference_speed": 10.48,
|
||||
/***************
|
||||
* PID for the shared process that runs object detection on the Coral.
|
||||
***************/
|
||||
"pid": 25321
|
||||
}
|
||||
},
|
||||
"service": {
|
||||
/* Uptime in seconds */
|
||||
"uptime": 10,
|
||||
"version": "0.10.1-8883709",
|
||||
"latest_version": "0.10.1",
|
||||
/* Storage data in MB for important locations */
|
||||
"storage": {
|
||||
"/media/frigate/clips": {
|
||||
"total": 1000,
|
||||
"used": 700,
|
||||
"free": 300,
|
||||
"mnt_type": "ext4"
|
||||
},
|
||||
"/media/frigate/recordings": {
|
||||
"total": 1000,
|
||||
"used": 700,
|
||||
"free": 300,
|
||||
"mnt_type": "ext4"
|
||||
},
|
||||
"/tmp/cache": {
|
||||
"total": 256,
|
||||
"used": 100,
|
||||
"free": 156,
|
||||
"mnt_type": "tmpfs"
|
||||
},
|
||||
"/dev/shm": {
|
||||
"total": 256,
|
||||
"used": 100,
|
||||
"free": 156,
|
||||
"mnt_type": "tmpfs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"cpu_usages": {
|
||||
"pid": {
|
||||
"cmdline": "ffmpeg...",
|
||||
"cpu": "5.0",
|
||||
"cpu_average": "3.0",
|
||||
"mem": "0.5"
|
||||
}
|
||||
},
|
||||
"gpu_usages": {
|
||||
"gpu-type": {
|
||||
"gpu": "17%",
|
||||
"mem": "18%"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### `GET /api/version`
|
||||
|
||||
Version info
|
||||
|
||||
### `GET /api/ffprobe`
|
||||
|
||||
Get ffprobe output for camera feed paths.
|
||||
|
||||
| param | Type | Description |
|
||||
| ------- | ------ | ---------------------------------- |
|
||||
| `paths` | string | `,` separated list of camera paths |
|
||||
|
||||
### `GET /api/<camera_name>/ptz/info`
|
||||
|
||||
Get PTZ info for the camera.
|
||||
|
||||
## Camera Media
|
||||
|
||||
### `GET /api/<camera_name>`
|
||||
|
||||
An mjpeg stream for debugging. Keep in mind the mjpeg endpoint is for debugging only and will put additional load on the system when in use.
|
||||
|
||||
Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------------------------ |
|
||||
| `fps` | int | Frame rate |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `zones` | int | Draw the zones on the image (0 or 1) |
|
||||
| `mask` | int | Overlay the mask on the image (0 or 1) |
|
||||
| `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) |
|
||||
| `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) |
|
||||
|
||||
You can access a higher resolution mjpeg stream by appending `h=height-in-pixels` to the endpoint. For example `/api/back?h=1080`. You can also increase the FPS by appending `fps=frame-rate` to the URL such as `/api/back?fps=10` or both with `?fps=10&h=1000`.
|
||||
|
||||
### `GET /api/<camera_name>/latest.jpg[?h=300]`
|
||||
|
||||
The most recent frame that Frigate has finished processing. It is a full resolution image by default.
|
||||
|
||||
Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------------------------ |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `zones` | int | Draw the zones on the image (0 or 1) |
|
||||
| `mask` | int | Overlay the mask on the image (0 or 1) |
|
||||
| `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) |
|
||||
| `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) |
|
||||
| `quality` | int | Jpeg encoding quality (0-100). Defaults to 70. |
|
||||
|
||||
Example parameters:
|
||||
|
||||
- `h=300`: resizes the image to 300 pixels tall
|
||||
|
||||
### `GET /api/<camera_name>/<label>/thumbnail.jpg`
|
||||
|
||||
Returns the thumbnail from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/<label>/clip.mp4`
|
||||
|
||||
Returns the clip from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest clip regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/<label>/snapshot.jpg`
|
||||
|
||||
Returns the snapshot image from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/grid.jpg`
|
||||
|
||||
Returns the latest camera image with the regions grid overlaid.
|
||||
|
||||
| param | Type | Description |
|
||||
| ------------ | ----- | ------------------------------------------------------------------------------------------ |
|
||||
| `color` | str | The color of the grid (red,green,blue,black,white). Defaults to "green". |
|
||||
| `font_scale` | float | Font scale. Can be used to increase font size on high resolution cameras. Defaults to 0.5. |
|
||||
|
||||
### `GET /clips/<camera>-<id>.jpg`
|
||||
|
||||
JPG snapshot for the given camera and event id.
|
||||
|
||||
## Events
|
||||
|
||||
### `GET /api/events`
|
||||
|
||||
Events from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| -------------------- | ----- | ----------------------------------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `zones` | str | , separated list of zones |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
| `has_snapshot` | int | Filter to events that have snapshots (0 or 1) |
|
||||
| `has_clip` | int | Filter to events that have clips (0 or 1) |
|
||||
| `include_thumbnails` | int | Include thumbnails in the response (0 or 1) |
|
||||
| `in_progress` | int | Limit to events in progress (0 or 1) |
|
||||
| `time_range` | str | Time range in format after,before (00:00,24:00) |
|
||||
| `timezone` | str | Timezone to use for time range |
|
||||
| `min_score` | float | Minimum score of the event |
|
||||
| `max_score` | float | Maximum score of the event |
|
||||
| `is_submitted` | int | Filter events that are submitted to Frigate+ (0 or 1) |
|
||||
| `min_length` | float | Minimum length of the event |
|
||||
| `max_length` | float | Maximum length of the event |
|
||||
|
||||
### `GET /api/events/summary`
|
||||
|
||||
Returns summary data for events in the database. Used by the Home Assistant integration.
|
||||
|
||||
### `GET /api/events/<id>`
|
||||
|
||||
Returns data for a single event.
|
||||
|
||||
### `DELETE /api/events/<id>`
|
||||
|
||||
Permanently deletes the event along with any clips/snapshots.
|
||||
|
||||
### `POST /api/events/<id>/retain`
|
||||
|
||||
Sets retain to true for the event id.
|
||||
|
||||
### `POST /api/events/<id>/plus`
|
||||
|
||||
Submits the snapshot of the event to Frigate+ for labeling.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------------------- | ---- | ---------------------------------- |
|
||||
| `include_annotation` | int | Submit annotation to Frigate+ too. |
|
||||
|
||||
### `PUT /api/events/<id>/false_positive`
|
||||
|
||||
Submits the snapshot of the event to Frigate+ for labeling and adds the detection as a false positive.
|
||||
|
||||
### `DELETE /api/events/<id>/retain`
|
||||
|
||||
Sets retain to false for the event id (event may be deleted quickly after removing).
|
||||
|
||||
### `POST /api/events/<id>/sub_label`
|
||||
|
||||
Set a sub label for an event. For example to update `person` -> `person's name` if they were recognized with facial recognition.
|
||||
Sub labels must be 100 characters or shorter.
|
||||
|
||||
```json
|
||||
{
|
||||
"subLabel": "some_string",
|
||||
"subLabelScore": 0.79
|
||||
}
|
||||
```
|
||||
|
||||
### `GET /api/events/<id>/thumbnail.jpg`
|
||||
|
||||
Returns a thumbnail for the event id optimized for notifications. Works while the event is in progress and after completion. Passing `?format=android` will convert the thumbnail to 2:1 aspect ratio.
|
||||
|
||||
### `GET /api/events/<id>/clip.mp4`
|
||||
|
||||
Returns the clip for the event id. Works after the event has ended.
|
||||
|
||||
### `GET /api/events/<id>/snapshot-clean.png`
|
||||
|
||||
Returns the clean snapshot image for the event id. Only works if `snapshots` and `clean_copy` are enabled in the config.
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | ------------------ |
|
||||
| `download` | bool | Download the image |
|
||||
|
||||
### `GET /api/events/<id>/snapshot.jpg`
|
||||
|
||||
Returns the snapshot image for the event id. Works while the event is in progress and after completion.
|
||||
|
||||
Accepts the following query string parameters, but they are only applied when an event is in progress. After the event is completed, the saved snapshot is returned from disk without modification:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------- |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `crop`      | int  | Crop the snapshot to the bounding box (0 or 1)    |
|
||||
| `quality` | int | Jpeg encoding quality (0-100). Defaults to 70. |
|
||||
| `download` | bool | Download the image |
|
||||
|
||||
### `POST /api/events/<camera_name>/<label>/create`
|
||||
|
||||
Create a manual event with a given `label` (ex: doorbell press) to capture a specific event besides an object being detected.
|
||||
|
||||
:::warning
|
||||
|
||||
Recording retention config still applies to manual events; if Frigate is configured with `mode: motion`, the manual event will only keep recording segments when motion occurred.
|
||||
|
||||
:::
|
||||
|
||||
**Optional Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"sub_label": "some_string", // add sub label to event
|
||||
"duration": 30, // predetermined length of event (default: 30 seconds) or can be to null for indeterminate length event
|
||||
"include_recording": true, // whether the event should save recordings along with the snapshot that is taken
|
||||
"draw": {
|
||||
// optional annotations that will be drawn on the snapshot
|
||||
"boxes": [
|
||||
{
|
||||
"box": [0.5, 0.5, 0.25, 0.25], // box consists of x, y, width, height which are on a scale between 0 - 1
|
||||
"color": [255, 0, 0], // color of the box, default is red
|
||||
"score": 100 // optional score associated with the box
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Success Response:**
|
||||
|
||||
```json
|
||||
{
|
||||
"event_id": "1682970645.13116-1ug7ns",
|
||||
"message": "Successfully created event.",
|
||||
"success": true
|
||||
}
|
||||
```
|
||||
|
||||
### `PUT /api/events/<event_id>/end`
|
||||
|
||||
End a specific manual event without a predetermined length.
|
||||
|
||||
### `GET /api/events/<id>/preview.gif`
|
||||
|
||||
Gif covering the first 20 seconds of a specific event.
|
||||
|
||||
## Previews
|
||||
|
||||
Previews are low res / fps videos that are quickly scrubbable and can be used for notifications or time-lapses.
|
||||
|
||||
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
Metadata about previews for this time range.
|
||||
|
||||
### `GET /api/preview/<year>-<month>/<day>/<hour>/<camera>/<timezone>`
|
||||
|
||||
Metadata about previews for this hour.
|
||||
|
||||
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
List of frames in the preview cache for the time range. Previews are only kept in the cache until they are combined into an mp4 at the end of the hour.
|
||||
|
||||
### `GET /api/preview/<file_name>/thumbnail.jpg`
|
||||
|
||||
Specific preview frame from preview cache.
|
||||
|
||||
### `GET /<camera>/start/<start-timestamp>/end/<end-timestamp>/preview`
|
||||
|
||||
Looping image made from preview video / frames during this time range.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------- | ---- | -------------------------------- |
|
||||
| `format` | str | Format of preview [`gif`, `mp4`] |
|
||||
|
||||
## Recordings
|
||||
|
||||
### `GET /vod/<year>-<month>/<day>/<hour>/<camera>/master.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the specified hour and camera. Can be viewed in an application like VLC.
|
||||
|
||||
### `GET /vod/event/<event-id>/index.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the specified event. Can be viewed in an application like VLC.
|
||||
|
||||
### `GET /vod/<camera>/start/<start-timestamp>/end/<end-timestamp>/index.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the camera with the specified time range. Can be viewed in an application like VLC.
|
||||
|
||||
### `POST /api/export/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
Export recordings from `start-timestamp` to `end-timestamp` for `camera` as a single mp4 file. These recordings will be exported to the `/media/frigate/exports` folder.
|
||||
|
||||
It is also possible to export this recording as a time-lapse.
|
||||
|
||||
**Optional Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"playback": "realtime" // playback factor: realtime or timelapse_25x
|
||||
}
|
||||
```
|
||||
|
||||
### `DELETE /api/export/<export_name>`
|
||||
|
||||
Delete an export from disk.
|
||||
|
||||
### `PATCH /api/export/<export_name_current>/<export_name_new>`
|
||||
|
||||
Renames an export.
|
||||
|
||||
### `GET /api/<camera_name>/recordings/summary`
|
||||
|
||||
Hourly summary of recordings data for a camera.
|
||||
|
||||
### `GET /api/<camera_name>/recordings`
|
||||
|
||||
Get recording segment details for the given timestamp range.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------- | ---- | ------------------------------------- |
|
||||
| `after` | int | Unix timestamp for beginning of range |
|
||||
| `before` | int | Unix timestamp for end of range |
|
||||
|
||||
### `GET /api/<camera_name>/recordings/<frame_time>/snapshot.png`
|
||||
|
||||
Returns the snapshot image from the specific point in that cameras recordings.
|
||||
|
||||
## Reviews
|
||||
|
||||
### `GET /api/review`
|
||||
|
||||
Reviews from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | -------------------------------------------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `zones` | str | , separated list of zones |
|
||||
| `reviewed` | int | Include items that have been reviewed (0 or 1) |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
| `severity` | str | Limit items to severity (alert, detection, significant_motion) |
|
||||
|
||||
### `GET /api/review/<id>`
|
||||
|
||||
Get review with `id` from the database.
|
||||
|
||||
### `GET /api/review/summary`
|
||||
|
||||
Summary of reviews for the last 30 days. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | --------------------------- |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `timezone` | str | Timezone name |
|
||||
|
||||
### `POST /api/reviews/viewed`
|
||||
|
||||
Mark item(s) as reviewed.
|
||||
|
||||
**Required Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"ids": ["123", "456"] // , separated list of review IDs
|
||||
}
|
||||
```
|
||||
|
||||
### `DELETE /api/review/<id>/viewed`
|
||||
|
||||
Mark an item as not reviewed.
|
||||
|
||||
### `POST /api/reviews/delete`
|
||||
|
||||
Delete review items.
|
||||
|
||||
**Required Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"ids": ["123", "456"] // , separated list of review IDs
|
||||
}
|
||||
```
|
||||
|
||||
### `GET /review/activity/motion`
|
||||
|
||||
Get the motion activity for camera(s) during a specified time period.
|
||||
|
||||
| param | Type | Description |
|
||||
| --------- | ---- | --------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
|
||||
### `GET /review/activity/audio`
|
||||
|
||||
Get the audio activity for camera(s) during a specified time period.
|
||||
|
||||
| param | Type | Description |
|
||||
| --------- | ---- | --------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
|
||||
## Timeline
|
||||
|
||||
### `GET /api/timeline`
|
||||
|
||||
Timeline of key moments of an event(s) from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ----------------------------------- |
|
||||
| `camera` | str | Name of camera |
|
||||
| `source_id` | str | ID of tracked object |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
|
||||
@@ -25,7 +25,7 @@ Available via HACS as a default repository. To install:
|
||||
- Use [HACS](https://hacs.xyz/) to install the integration:
|
||||
|
||||
```
|
||||
Home Assistant > HACS > Click in the Search bar and type "Frigate" > Frigate
|
||||
```
|
||||
|
||||
- Restart Home Assistant.
|
||||
@@ -215,7 +215,7 @@ For advanced usecases, this behavior can be changed with the [RTSP URL

template](#options) option. When set, this string will override the default stream
address that is derived from the default behavior described above. This option supports
[jinja2 templates](https://jinja.palletsprojects.com/) and has the `camera` dict
variables from [Frigate API](../integrations/api)
available for the template. Note that no Home Assistant state is available to the
template, only the camera dict from Frigate.

|
||||
@@ -11,7 +11,7 @@ These are the MQTT messages generated by Frigate. The default topic_prefix is `f

Designed to be used as an availability topic with Home Assistant. Possible messages are:
"online": published when Frigate is running (on startup)
"offline": published after Frigate has stopped

### `frigate/restart`
|
||||
|
||||
@@ -147,6 +147,10 @@ Message published for each changed review item. The first message is published w

Same data available at `/api/stats` published at a configurable interval.

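A hedged sketch of setting that interval in the Frigate config (60 seconds is the documented default; the broker host is a placeholder):

```yaml
mqtt:
  host: mqtt.server.com # hypothetical broker
  # how often frigate/stats is published, in seconds
  stats_interval: 60
```
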
### `frigate/camera_activity`

Returns data about each camera, its current features, and if it is detecting motion, objects, etc. Can be triggered by publishing to `frigate/onConnect`.
|
||||
|
||||
### `frigate/notifications/set`
|
||||
|
||||
Topic to turn notifications on and off. Expected values are `ON` and `OFF`.
|
||||
|
||||
@@ -23,7 +23,7 @@ In Frigate, you can use an environment variable or a docker secret named `PLUS_A
|
||||
|
||||
:::warning
You cannot use the `environment_vars` section of your Frigate configuration file to set this environment variable. It must be defined as an environment variable in the docker config or HA addon config.

:::

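A hedged sketch of setting it via Docker compose (the key value is a placeholder):

```yaml
services:
  frigate:
    image: ghcr.io/blakeblackshear/frigate:stable
    environment:
      - PLUS_API_KEY=your_plus_api_key # hypothetical value
```
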
@@ -18,3 +18,7 @@ Please use your own knowledge to assess and vet them before you install anything

[Double Take](https://github.com/skrashevich/double-take) provides a unified UI and API for processing and training images for facial recognition.
It supports automatically setting the sub labels in Frigate for person objects that are detected and recognized.
This is a fork (with fixed errors and new features) of the [original Double Take](https://github.com/jakowenko/double-take) project, which, unfortunately, is no longer maintained by its author.
|
||||
|
||||
## [Frigate telegram](https://github.com/OldTyT/frigate-telegram)
|
||||
|
||||
[Frigate telegram](https://github.com/OldTyT/frigate-telegram) makes it possible to send events from Frigate to Telegram. Events are sent as a message with a text description, video, and thumbnail.
|
||||
|
||||
@@ -28,6 +28,18 @@ The USB coral has different IDs when it is uninitialized and initialized.
|
||||
- When running Frigate in a VM, Proxmox lxc, etc. you must ensure both device IDs are mapped.
|
||||
- When running HA OS you may need to run the Full Access version of the Frigate addon with the `Protected Mode` switch disabled so that the coral can be accessed.
|
||||
|
||||
### Synology 716+II running DSM 7.2.1-69057 Update 5

Some users have reported that this older device runs an older kernel, causing issues with the Coral not being detected. The following steps allowed it to be detected correctly:

1. Plug the Coral TPU into any of the USB ports on the NAS.
2. Open the Control Panel - Info screen. The Coral TPU will be shown as a generic device.
3. Start the Docker container with the Coral TPU enabled in the config.
4. The TPU will be detected, but a few moments later it will disconnect.
5. While leaving the TPU device plugged in, restart the NAS using the reboot command in the UI. Do NOT unplug the NAS or power it off.
6. Open the Control Panel - Info screen. The Coral TPU will now be recognized as a USB Device - Google Inc.
7. Start the Frigate container. Everything should work now!
|
||||
|
||||
## USB Coral Detection Appears to be Stuck
|
||||
|
||||
The USB Coral can become stuck and need to be restarted, this can happen for a number of reasons depending on hardware and software setup. Some common reasons are:
|
||||
|
||||
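When checking whether the Coral is visible at all, probing for its two USB IDs can help. A small sketch using pyusb (`pip install pyusb`); the IDs (1a6e:089a uninitialized, 18d1:9302 initialized) are the commonly documented Coral values:

```python
# Sketch: probe for either Coral USB ID with pyusb.
import usb.core

CORAL_IDS = {
    (0x1A6E, 0x089A): "uninitialized (Global Unichip Corp.)",
    (0x18D1, 0x9302): "initialized (Google Inc.)",
}

for (vendor, product), state in CORAL_IDS.items():
    if usb.core.find(idVendor=vendor, idProduct=product) is not None:
        print(f"Coral found at {vendor:04x}:{product:04x} - {state}")
```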
@@ -1,102 +0,0 @@
-const path = require("path");
-
-module.exports = {
-  title: "Frigate",
-  tagline: "NVR With Realtime Object Detection for IP Cameras",
-  url: "https://docs.frigate.video",
-  baseUrl: "/",
-  onBrokenLinks: "throw",
-  onBrokenMarkdownLinks: "warn",
-  favicon: "img/favicon.ico",
-  organizationName: "blakeblackshear",
-  projectName: "frigate",
-  themes: ["@docusaurus/theme-mermaid"],
-  markdown: {
-    mermaid: true,
-  },
-  themeConfig: {
-    algolia: {
-      appId: "WIURGBNBPY",
-      apiKey: "d02cc0a6a61178b25da550212925226b",
-      indexName: "frigate",
-    },
-    docs: {
-      sidebar: {
-        hideable: true,
-      },
-    },
-    prism: {
-      additionalLanguages: ["bash", "json"],
-    },
-    navbar: {
-      title: "Frigate",
-      logo: {
-        alt: "Frigate",
-        src: "img/logo.svg",
-        srcDark: "img/logo-dark.svg",
-      },
-      items: [
-        {
-          to: "/",
-          activeBasePath: "docs",
-          label: "Docs",
-          position: "left",
-        },
-        {
-          href: "https://frigate.video",
-          label: "Website",
-          position: "right",
-        },
-        {
-          href: "http://demo.frigate.video",
-          label: "Demo",
-          position: "right",
-        },
-        {
-          href: "https://github.com/blakeblackshear/frigate",
-          label: "GitHub",
-          position: "right",
-        },
-      ],
-    },
-    footer: {
-      style: "dark",
-      links: [
-        {
-          title: "Community",
-          items: [
-            {
-              label: "GitHub",
-              href: "https://github.com/blakeblackshear/frigate",
-            },
-            {
-              label: "Discussions",
-              href: "https://github.com/blakeblackshear/frigate/discussions",
-            },
-          ],
-        },
-      ],
-    },
-    copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`,
-  },
-  plugins: [path.resolve(__dirname, "plugins", "raw-loader")],
-  presets: [
-    [
-      "@docusaurus/preset-classic",
-      {
-        docs: {
-          routeBasePath: "/",
-          sidebarPath: require.resolve("./sidebars.js"),
-          // Please change this to your repo.
-          editUrl:
-            "https://github.com/blakeblackshear/frigate/edit/master/docs/",
-          sidebarCollapsible: false,
-        },
-
-        theme: {
-          customCss: require.resolve("./src/css/custom.css"),
-        },
-      },
-    ],
-  ],
-};
158 docs/docusaurus.config.ts Normal file
@@ -0,0 +1,158 @@
+import type * as Preset from '@docusaurus/preset-classic';
+import * as path from 'node:path';
+import type { Config, PluginConfig } from '@docusaurus/types';
+import type * as OpenApiPlugin from 'docusaurus-plugin-openapi-docs';
+
+const config: Config = {
+  title: 'Frigate',
+  tagline: 'NVR With Realtime Object Detection for IP Cameras',
+  url: 'https://docs.frigate.video',
+  baseUrl: '/',
+  onBrokenLinks: 'throw',
+  onBrokenMarkdownLinks: 'warn',
+  favicon: 'img/favicon.ico',
+  organizationName: 'blakeblackshear',
+  projectName: 'frigate',
+  themes: ['@docusaurus/theme-mermaid', 'docusaurus-theme-openapi-docs'],
+  markdown: {
+    mermaid: true,
+  },
+  themeConfig: {
+    algolia: {
+      appId: 'WIURGBNBPY',
+      apiKey: 'd02cc0a6a61178b25da550212925226b',
+      indexName: 'frigate',
+    },
+    docs: {
+      sidebar: {
+        hideable: true,
+      },
+    },
+    prism: {
+      additionalLanguages: ['bash', 'json'],
+    },
+    languageTabs: [
+      {
+        highlight: 'python',
+        language: 'python',
+        logoClass: 'python',
+      },
+      {
+        highlight: 'javascript',
+        language: 'nodejs',
+        logoClass: 'nodejs',
+      },
+      {
+        highlight: 'javascript',
+        language: 'javascript',
+        logoClass: 'javascript',
+      },
+      {
+        highlight: 'bash',
+        language: 'curl',
+        logoClass: 'curl',
+      },
+      {
+        highlight: 'rust',
+        language: 'rust',
+        logoClass: 'rust',
+      },
+    ],
+    navbar: {
+      title: 'Frigate',
+      logo: {
+        alt: 'Frigate',
+        src: 'img/logo.svg',
+        srcDark: 'img/logo-dark.svg',
+      },
+      items: [
+        {
+          to: '/',
+          activeBasePath: 'docs',
+          label: 'Docs',
+          position: 'left',
+        },
+        {
+          href: 'https://frigate.video',
+          label: 'Website',
+          position: 'right',
+        },
+        {
+          href: 'http://demo.frigate.video',
+          label: 'Demo',
+          position: 'right',
+        },
+        {
+          href: 'https://github.com/blakeblackshear/frigate',
+          label: 'GitHub',
+          position: 'right',
+        },
+      ],
+    },
+    footer: {
+      style: 'dark',
+      links: [
+        {
+          title: 'Community',
+          items: [
+            {
+              label: 'GitHub',
+              href: 'https://github.com/blakeblackshear/frigate',
+            },
+            {
+              label: 'Discussions',
+              href: 'https://github.com/blakeblackshear/frigate/discussions',
+            },
+          ],
+        },
+      ],
+    },
+    copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`,
+  },
+  plugins: [
+    path.resolve(__dirname, 'plugins', 'raw-loader'),
+    [
+      'docusaurus-plugin-openapi-docs',
+      {
+        id: 'openapi',
+        docsPluginId: 'classic', // configured for preset-classic
+        config: {
+          frigateApi: {
+            specPath: 'static/frigate-api.yaml',
+            outputDir: 'docs/integrations/api',
+            sidebarOptions: {
+              groupPathsBy: 'tag',
+              categoryLinkSource: 'tag',
+              sidebarCollapsible: true,
+              sidebarCollapsed: true,
+            },
+            showSchemas: true,
+          } satisfies OpenApiPlugin.Options,
+        },
+      },
+    ],
+  ] as PluginConfig[],
+  presets: [
+    [
+      'classic',
+      {
+        docs: {
+          routeBasePath: '/',
+          sidebarPath: './sidebars.ts',
+          // Please change this to your repo.
+          editUrl: 'https://github.com/blakeblackshear/frigate/edit/master/docs/',
+          sidebarCollapsible: false,
+          docItemComponent: '@theme/ApiItem', // Derived from docusaurus-theme-openapi
+        },
+
+        theme: {
+          customCss: './src/css/custom.css',
+        },
+      } satisfies Preset.Options,
+    ],
+  ],
+};
+
+export default async function createConfig() {
+  return config;
+}
4673 docs/package-lock.json generated
File diff suppressed because it is too large
@@ -4,11 +4,14 @@
   "private": true,
   "scripts": {
     "docusaurus": "docusaurus",
-    "start": "docusaurus start --host 0.0.0.0",
-    "build": "docusaurus build",
+    "start": "npm run regen-docs && docusaurus start --host 0.0.0.0",
+    "build": "npm run regen-docs && docusaurus build",
     "swizzle": "docusaurus swizzle",
     "deploy": "docusaurus deploy",
     "clear": "docusaurus clear",
+    "gen-api-docs": "docusaurus gen-api-docs all",
+    "clear-api-docs": "docusaurus clean-api-docs all",
+    "regen-docs": "npm run clear-api-docs && npm run gen-api-docs",
     "serve": "docusaurus serve --host 0.0.0.0",
     "write-translations": "docusaurus write-translations",
     "write-heading-ids": "docusaurus write-heading-ids"
@@ -17,8 +20,11 @@
     "@docusaurus/core": "^3.5.2",
     "@docusaurus/preset-classic": "^3.5.2",
     "@docusaurus/theme-mermaid": "^3.5.2",
-    "@mdx-js/react": "^3.0.0",
-    "clsx": "^2.0.0",
+    "@docusaurus/plugin-content-docs": "^3.5.2",
+    "@mdx-js/react": "^3.0.1",
+    "clsx": "^2.1.1",
+    "docusaurus-plugin-openapi-docs": "^4.1.0",
+    "docusaurus-theme-openapi-docs": "^4.1.0",
     "prism-react-renderer": "^2.4.0",
     "raw-loader": "^4.0.2",
     "react": "^18.3.1",
@@ -1,87 +0,0 @@
-module.exports = {
-  docs: {
-    Frigate: [
-      "frigate/index",
-      "frigate/hardware",
-      "frigate/installation",
-      "frigate/camera_setup",
-      "frigate/video_pipeline",
-      "frigate/glossary",
-    ],
-    Guides: [
-      "guides/getting_started",
-      "guides/configuring_go2rtc",
-      "guides/ha_notifications",
-      "guides/ha_network_storage",
-      "guides/reverse_proxy",
-    ],
-    Configuration: {
-      "Configuration Files": [
-        "configuration/index",
-        "configuration/reference",
-        {
-          type: "link",
-          label: "Go2RTC Configuration Reference",
-          href: "https://github.com/AlexxIT/go2rtc/tree/v1.9.4#configuration",
-        },
-      ],
-      Detectors: [
-        "configuration/object_detectors",
-        "configuration/audio_detectors",
-      ],
-      "Semantic Search": [
-        "configuration/semantic_search",
-        "configuration/genai",
-      ],
-      Cameras: [
-        "configuration/cameras",
-        "configuration/review",
-        "configuration/record",
-        "configuration/snapshots",
-        "configuration/motion_detection",
-        "configuration/birdseye",
-        "configuration/live",
-        "configuration/restream",
-        "configuration/autotracking",
-        "configuration/camera_specific",
-      ],
-      Objects: [
-        "configuration/object_filters",
-        "configuration/masks",
-        "configuration/zones",
-        "configuration/objects",
-        "configuration/stationary_objects",
-      ],
-      "Extra Configuration": [
-        "configuration/authentication",
-        "configuration/notifications",
-        "configuration/hardware_acceleration",
-        "configuration/ffmpeg_presets",
-        "configuration/tls",
-        "configuration/advanced",
-      ],
-    },
-    Integrations: [
-      "integrations/plus",
-      "integrations/home-assistant",
-      "integrations/api",
-      "integrations/mqtt",
-      "integrations/third_party_extensions",
-    ],
-    "Frigate+": [
-      "plus/index",
-      "plus/first_model",
-      "plus/improving_model",
-      "plus/faq",
-    ],
-    Troubleshooting: [
-      "troubleshooting/faqs",
-      "troubleshooting/recordings",
-      "troubleshooting/edgetpu",
-    ],
-    Development: [
-      "development/contributing",
-      "development/contributing-boards",
-    ],
-  },
-};
105 docs/sidebars.ts Normal file
@@ -0,0 +1,105 @@
+import type { SidebarsConfig } from '@docusaurus/plugin-content-docs';
+import { PropSidebarItemLink } from '@docusaurus/plugin-content-docs';
+import frigateHttpApiSidebar from './docs/integrations/api/sidebar';
+
+const sidebars: SidebarsConfig = {
+  docs: {
+    Frigate: [
+      'frigate/index',
+      'frigate/hardware',
+      'frigate/installation',
+      'frigate/camera_setup',
+      'frigate/video_pipeline',
+      'frigate/glossary',
+    ],
+    Guides: [
+      'guides/getting_started',
+      'guides/configuring_go2rtc',
+      'guides/ha_notifications',
+      'guides/ha_network_storage',
+      'guides/reverse_proxy',
+    ],
+    Configuration: {
+      'Configuration Files': [
+        'configuration/index',
+        'configuration/reference',
+        {
+          type: 'link',
+          label: 'Go2RTC Configuration Reference',
+          href: 'https://github.com/AlexxIT/go2rtc/tree/v1.9.4#configuration',
+        } as PropSidebarItemLink,
+      ],
+      Detectors: [
+        'configuration/object_detectors',
+        'configuration/audio_detectors',
+      ],
+      'Semantic Search': [
+        'configuration/semantic_search',
+        'configuration/genai',
+      ],
+      Cameras: [
+        'configuration/cameras',
+        'configuration/review',
+        'configuration/record',
+        'configuration/snapshots',
+        'configuration/motion_detection',
+        'configuration/birdseye',
+        'configuration/live',
+        'configuration/restream',
+        'configuration/autotracking',
+        'configuration/camera_specific',
+      ],
+      Objects: [
+        'configuration/object_filters',
+        'configuration/masks',
+        'configuration/zones',
+        'configuration/objects',
+        'configuration/stationary_objects',
+      ],
+      'Extra Configuration': [
+        'configuration/authentication',
+        'configuration/notifications',
+        'configuration/hardware_acceleration',
+        'configuration/ffmpeg_presets',
+        'configuration/pwa',
+        'configuration/tls',
+        'configuration/advanced',
+      ],
+    },
+    Integrations: [
+      'integrations/plus',
+      'integrations/home-assistant',
+      // This is the HTTP API generated by OpenAPI
+      {
+        type: 'category',
+        label: 'HTTP API',
+        link: {
+          type: 'generated-index',
+          title: 'Frigate HTTP API',
+          description: 'HTTP API',
+          slug: '/integrations/api/frigate-http-api',
+        },
+        items: frigateHttpApiSidebar,
+      },
+      'integrations/mqtt',
+      'integrations/third_party_extensions',
+    ],
+    'Frigate+': [
+      'plus/index',
+      'plus/first_model',
+      'plus/improving_model',
+      'plus/faq',
+    ],
+    Troubleshooting: [
+      'troubleshooting/faqs',
+      'troubleshooting/recordings',
+      'troubleshooting/edgetpu',
+    ],
+    Development: [
+      'development/contributing',
+      'development/contributing-boards',
+    ],
+  },
+};
+
+export default sidebars;
@@ -23,3 +23,214 @@
   margin: 0 calc(-1 * var(--ifm-pre-padding));
   padding: 0 var(--ifm-pre-padding);
 }
+
+/**
+Custom CSS for the OpenAPI Specification. Based on the openapi-docs demo: https://github.com/PaloAltoNetworks/docusaurus-openapi-docs/tree/main/demo
+*/
+
+/* Sidebar Method labels */
+.api-method > .menu__link,
+.schema > .menu__link {
+  align-items: center;
+  justify-content: start;
+}
+
+.api-method > .menu__link::before,
+.schema > .menu__link::before {
+  width: 55px;
+  height: 20px;
+  font-size: 12px;
+  line-height: 20px;
+  text-transform: uppercase;
+  font-weight: 600;
+  border-radius: 0.25rem;
+  border: 1px solid;
+  margin-right: var(--ifm-spacing-horizontal);
+  text-align: center;
+  flex-shrink: 0;
+  border-color: transparent;
+  color: white;
+}
+
+.get > .menu__link::before {
+  content: "get";
+  background-color: var(--ifm-color-primary);
+}
+
+.post > .menu__link::before {
+  content: "post";
+  background-color: var(--ifm-color-success);
+}
+
+.delete > .menu__link::before {
+  content: "del";
+  background-color: var(--openapi-code-red);
+}
+
+.put > .menu__link::before {
+  content: "put";
+  background-color: var(--openapi-code-blue);
+}
+
+.patch > .menu__link::before {
+  content: "patch";
+  background-color: var(--openapi-code-orange);
+}
+
+.head > .menu__link::before {
+  content: "head";
+  background-color: var(--ifm-color-secondary-darkest);
+}
+
+.event > .menu__link::before {
+  content: "event";
+  background-color: var(--ifm-color-secondary-darkest);
+}
+
+.schema > .menu__link::before {
+  content: "schema";
+  background-color: var(--ifm-color-secondary-darkest);
+}
+
+.menu__list-item--deprecated > .menu__link,
+.menu__list-item--deprecated > .menu__link:hover {
+  text-decoration: line-through;
+}
+
+/* Sidebar Method labels High Contrast */
+.api-method-contrast > .menu__link,
+.schema-contrast > .menu__link {
+  align-items: center;
+  justify-content: start;
+}
+
+.api-method-contrast > .menu__link::before,
+.schema-contrast > .menu__link::before {
+  width: 55px;
+  height: 20px;
+  font-size: 12px;
+  line-height: 20px;
+  text-transform: uppercase;
+  font-weight: 600;
+  border-radius: 0.25rem;
+  border: 1px solid;
+  border-inline-start-width: 5px;
+  margin-right: var(--ifm-spacing-horizontal);
+  text-align: center;
+  flex-shrink: 0;
+}
+
+.get-contrast > .menu__link::before {
+  content: "get";
+  background-color: var(--ifm-color-info-contrast-background);
+  color: var(--ifm-color-info-contrast-foreground);
+  border-color: var(--ifm-color-info-dark);
+}
+
+.post-contrast > .menu__link::before {
+  content: "post";
+  background-color: var(--ifm-color-success-contrast-background);
+  color: var(--ifm-color-success-contrast-foreground);
+  border-color: var(--ifm-color-success-dark);
+}
+
+.delete-contrast > .menu__link::before {
+  content: "del";
+  background-color: var(--ifm-color-danger-contrast-background);
+  color: var(--ifm-color-danger-contrast-foreground);
+  border-color: var(--ifm-color-danger-dark);
+}
+
+.put-contrast > .menu__link::before {
+  content: "put";
+  background-color: var(--ifm-color-warning-contrast-background);
+  color: var(--ifm-color-warning-contrast-foreground);
+  border-color: var(--ifm-color-warning-dark);
+}
+
+.patch-contrast > .menu__link::before {
+  content: "patch";
+  background-color: var(--ifm-color-success-contrast-background);
+  color: var(--ifm-color-success-contrast-foreground);
+  border-color: var(--ifm-color-success-dark);
+}
+
+.head-contrast > .menu__link::before {
+  content: "head";
+  background-color: var(--ifm-color-secondary-contrast-background);
+  color: var(--ifm-color-secondary-contrast-foreground);
+  border-color: var(--ifm-color-secondary-dark);
+}
+
+.event-contrast > .menu__link::before {
+  content: "event";
+  background-color: var(--ifm-color-secondary-contrast-background);
+  color: var(--ifm-color-secondary-contrast-foreground);
+  border-color: var(--ifm-color-secondary-dark);
+}
+
+.schema-contrast > .menu__link::before {
+  content: "schema";
+  background-color: var(--ifm-color-secondary-contrast-background);
+  color: var(--ifm-color-secondary-contrast-foreground);
+  border-color: var(--ifm-color-secondary-dark);
+}
+
+/* Simple */
+.api-method-simple > .menu__link {
+  align-items: center;
+  justify-content: start;
+}
+
+.api-method-simple > .menu__link::before {
+  width: 55px;
+  height: 20px;
+  font-size: 12px;
+  line-height: 20px;
+  text-transform: uppercase;
+  font-weight: 600;
+  border-radius: 0.25rem;
+  align-content: start;
+  margin-right: var(--ifm-spacing-horizontal);
+  text-align: right;
+  flex-shrink: 0;
+  border-color: transparent;
+}
+
+.get-simple > .menu__link::before {
+  content: "get";
+  color: var(--ifm-color-info);
+}
+
+.post-simple > .menu__link::before {
+  content: "post";
+  color: var(--ifm-color-success);
+}
+
+.delete-simple > .menu__link::before {
+  content: "del";
+  color: var(--ifm-color-danger);
+}
+
+.put-simple > .menu__link::before {
+  content: "put";
+  color: var(--ifm-color-warning);
+}
+
+.patch-simple > .menu__link::before {
+  content: "patch";
+  color: var(--ifm-color-warning);
+}
+
+.head-simple > .menu__link::before {
+  content: "head";
+  color: var(--ifm-color-secondary-contrast-foreground);
+}
+
+.event-simple > .menu__link::before {
+  content: "event";
+  color: var(--ifm-color-secondary-contrast-foreground);
+}
+
+.schema-simple > .menu__link::before {
+  content: "schema";
+  color: var(--ifm-color-secondary-contrast-foreground);
+}
3450 docs/static/frigate-api.yaml vendored Normal file
File diff suppressed because it is too large
BIN docs/static/img/plus/send-to-plus.jpg vendored (binary file not shown; before: 57 KiB, after: 62 KiB)
BIN docs/static/img/plus/submit-to-plus.jpg vendored (binary file not shown; before: 63 KiB, after: 49 KiB)
@@ -1,27 +1,63 @@
+import argparse
 import faulthandler
+import logging
+import signal
+import sys
 import threading
 
 from flask import cli
+from pydantic import ValidationError
 
 from frigate.app import FrigateApp
+from frigate.config import FrigateConfig
+from frigate.log import setup_logging
 
 
+def main() -> None:
     faulthandler.enable()
 
+    # Clear all existing handlers.
+    logging.basicConfig(
+        level=logging.INFO,
+        handlers=[],
+        force=True,
+    )
+    # Setup the logging thread
+    setup_logging()
+
     threading.current_thread().name = "frigate"
     cli.show_server_banner = lambda *x: None
 
+    # Make sure we exit cleanly on SIGTERM.
+    signal.signal(signal.SIGTERM, lambda sig, frame: sys.exit())
+
+    # Parse the cli arguments.
+    parser = argparse.ArgumentParser(
+        prog="Frigate",
+        description="An NVR with realtime local object detection for IP cameras.",
+    )
+    parser.add_argument("--validate-config", action="store_true")
+    args = parser.parse_args()
+
+    # Load the configuration.
+    try:
+        config = FrigateConfig.load(install=True)
+    except ValidationError as e:
+        print("*************************************************************")
+        print("*************************************************************")
+        print("*** Your config file is not valid! ***")
+        print("*** Please check the docs at ***")
+        print("*** https://docs.frigate.video/configuration/ ***")
+        print("*************************************************************")
+        print("*************************************************************")
+        print("*** Config Validation Errors ***")
+        print("*************************************************************")
+        for error in e.errors():
+            location = ".".join(str(item) for item in error["loc"])
+            print(f"{location}: {error['msg']}")
+        print("*************************************************************")
+        print("*** End Config Validation Errors ***")
+        print("*************************************************************")
+        sys.exit(1)
+    if args.validate_config:
+        print("*************************************************************")
+        print("*** Your config file is valid. ***")
+        print("*************************************************************")
+        sys.exit(0)
+
     # Run the main application.
-    FrigateApp().start()
+    FrigateApp(config).start()
 
 
 if __name__ == "__main__":
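The error formatting above joins pydantic's error locations with dots. A self-contained illustration of that output, using hypothetical models and pydantic v2:

```python
from pydantic import BaseModel, ValidationError

class Camera(BaseModel):
    width: int

class Config(BaseModel):
    cameras: dict[str, Camera]

try:
    Config(cameras={"front": {"width": "not-a-number"}})
except ValidationError as e:
    for error in e.errors():
        # Each loc is a tuple like ("cameras", "front", "width").
        location = ".".join(str(item) for item in error["loc"])
        print(f"{location}: {error['msg']}")
        # -> cameras.front.width: Input should be a valid integer, ...
```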
@@ -7,172 +7,107 @@ import os
 import traceback
 from datetime import datetime, timedelta
 from functools import reduce
-from typing import Optional
+from typing import Any, Optional
 
 import requests
-from flask import Blueprint, Flask, current_app, jsonify, make_response, request
+from fastapi import APIRouter, Body, Path, Request, Response
+from fastapi.encoders import jsonable_encoder
+from fastapi.params import Depends
+from fastapi.responses import JSONResponse, PlainTextResponse
 from markupsafe import escape
 from peewee import operator
 from playhouse.sqliteq import SqliteQueueDatabase
-from werkzeug.middleware.proxy_fix import ProxyFix
 
-from frigate.api.auth import AuthBp, get_jwt_secret, limiter
-from frigate.api.event import EventBp
-from frigate.api.export import ExportBp
-from frigate.api.media import MediaBp
-from frigate.api.notification import NotificationBp
-from frigate.api.preview import PreviewBp
-from frigate.api.review import ReviewBp
+from frigate.api.defs.app_body import AppConfigSetBody
+from frigate.api.defs.app_query_parameters import AppTimelineHourlyQueryParameters
+from frigate.api.defs.tags import Tags
 from frigate.config import FrigateConfig
 from frigate.const import CONFIG_DIR
-from frigate.embeddings import EmbeddingsContext
-from frigate.events.external import ExternalEventProcessor
 from frigate.models import Event, Timeline
-from frigate.plus import PlusApi
-from frigate.ptz.onvif import OnvifController
-from frigate.stats.emitter import StatsEmitter
-from frigate.storage import StorageMaintainer
 from frigate.util.builtin import (
     clean_camera_user_pass,
     get_tz_modifiers,
     update_yaml_from_url,
 )
-from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel
+from frigate.util.services import (
+    ffprobe_stream,
+    get_nvidia_driver_info,
+    restart_frigate,
+    vainfo_hwaccel,
+)
 from frigate.version import VERSION
 
 logger = logging.getLogger(__name__)
 
 
-bp = Blueprint("frigate", __name__)
-bp.register_blueprint(EventBp)
-bp.register_blueprint(ExportBp)
-bp.register_blueprint(MediaBp)
-bp.register_blueprint(PreviewBp)
-bp.register_blueprint(ReviewBp)
-bp.register_blueprint(AuthBp)
-bp.register_blueprint(NotificationBp)
+router = APIRouter(tags=[Tags.app])
 
 
-def create_app(
-    frigate_config,
-    database: SqliteQueueDatabase,
-    embeddings: Optional[EmbeddingsContext],
-    detected_frames_processor,
-    storage_maintainer: StorageMaintainer,
-    onvif: OnvifController,
-    external_processor: ExternalEventProcessor,
-    plus_api: PlusApi,
-    stats_emitter: StatsEmitter,
-):
-    app = Flask(__name__)
-
-    @app.before_request
-    def check_csrf():
-        if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]:
-            pass
-        if "origin" in request.headers and "x-csrf-token" not in request.headers:
-            return jsonify({"success": False, "message": "Missing CSRF header"}), 401
-
-    @app.before_request
-    def _db_connect():
-        if database.is_closed():
-            database.connect()
-
-    @app.teardown_request
-    def _db_close(exc):
-        if not database.is_closed():
-            database.close()
-
-    app.frigate_config = frigate_config
-    app.embeddings = embeddings
-    app.detected_frames_processor = detected_frames_processor
-    app.storage_maintainer = storage_maintainer
-    app.onvif = onvif
-    app.external_processor = external_processor
-    app.plus_api = plus_api
-    app.camera_error_image = None
-    app.stats_emitter = stats_emitter
-    app.jwt_token = get_jwt_secret() if frigate_config.auth.enabled else None
-    # update the request_address with the x-forwarded-for header from nginx
-    app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)
-    # initialize the rate limiter for the login endpoint
-    limiter.init_app(app)
-    if frigate_config.auth.failed_login_rate_limit is None:
-        limiter.enabled = False
-
-    app.register_blueprint(bp)
-
-    return app
-
-
-@bp.route("/")
+@router.get("/", response_class=PlainTextResponse)
 def is_healthy():
     return "Frigate is running. Alive and healthy!"
 
 
-@bp.route("/config/schema.json")
-def config_schema():
-    return current_app.response_class(
-        current_app.frigate_config.schema_json(), mimetype="application/json"
+@router.get("/config/schema.json")
+def config_schema(request: Request):
+    return Response(
+        content=request.app.frigate_config.schema_json(), media_type="application/json"
     )
 
 
-@bp.route("/go2rtc/streams")
+@router.get("/go2rtc/streams")
 def go2rtc_streams():
     r = requests.get("http://127.0.0.1:1984/api/streams")
     if not r.ok:
         logger.error("Failed to fetch streams from go2rtc")
-        return make_response(
-            jsonify({"success": False, "message": "Error fetching stream data"}),
-            500,
+        return JSONResponse(
+            content=({"success": False, "message": "Error fetching stream data"}),
+            status_code=500,
         )
     stream_data = r.json()
     for data in stream_data.values():
         for producer in data.get("producers", []):
             producer["url"] = clean_camera_user_pass(producer.get("url", ""))
-    return jsonify(stream_data)
+    return JSONResponse(content=stream_data)
 
 
-@bp.route("/go2rtc/streams/<camera_name>")
+@router.get("/go2rtc/streams/{camera_name}")
 def go2rtc_camera_stream(camera_name: str):
     r = requests.get(
         f"http://127.0.0.1:1984/api/streams?src={camera_name}&video=all&audio=all&microphone"
     )
     if not r.ok:
         logger.error("Failed to fetch streams from go2rtc")
-        return make_response(
-            jsonify({"success": False, "message": "Error fetching stream data"}),
-            500,
+        return JSONResponse(
+            content=({"success": False, "message": "Error fetching stream data"}),
+            status_code=500,
        )
     stream_data = r.json()
     for producer in stream_data.get("producers", []):
         producer["url"] = clean_camera_user_pass(producer.get("url", ""))
-    return jsonify(stream_data)
+    return JSONResponse(content=stream_data)
 
 
-@bp.route("/version")
+@router.get("/version", response_class=PlainTextResponse)
 def version():
     return VERSION
 
 
-@bp.route("/stats")
-def stats():
-    return jsonify(current_app.stats_emitter.get_latest_stats())
+@router.get("/stats")
+def stats(request: Request):
+    return JSONResponse(content=request.app.stats_emitter.get_latest_stats())
 
 
-@bp.route("/stats/history")
-def stats_history():
-    keys = request.args.get("keys", default=None)
-
+@router.get("/stats/history")
+def stats_history(request: Request, keys: str = None):
     if keys:
         keys = keys.split(",")
 
-    return jsonify(current_app.stats_emitter.get_stats_history(keys))
+    return JSONResponse(content=request.app.stats_emitter.get_stats_history(keys))
 
 
-@bp.route("/config")
-def config():
-    config_obj: FrigateConfig = current_app.frigate_config
+@router.get("/config")
+def config(request: Request):
+    config_obj: FrigateConfig = request.app.frigate_config
     config: dict[str, dict[str, any]] = config_obj.model_dump(
         mode="json", warnings="none", exclude_none=True
     )
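A minimal, self-contained sketch of the routing pattern introduced by this migration: an APIRouter mounted on a FastAPI app, with shared objects reached through `request.app` instead of Flask's `current_app`. The attribute names here are stand-ins, not Frigate's real ones:

```python
from fastapi import APIRouter, FastAPI, Request
from fastapi.responses import JSONResponse, PlainTextResponse

router = APIRouter(tags=["app"])

@router.get("/", response_class=PlainTextResponse)
def is_healthy() -> str:
    return "OK"

@router.get("/stats")
def stats(request: Request) -> JSONResponse:
    # The diff above attaches attributes directly to the app object;
    # app.state would be the more conventional Starlette location.
    return JSONResponse(content=getattr(request.app, "latest_stats", {}))

app = FastAPI()
app.latest_stats = {"uptime": 42}  # stand-in for the real stats emitter
app.include_router(router)
```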
@@ -183,7 +118,7 @@ def config():
     # remove the proxy secret
     config["proxy"].pop("auth_secret", None)
 
-    for camera_name, camera in current_app.frigate_config.cameras.items():
+    for camera_name, camera in request.app.frigate_config.cameras.items():
         camera_dict = config["cameras"][camera_name]
 
         # clean paths
@@ -199,18 +134,18 @@ def config():
     for zone_name, zone in config_obj.cameras[camera_name].zones.items():
         camera_dict["zones"][zone_name]["color"] = zone.color
 
-    config["plus"] = {"enabled": current_app.plus_api.is_active()}
+    config["plus"] = {"enabled": request.app.frigate_config.plus_api.is_active()}
     config["model"]["colormap"] = config_obj.model.colormap
 
     for detector_config in config["detectors"].values():
         detector_config["model"]["labelmap"] = (
-            current_app.frigate_config.model.merged_labelmap
+            request.app.frigate_config.model.merged_labelmap
         )
 
-    return jsonify(config)
+    return JSONResponse(content=config)
 
 
-@bp.route("/config/raw")
+@router.get("/config/raw")
 def config_raw():
     config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
 
@@ -221,43 +156,44 @@ def config_raw():
         config_file = config_file_yaml
 
     if not os.path.isfile(config_file):
-        return make_response(
-            jsonify({"success": False, "message": "Could not find file"}), 404
+        return JSONResponse(
+            content=({"success": False, "message": "Could not find file"}),
+            status_code=404,
         )
 
     with open(config_file, "r") as f:
         raw_config = f.read()
         f.close()
 
-    return raw_config, 200
+    return JSONResponse(
+        content=raw_config, media_type="text/plain", status_code=200
+    )
 
 
-@bp.route("/config/save", methods=["POST"])
-def config_save():
-    save_option = request.args.get("save_option")
-
-    new_config = request.get_data().decode()
+@router.post("/config/save")
+def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
+    new_config = body.decode()
 
     if not new_config:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {"success": False, "message": "Config with body param is required"}
             ),
-            400,
+            status_code=400,
         )
 
     # Validate the config schema
     try:
         FrigateConfig.parse_yaml(new_config)
     except Exception:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": False,
                     "message": f"\nConfig Error:\n\n{escape(str(traceback.format_exc()))}",
                 }
             ),
-            400,
+            status_code=400,
         )
 
     # Save the config to file
@@ -274,14 +210,14 @@ def config_save():
         f.write(new_config)
         f.close()
     except Exception:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": False,
                     "message": "Could not write config file, be sure that Frigate has write permission on the config file.",
                 }
             ),
-            400,
+            status_code=400,
         )
 
     if save_option == "restart":
@@ -289,34 +225,34 @@ def config_save():
         restart_frigate()
     except Exception as e:
         logging.error(f"Error restarting Frigate: {e}")
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": True,
                     "message": "Config successfully saved, unable to restart Frigate",
                 }
             ),
-            200,
+            status_code=200,
         )
 
-    return make_response(
-        jsonify(
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Config successfully saved, restarting (this can take up to one minute)...",
             }
        ),
-        200,
+        status_code=200,
    )
     else:
-        return make_response(
-            jsonify({"success": True, "message": "Config successfully saved."}),
-            200,
+        return JSONResponse(
+            content=({"success": True, "message": "Config successfully saved."}),
+            status_code=200,
        )
 
 
-@bp.route("/config/set", methods=["PUT"])
-def config_set():
+@router.put("/config/set")
+def config_set(request: Request, body: AppConfigSetBody):
     config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")
 
     # Check if we can use .yaml instead of .yml
@@ -330,80 +266,77 @@ def config_set():
         f.close()
 
     try:
-        update_yaml_from_url(config_file, request.url)
+        update_yaml_from_url(config_file, str(request.url))
         with open(config_file, "r") as f:
             new_raw_config = f.read()
             f.close()
         # Validate the config schema
         try:
-            config_obj = FrigateConfig.parse_yaml(new_raw_config)
+            config = FrigateConfig.parse(new_raw_config)
         except Exception:
             with open(config_file, "w") as f:
                 f.write(old_raw_config)
                 f.close()
             logger.error(f"\nConfig Error:\n\n{str(traceback.format_exc())}")
-            return make_response(
-                jsonify(
+            return JSONResponse(
+                content=(
                     {
                         "success": False,
                         "message": "Error parsing config. Check logs for error message.",
                     }
                 ),
-                400,
+                status_code=400,
             )
     except Exception as e:
         logging.error(f"Error updating config: {e}")
-        return make_response(
-            jsonify({"success": False, "message": "Error updating config"}),
-            500,
+        return JSONResponse(
+            content=({"success": False, "message": "Error updating config"}),
+            status_code=500,
        )
 
-    json = request.get_json(silent=True) or {}
-
-    if json.get("requires_restart", 1) == 0:
-        current_app.frigate_config = FrigateConfig.parse_object(
-            config_obj, plus_api=current_app.plus_api
-        )
-
-    return make_response(
-        jsonify(
+    if body.requires_restart == 0:
+        request.app.frigate_config = config
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Config successfully updated, restart to apply",
             }
        ),
-        200,
+        status_code=200,
    )
 
 
-@bp.route("/ffprobe", methods=["GET"])
-def ffprobe():
-    path_param = request.args.get("paths", "")
+@router.get("/ffprobe")
+def ffprobe(request: Request, paths: str = ""):
+    path_param = paths
 
     if not path_param:
-        return make_response(
-            jsonify({"success": False, "message": "Path needs to be provided."}), 404
+        return JSONResponse(
+            content=({"success": False, "message": "Path needs to be provided."}),
+            status_code=404,
        )
 
     if path_param.startswith("camera"):
         camera = path_param[7:]
 
-        if camera not in current_app.frigate_config.cameras.keys():
-            return make_response(
-                jsonify(
+        if camera not in request.app.frigate_config.cameras.keys():
+            return JSONResponse(
+                content=(
                     {"success": False, "message": f"{camera} is not a valid camera."}
                 ),
-                404,
+                status_code=404,
            )
 
-        if not current_app.frigate_config.cameras[camera].enabled:
-            return make_response(
-                jsonify({"success": False, "message": f"{camera} is not enabled."}), 404
+        if not request.app.frigate_config.cameras[camera].enabled:
+            return JSONResponse(
+                content=({"success": False, "message": f"{camera} is not enabled."}),
+                status_code=404,
            )
 
         paths = map(
             lambda input: input.path,
-            current_app.frigate_config.cameras[camera].ffmpeg.inputs,
+            request.app.frigate_config.cameras[camera].ffmpeg.inputs,
         )
     elif "," in clean_camera_user_pass(path_param):
         paths = path_param.split(",")
@@ -414,7 +347,7 @@ def ffprobe():
     output = []
 
     for path in paths:
-        ffprobe = ffprobe_stream(current_app.frigate_config.ffmpeg, path.strip())
+        ffprobe = ffprobe_stream(request.app.frigate_config.ffmpeg, path.strip())
         output.append(
             {
                 "return_code": ffprobe.returncode,
@@ -431,14 +364,14 @@ def ffprobe():
             }
         )
 
-    return jsonify(output)
+    return JSONResponse(content=output)
 
 
-@bp.route("/vainfo", methods=["GET"])
+@router.get("/vainfo")
 def vainfo():
     vainfo = vainfo_hwaccel()
-    return jsonify(
-        {
+    return JSONResponse(
+        content={
             "return_code": vainfo.returncode,
             "stderr": (
                 vainfo.stderr.decode("unicode_escape").strip()
@@ -454,41 +387,49 @@ def vainfo():
     )
 
 
-@bp.route("/logs/<service>", methods=["GET"])
-def logs(service: str):
+@router.get("/nvinfo")
+def nvinfo():
+    return JSONResponse(content=get_nvidia_driver_info())
+
+
+@router.get("/logs/{service}", tags=[Tags.logs])
+def logs(
+    service: str = Path(enum=["frigate", "nginx", "go2rtc"]),
+    download: Optional[str] = None,
+    start: Optional[int] = 0,
+    end: Optional[int] = None,
+):
+    """Get logs for the requested service (frigate/nginx/go2rtc)"""
+
     def download_logs(service_location: str):
         try:
             file = open(service_location, "r")
             contents = file.read()
             file.close()
-            return jsonify(contents)
+            return JSONResponse(jsonable_encoder(contents))
         except FileNotFoundError as e:
             logger.error(e)
-            return make_response(
-                jsonify({"success": False, "message": "Could not find log file"}),
-                500,
+            return JSONResponse(
+                content={"success": False, "message": "Could not find log file"},
+                status_code=500,
            )
 
     log_locations = {
         "frigate": "/dev/shm/logs/frigate/current",
         "go2rtc": "/dev/shm/logs/go2rtc/current",
         "nginx": "/dev/shm/logs/nginx/current",
         "chroma": "/dev/shm/logs/chroma/current",
     }
     service_location = log_locations.get(service)
 
     if not service_location:
-        return make_response(
-            jsonify({"success": False, "message": "Not a valid service"}),
-            404,
+        return JSONResponse(
+            content={"success": False, "message": "Not a valid service"},
+            status_code=404,
        )
 
-    if request.args.get("download", type=bool, default=False):
+    if download:
         return download_logs(service_location)
 
-    start = request.args.get("start", type=int, default=0)
-    end = request.args.get("end", type=int)
-
     try:
         file = open(service_location, "r")
         contents = file.read()
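The `Path(enum=...)` constraint above can also be expressed with `typing.Literal`, a closely related way to restrict path values in FastAPI; a minimal sketch:

```python
from typing import Literal
from fastapi import FastAPI

app = FastAPI()

@app.get("/logs/{service}")
def logs(service: Literal["frigate", "nginx", "go2rtc"]) -> dict:
    # FastAPI rejects any other path value with a 422 before this runs.
    return {"service": service}
```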
@@ -529,49 +470,47 @@ def logs(service: str):
 
         logLines.append(currentLine)
 
-    return make_response(
-        jsonify({"totalLines": len(logLines), "lines": logLines[start:end]}),
-        200,
+    return JSONResponse(
+        content={"totalLines": len(logLines), "lines": logLines[start:end]},
+        status_code=200,
    )
     except FileNotFoundError as e:
         logger.error(e)
-        return make_response(
-            jsonify({"success": False, "message": "Could not find log file"}),
-            500,
+        return JSONResponse(
+            content={"success": False, "message": "Could not find log file"},
+            status_code=500,
        )
 
 
-@bp.route("/restart", methods=["POST"])
+@router.post("/restart")
 def restart():
     try:
         restart_frigate()
     except Exception as e:
         logging.error(f"Error restarting Frigate: {e}")
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": False,
                     "message": "Unable to restart Frigate.",
                 }
             ),
-            500,
+            status_code=500,
        )
 
-    return make_response(
-        jsonify(
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Restarting (this can take up to one minute)...",
             }
        ),
-        200,
+        status_code=200,
    )
 
 
-@bp.route("/labels")
-def get_labels():
-    camera = request.args.get("camera", type=str, default="")
-
+@router.get("/labels")
+def get_labels(camera: str = ""):
     try:
         if camera:
             events = Event.select(Event.label).where(Event.camera == camera).distinct()
@@ -579,24 +518,23 @@ def get_labels():
         events = Event.select(Event.label).distinct()
     except Exception as e:
         logger.error(e)
-        return make_response(
-            jsonify({"success": False, "message": "Failed to get labels"}), 404
+        return JSONResponse(
+            content=({"success": False, "message": "Failed to get labels"}),
+            status_code=404,
        )
 
     labels = sorted([e.label for e in events])
-    return jsonify(labels)
+    return JSONResponse(content=labels)
 
 
-@bp.route("/sub_labels")
-def get_sub_labels():
-    split_joined = request.args.get("split_joined", type=int)
-
+@router.get("/sub_labels")
+def get_sub_labels(split_joined: Optional[int] = None):
     try:
         events = Event.select(Event.sub_label).distinct()
     except Exception:
-        return make_response(
-            jsonify({"success": False, "message": "Failed to get sub_labels"}),
-            404,
+        return JSONResponse(
+            content=({"success": False, "message": "Failed to get sub_labels"}),
+            status_code=404,
        )
 
     sub_labels = [e.sub_label for e in events]
@@ -617,15 +555,11 @@ def get_sub_labels():
             sub_labels.append(part.strip())
 
     sub_labels.sort()
-    return jsonify(sub_labels)
+    return JSONResponse(content=sub_labels)
 
 
-@bp.route("/timeline")
-def timeline():
-    camera = request.args.get("camera", "all")
-    source_id = request.args.get("source_id", type=str)
-    limit = request.args.get("limit", 100)
-
+@router.get("/timeline")
+def timeline(camera: str = "all", limit: int = 100, source_id: Optional[str] = None):
     clauses = []
 
     selected_columns = [
@@ -654,18 +588,18 @@ def timeline():
         .dicts()
     )
 
-    return jsonify([t for t in timeline])
+    return JSONResponse(content=[t for t in timeline])
 
 
-@bp.route("/timeline/hourly")
-def hourly_timeline():
+@router.get("/timeline/hourly")
+def hourly_timeline(params: AppTimelineHourlyQueryParameters = Depends()):
     """Get hourly summary for timeline."""
-    cameras = request.args.get("cameras", "all")
-    labels = request.args.get("labels", "all")
-    before = request.args.get("before", type=float)
-    after = request.args.get("after", type=float)
-    limit = request.args.get("limit", 200)
-    tz_name = request.args.get("timezone", default="utc", type=str)
+    cameras = params.cameras
+    labels = params.labels
+    before = params.before
+    after = params.after
+    limit = params.limit
+    tz_name = params.timezone
 
     _, minute_modifier, _ = get_tz_modifiers(tz_name)
     minute_offset = int(minute_modifier.split(" ")[0])
@@ -731,8 +665,8 @@ def hourly_timeline():
     else:
         hours[hour].insert(0, t)
 
-    return jsonify(
-        {
+    return JSONResponse(
+        content={
             "start": start,
             "end": end,
             "count": count,
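The `Depends()` pattern used for `hourly_timeline` bundles query parameters into one object. A self-contained sketch of that technique, with a hypothetical model standing in for `AppTimelineHourlyQueryParameters`:

```python
from typing import Optional
from fastapi import Depends, FastAPI
from pydantic import BaseModel

app = FastAPI()

class TimelineHourlyParams(BaseModel):
    # Each field becomes an individual query parameter.
    cameras: str = "all"
    labels: str = "all"
    before: Optional[float] = None
    after: Optional[float] = None
    limit: int = 200
    timezone: str = "utc"

@app.get("/timeline/hourly")
def hourly_timeline(params: TimelineHourlyParams = Depends()):
    return params.model_dump()
```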
@@ -12,29 +12,49 @@ import time
 from datetime import datetime
 from pathlib import Path
 
-from flask import Blueprint, current_app, jsonify, make_response, redirect, request
-from flask_limiter import Limiter
+from fastapi import APIRouter, Request, Response
+from fastapi.responses import JSONResponse, RedirectResponse
 from joserfc import jwt
 from peewee import DoesNotExist
+from slowapi import Limiter
 
+from frigate.api.defs.app_body import (
+    AppPostLoginBody,
+    AppPostUsersBody,
+    AppPutPasswordBody,
+)
+from frigate.api.defs.tags import Tags
 from frigate.config import AuthConfig, ProxyConfig
 from frigate.const import CONFIG_DIR, JWT_SECRET_ENV_VAR, PASSWORD_HASH_ALGORITHM
 from frigate.models import User
 
 logger = logging.getLogger(__name__)
 
-AuthBp = Blueprint("auth", __name__)
+router = APIRouter(tags=[Tags.auth])
 
 
-def get_remote_addr():
+class RateLimiter:
+    _limit = ""
+
+    def set_limit(self, limit: str):
+        self._limit = limit
+
+    def get_limit(self) -> str:
+        return self._limit
+
+
+rateLimiter = RateLimiter()
+
+
+def get_remote_addr(request: Request):
     route = list(reversed(request.headers.get("x-forwarded-for").split(",")))
     logger.debug(f"IP Route: {[r for r in route]}")
     trusted_proxies = []
-    for proxy in current_app.frigate_config.auth.trusted_proxies:
+    for proxy in request.app.frigate_config.auth.trusted_proxies:
         try:
             network = ipaddress.ip_network(proxy)
         except ValueError:
-            logger.warn(f"Unable to parse trusted network: {proxy}")
+            logger.warning(f"Unable to parse trusted network: {proxy}")
         trusted_proxies.append(network)
 
     # return the first remote address that is not trusted
@@ -68,16 +88,6 @@ def get_remote_addr():
     return request.remote_addr or "127.0.0.1"
 
 
-limiter = Limiter(
-    get_remote_addr,
-    storage_uri="memory://",
-)
-
-
-def get_rate_limit():
-    return current_app.frigate_config.auth.failed_login_rate_limit
-
-
 def get_jwt_secret() -> str:
     jwt_secret = None
     # check env var
@@ -112,7 +122,7 @@ def get_jwt_secret() -> str:
             with open(jwt_secret_file, "w") as f:
                 f.write(str(jwt_secret))
         except Exception:
-            logger.warn(
+            logger.warning(
                 "Unable to write jwt token file to config directory. A new jwt token will be created at each startup."
             )
     else:
@@ -121,18 +131,18 @@ def get_jwt_secret() -> str:
         try:
             jwt_secret = f.readline()
         except Exception:
-            logger.warn(
+            logger.warning(
                 "Unable to read jwt token from .jwt_secret file in config directory. A new jwt token will be created at each startup."
             )
             jwt_secret = secrets.token_hex(64)
 
     if len(jwt_secret) < 64:
-        logger.warn("JWT Secret is recommended to be 64 characters or more")
+        logger.warning("JWT Secret is recommended to be 64 characters or more")
 
     return jwt_secret
 
 
-def hash_password(password, salt=None, iterations=600000):
+def hash_password(password: str, salt=None, iterations=600000):
     if salt is None:
         salt = secrets.token_hex(16)
     assert salt and isinstance(salt, str) and "$" not in salt
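The body of `hash_password` is cut off by the hunk boundary above. A standard PBKDF2 construction consistent with the signature and the salt handling shown might look like the following; this is an assumption, not necessarily Frigate's exact encoding:

```python
import base64
import hashlib
import secrets

def hash_password(password: str, salt=None, iterations=600000):
    # PBKDF2-SHA256, encoded as algorithm$iterations$salt$hash (assumed format).
    if salt is None:
        salt = secrets.token_hex(16)
    assert salt and isinstance(salt, str) and "$" not in salt
    pw_hash = hashlib.pbkdf2_hmac(
        "sha256", password.encode("utf-8"), salt.encode("utf-8"), iterations
    )
    b64_hash = base64.b64encode(pw_hash).decode("ascii")
    return f"pbkdf2_sha256${iterations}${salt}${b64_hash}"
```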
@@ -158,33 +168,36 @@ def create_encoded_jwt(user, expiration, secret):
     return jwt.encode({"alg": "HS256"}, {"sub": user, "exp": expiration}, secret)
 
 
-def set_jwt_cookie(response, cookie_name, encoded_jwt, expiration, secure):
+def set_jwt_cookie(response: Response, cookie_name, encoded_jwt, expiration, secure):
     # TODO: ideally this would set secure as well, but that requires TLS
     response.set_cookie(
-        cookie_name, encoded_jwt, httponly=True, expires=expiration, secure=secure
+        key=cookie_name,
+        value=encoded_jwt,
+        httponly=True,
+        expires=expiration,
+        secure=secure,
     )
 
 
 # Endpoint for use with nginx auth_request
-@AuthBp.route("/auth")
-def auth():
-    auth_config: AuthConfig = current_app.frigate_config.auth
-    proxy_config: ProxyConfig = current_app.frigate_config.proxy
+@router.get("/auth")
+def auth(request: Request):
+    auth_config: AuthConfig = request.app.frigate_config.auth
+    proxy_config: ProxyConfig = request.app.frigate_config.proxy
 
-    success_response = make_response({}, 202)
+    success_response = Response("", status_code=202)
 
     # dont require auth if the request is on the internal port
     # this header is set by Frigate's nginx proxy, so it cant be spoofed
-    if request.headers.get("x-server-port", 0, type=int) == 5000:
+    if int(request.headers.get("x-server-port", default=0)) == 5000:
         return success_response
 
-    fail_response = make_response({}, 401)
+    fail_response = Response("", status_code=401)
 
     # ensure the proxy secret matches if configured
     if (
         proxy_config.auth_secret is not None
-        and request.headers.get("x-proxy-secret", "", type=str)
-        != proxy_config.auth_secret
+        and request.headers.get("x-proxy-secret", "") != proxy_config.auth_secret
     ):
         logger.debug("X-Proxy-Secret header does not match configured secret value")
         return fail_response
@@ -196,7 +209,6 @@ def auth():
     if proxy_config.header_map.user is not None:
         upstream_user_header_value = request.headers.get(
             proxy_config.header_map.user,
-            type=str,
             default="anonymous",
         )
         success_response.headers["remote-user"] = upstream_user_header_value
@@ -207,10 +219,10 @@ def auth():
     # now apply authentication
     fail_response.headers["location"] = "/login"
 
-    JWT_COOKIE_NAME = current_app.frigate_config.auth.cookie_name
-    JWT_COOKIE_SECURE = current_app.frigate_config.auth.cookie_secure
-    JWT_REFRESH = current_app.frigate_config.auth.refresh_time
-    JWT_SESSION_LENGTH = current_app.frigate_config.auth.session_length
+    JWT_COOKIE_NAME = request.app.frigate_config.auth.cookie_name
+    JWT_COOKIE_SECURE = request.app.frigate_config.auth.cookie_secure
+    JWT_REFRESH = request.app.frigate_config.auth.refresh_time
+    JWT_SESSION_LENGTH = request.app.frigate_config.auth.session_length
 
     jwt_source = None
     encoded_token = None
@@ -230,7 +242,7 @@ def auth():
         return fail_response
 
     try:
-        token = jwt.decode(encoded_token, current_app.jwt_token)
+        token = jwt.decode(encoded_token, request.app.jwt_token)
         if "sub" not in token.claims:
             logger.debug("user not set in jwt token")
             return fail_response
@@ -266,7 +278,7 @@ def auth():
             return fail_response
         new_expiration = current_time + JWT_SESSION_LENGTH
         new_encoded_jwt = create_encoded_jwt(
-            user, new_expiration, current_app.jwt_token
+            user, new_expiration, request.app.jwt_token
        )
         set_jwt_cookie(
             success_response,
@@ -283,86 +295,85 @@ def auth():
     return fail_response
 
 
-@AuthBp.route("/profile")
-def profile():
-    username = request.headers.get("remote-user", type=str)
-    return jsonify({"username": username})
+@router.get("/profile")
+def profile(request: Request):
+    username = request.headers.get("remote-user")
+    return JSONResponse(content={"username": username})
 
 
-@AuthBp.route("/logout")
-def logout():
-    auth_config: AuthConfig = current_app.frigate_config.auth
-    response = make_response(redirect("/login", code=303))
+@router.get("/logout")
+def logout(request: Request):
+    auth_config: AuthConfig = request.app.frigate_config.auth
+    response = RedirectResponse("/login", status_code=303)
     response.delete_cookie(auth_config.cookie_name)
     return response
 
 
-@AuthBp.route("/login", methods=["POST"])
-@limiter.limit(get_rate_limit, deduct_when=lambda response: response.status_code == 400)
-def login():
-    JWT_COOKIE_NAME = current_app.frigate_config.auth.cookie_name
-    JWT_COOKIE_SECURE = current_app.frigate_config.auth.cookie_secure
-    JWT_SESSION_LENGTH = current_app.frigate_config.auth.session_length
-    content = request.get_json()
-    user = content["user"]
-    password = content["password"]
+limiter = Limiter(key_func=get_remote_addr)
+
+
+@router.post("/login")
+@limiter.limit(limit_value=rateLimiter.get_limit)
+def login(request: Request, body: AppPostLoginBody):
+    JWT_COOKIE_NAME = request.app.frigate_config.auth.cookie_name
+    JWT_COOKIE_SECURE = request.app.frigate_config.auth.cookie_secure
+    JWT_SESSION_LENGTH = request.app.frigate_config.auth.session_length
+    user = body.user
+    password = body.password
 
     try:
         db_user: User = User.get_by_id(user)
     except DoesNotExist:
-        return make_response({"message": "Login failed"}, 400)
+        return JSONResponse(content={"message": "Login failed"}, status_code=400)
 
     password_hash = db_user.password_hash
     if verify_password(password, password_hash):
         expiration = int(time.time()) + JWT_SESSION_LENGTH
-        encoded_jwt = create_encoded_jwt(user, expiration, current_app.jwt_token)
-        response = make_response({}, 200)
+        encoded_jwt = create_encoded_jwt(user, expiration, request.app.jwt_token)
+        response = Response("", 200)
         set_jwt_cookie(
             response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
         )
         return response
-    return make_response({"message": "Login failed"}, 400)
+    return JSONResponse(content={"message": "Login failed"}, status_code=400)
 
 
-@AuthBp.route("/users")
+@router.get("/users")
 def get_users():
     exports = User.select(User.username).order_by(User.username).dicts().iterator()
-    return jsonify([e for e in exports])
+    return JSONResponse([e for e in exports])
 
 
-@AuthBp.route("/users", methods=["POST"])
-def create_user():
-    HASH_ITERATIONS = current_app.frigate_config.auth.hash_iterations
+@router.post("/users")
+def create_user(request: Request, body: AppPostUsersBody):
+    HASH_ITERATIONS = request.app.frigate_config.auth.hash_iterations
 
-    request_data = request.get_json()
-
-    if not re.match("^[A-Za-z0-9._]+$", request_data.get("username", "")):
-        make_response({"message": "Invalid username"}, 400)
+    if not re.match("^[A-Za-z0-9._]+$", body.username):
+        JSONResponse(content={"message": "Invalid username"}, status_code=400)
 
-    password_hash = hash_password(request_data["password"], iterations=HASH_ITERATIONS)
+    password_hash = hash_password(body.password, iterations=HASH_ITERATIONS)
 
     User.insert(
         {
-            User.username: request_data["username"],
+            User.username: body.username,
             User.password_hash: password_hash,
             User.notification_tokens: [],
         }
     ).execute()
-    return jsonify({"username": request_data["username"]})
+    return JSONResponse(content={"username": body.username})
 
 
-@AuthBp.route("/users/<username>", methods=["DELETE"])
+@router.delete("/users/{username}")
 def delete_user(username: str):
     User.delete_by_id(username)
-    return jsonify({"success": True})
+    return JSONResponse(content={"success": True})
 
 
-@AuthBp.route("/users/<username>/password", methods=["PUT"])
-def update_password(username: str):
-    HASH_ITERATIONS = current_app.frigate_config.auth.hash_iterations
+@router.put("/users/{username}/password")
+def update_password(request: Request, username: str, body: AppPutPasswordBody):
+    HASH_ITERATIONS = request.app.frigate_config.auth.hash_iterations
 
-    request_data = request.get_json()
-
-    password_hash = hash_password(request_data["password"], iterations=HASH_ITERATIONS)
+    password_hash = hash_password(body.password, iterations=HASH_ITERATIONS)
 
     User.set_by_id(
         username,
@@ -370,4 +381,4 @@ def update_password(username: str):
             User.password_hash: password_hash,
         },
     )
-    return jsonify({"success": True})
+    return JSONResponse(content={"success": True})
|
||||
return JSONResponse(content={"success": True})
|
||||
|
||||
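
Aside (not part of the diff): a minimal sketch of how the migrated login flow above can be exercised from a client. The host, port, and `/api` prefix are assumptions based on a typical reverse-proxied deployment, not something this diff establishes.

```python
# Hypothetical client-side check of the FastAPI login endpoint above.
import httpx

with httpx.Client(base_url="http://localhost:5000") as client:
    # AppPostLoginBody expects "user" and "password" fields
    resp = client.post("/api/login", json={"user": "admin", "password": "secret"})
    if resp.status_code == 200:
        # on success the JWT comes back as a cookie, not in the response body
        print("cookies:", dict(resp.cookies))
    else:
        print(resp.json())  # {"message": "Login failed"}
```
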
19  frigate/api/defs/app_body.py  Normal file
@@ -0,0 +1,19 @@
from pydantic import BaseModel


class AppConfigSetBody(BaseModel):
    requires_restart: int = 1


class AppPutPasswordBody(BaseModel):
    password: str


class AppPostUsersBody(BaseModel):
    username: str
    password: str


class AppPostLoginBody(BaseModel):
    user: str
    password: str
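
These body models are what the FastAPI handlers bind their JSON payloads to. A standalone sketch of the mechanism (the throwaway `app` here is illustrative, not Frigate's):

```python
# FastAPI parses and validates the request body into the Pydantic model
# before the handler runs; invalid payloads never reach the handler.
from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import BaseModel


class AppPostUsersBody(BaseModel):
    username: str
    password: str


app = FastAPI()


@app.post("/users")
def create_user(body: AppPostUsersBody):
    # body is already parsed and validated at this point
    return {"username": body.username}


client = TestClient(app)
assert client.post("/users", json={"username": "a", "password": "b"}).status_code == 200
# a missing required field is rejected with a 422 before the handler runs
assert client.post("/users", json={"username": "a"}).status_code == 422
```
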
12  frigate/api/defs/app_query_parameters.py  Normal file
@@ -0,0 +1,12 @@
from typing import Optional

from pydantic import BaseModel


class AppTimelineHourlyQueryParameters(BaseModel):
    cameras: Optional[str] = "all"
    labels: Optional[str] = "all"
    after: Optional[float] = None
    before: Optional[float] = None
    limit: Optional[int] = 200
    timezone: Optional[str] = "utc"
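
A sketch of the query-parameter pattern this PR uses throughout: a Pydantic model injected with `Depends()` maps each field onto a query-string parameter (the route below is illustrative):

```python
from typing import Optional

from fastapi import Depends, FastAPI
from pydantic import BaseModel


class AppTimelineHourlyQueryParameters(BaseModel):
    cameras: Optional[str] = "all"
    limit: Optional[int] = 200


app = FastAPI()


@app.get("/timeline/hourly")
def timeline(params: AppTimelineHourlyQueryParameters = Depends()):
    # handles e.g. GET /timeline/hourly?cameras=front_door&limit=50
    return {"cameras": params.cameras, "limit": params.limit}
```
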
31  frigate/api/defs/events_body.py  Normal file
@@ -0,0 +1,31 @@
from typing import Optional, Union

from pydantic import BaseModel, Field


class EventsSubLabelBody(BaseModel):
    subLabel: str = Field(title="Sub label", max_length=100)
    subLabelScore: Optional[float] = Field(
        title="Score for sub label", default=None, gt=0.0, le=1.0
    )


class EventsDescriptionBody(BaseModel):
    description: Union[str, None] = Field(title="The description of the event")


class EventsCreateBody(BaseModel):
    source_type: Optional[str] = "api"
    sub_label: Optional[str] = None
    score: Optional[int] = 0
    duration: Optional[int] = 30
    include_recording: Optional[bool] = True
    draw: Optional[dict] = {}


class EventsEndBody(BaseModel):
    end_time: Optional[int] = None


class SubmitPlusBody(BaseModel):
    include_annotation: int = Field(default=1)
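
A quick illustration (not from the diff) of what the `Field` constraints on `EventsSubLabelBody` buy: scores outside (0.0, 1.0] are rejected by pydantic at parse time.

```python
from typing import Optional

from pydantic import BaseModel, Field, ValidationError


class EventsSubLabelBody(BaseModel):
    subLabel: str = Field(title="Sub label", max_length=100)
    subLabelScore: Optional[float] = Field(
        title="Score for sub label", default=None, gt=0.0, le=1.0
    )


EventsSubLabelBody(subLabel="delivery", subLabelScore=0.87)  # ok

try:
    EventsSubLabelBody(subLabel="delivery", subLabelScore=1.5)
except ValidationError as e:
    print(e)  # roughly: subLabelScore must be <= 1
```
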
58  frigate/api/defs/events_query_parameters.py  Normal file
@@ -0,0 +1,58 @@
from typing import Optional

from pydantic import BaseModel

DEFAULT_TIME_RANGE = "00:00,24:00"


class EventsQueryParams(BaseModel):
    camera: Optional[str] = "all"
    cameras: Optional[str] = "all"
    label: Optional[str] = "all"
    labels: Optional[str] = "all"
    sub_label: Optional[str] = "all"
    sub_labels: Optional[str] = "all"
    zone: Optional[str] = "all"
    zones: Optional[str] = "all"
    limit: Optional[int] = 100
    after: Optional[float] = None
    before: Optional[float] = None
    time_range: Optional[str] = DEFAULT_TIME_RANGE
    has_clip: Optional[int] = None
    has_snapshot: Optional[int] = None
    in_progress: Optional[int] = None
    include_thumbnails: Optional[int] = 1
    favorites: Optional[int] = None
    min_score: Optional[float] = None
    max_score: Optional[float] = None
    is_submitted: Optional[int] = None
    min_length: Optional[float] = None
    max_length: Optional[float] = None
    sort: Optional[str] = None
    timezone: Optional[str] = "utc"


class EventsSearchQueryParams(BaseModel):
    query: Optional[str] = None
    event_id: Optional[str] = None
    search_type: Optional[str] = "thumbnail"
    include_thumbnails: Optional[int] = 1
    limit: Optional[int] = 50
    cameras: Optional[str] = "all"
    labels: Optional[str] = "all"
    zones: Optional[str] = "all"
    after: Optional[float] = None
    before: Optional[float] = None
    time_range: Optional[str] = DEFAULT_TIME_RANGE
    has_clip: Optional[bool] = None
    has_snapshot: Optional[bool] = None
    timezone: Optional[str] = "utc"
    min_score: Optional[float] = None
    max_score: Optional[float] = None
    sort: Optional[str] = None


class EventsSummaryQueryParams(BaseModel):
    timezone: Optional[str] = "utc"
    has_clip: Optional[int] = None
    has_snapshot: Optional[int] = None
6  frigate/api/defs/generic_response.py  Normal file
@@ -0,0 +1,6 @@
from pydantic import BaseModel


class GenericResponse(BaseModel):
    success: bool
    message: str
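
A sketch of what declaring `response_model=GenericResponse` does: a plain dict returned from a handler is validated and filtered against the model, and the schema appears in the generated OpenAPI docs.

```python
from fastapi import FastAPI
from pydantic import BaseModel


class GenericResponse(BaseModel):
    success: bool
    message: str


app = FastAPI()


@app.post("/reviews/viewed", response_model=GenericResponse)
def set_multiple_reviewed():
    # any keys not in the model would be stripped from the response
    return {"success": True, "message": "Reviewed multiple items", "extra": 1}
```

Note that where this PR's handlers return a `JSONResponse` directly, FastAPI skips this validation step, so the model serves mainly as documentation there.
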
42  frigate/api/defs/media_query_parameters.py  Normal file
@@ -0,0 +1,42 @@
from enum import Enum
from typing import Optional

from pydantic import BaseModel


class Extension(str, Enum):
    webp = "webp"
    png = "png"
    jpg = "jpg"
    jpeg = "jpeg"


class MediaLatestFrameQueryParams(BaseModel):
    bbox: Optional[int] = None
    timestamp: Optional[int] = None
    zones: Optional[int] = None
    mask: Optional[int] = None
    motion: Optional[int] = None
    regions: Optional[int] = None
    quality: Optional[int] = 70
    height: Optional[int] = None


class MediaEventsSnapshotQueryParams(BaseModel):
    download: Optional[bool] = False
    timestamp: Optional[int] = None
    bbox: Optional[int] = None
    crop: Optional[int] = None
    height: Optional[int] = None
    quality: Optional[int] = 70


class MediaMjpegFeedQueryParams(BaseModel):
    fps: int = 3
    height: int = 360
    bbox: Optional[int] = None
    timestamp: Optional[int] = None
    zones: Optional[int] = None
    mask: Optional[int] = None
    motion: Optional[int] = None
    regions: Optional[int] = None
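
The `Extension` enum is worth a quick sketch: used as a path parameter, FastAPI rejects any extension outside webp/png/jpg/jpeg with a 422 before the handler runs. The route shape below is illustrative, not quoted from the (suppressed) media.py diff.

```python
from enum import Enum

from fastapi import FastAPI


class Extension(str, Enum):
    webp = "webp"
    png = "png"
    jpg = "jpg"
    jpeg = "jpeg"


app = FastAPI()


@app.get("/{camera_name}/latest.{extension}")
def latest_frame(camera_name: str, extension: Extension):
    # extension is guaranteed to be one of the enum members here
    return {"camera": camera_name, "format": extension.value}
```
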
9  frigate/api/defs/regenerate_query_parameters.py  Normal file
@@ -0,0 +1,9 @@
from typing import Optional

from pydantic import BaseModel

from frigate.events.types import RegenerateDescriptionEnum


class RegenerateQueryParameters(BaseModel):
    source: Optional[RegenerateDescriptionEnum] = RegenerateDescriptionEnum.thumbnails
6  frigate/api/defs/review_body.py  Normal file
@@ -0,0 +1,6 @@
from pydantic import BaseModel, conlist, constr


class ReviewModifyMultipleBody(BaseModel):
    # List of strings with at least one element, each element at least one char
    ids: conlist(constr(min_length=1), min_length=1)
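
Not from the diff, but a quick check of what `ReviewModifyMultipleBody` accepts: `conlist`/`constr` enforce "at least one id, each at least one character", replacing the hand-rolled `if not list_of_ids` checks removed from review.py below.

```python
from pydantic import BaseModel, ValidationError, conlist, constr


class ReviewModifyMultipleBody(BaseModel):
    ids: conlist(constr(min_length=1), min_length=1)


ReviewModifyMultipleBody(ids=["abc123"])  # ok

for bad in ([], [""]):
    try:
        ReviewModifyMultipleBody(ids=bad)
    except ValidationError:
        print(f"rejected: {bad!r}")
```
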
31  frigate/api/defs/review_query_parameters.py  Normal file
@@ -0,0 +1,31 @@
from typing import Union

from pydantic import BaseModel
from pydantic.json_schema import SkipJsonSchema

from frigate.review.maintainer import SeverityEnum


class ReviewQueryParams(BaseModel):
    cameras: str = "all"
    labels: str = "all"
    zones: str = "all"
    reviewed: int = 0
    limit: Union[int, SkipJsonSchema[None]] = None
    severity: Union[SeverityEnum, SkipJsonSchema[None]] = None
    before: Union[float, SkipJsonSchema[None]] = None
    after: Union[float, SkipJsonSchema[None]] = None


class ReviewSummaryQueryParams(BaseModel):
    cameras: str = "all"
    labels: str = "all"
    zones: str = "all"
    timezone: str = "utc"


class ReviewActivityMotionQueryParams(BaseModel):
    cameras: str = "all"
    before: Union[float, SkipJsonSchema[None]] = None
    after: Union[float, SkipJsonSchema[None]] = None
    scale: int = 30
43  frigate/api/defs/review_responses.py  Normal file
@@ -0,0 +1,43 @@
from datetime import datetime
from typing import Dict

from pydantic import BaseModel, Json

from frigate.review.maintainer import SeverityEnum


class ReviewSegmentResponse(BaseModel):
    id: str
    camera: str
    start_time: datetime
    end_time: datetime
    has_been_reviewed: bool
    severity: SeverityEnum
    thumb_path: str
    data: Json


class Last24HoursReview(BaseModel):
    reviewed_alert: int
    reviewed_detection: int
    total_alert: int
    total_detection: int


class DayReview(BaseModel):
    day: datetime
    reviewed_alert: int
    reviewed_detection: int
    total_alert: int
    total_detection: int


class ReviewSummaryResponse(BaseModel):
    last24Hours: Last24HoursReview
    root: Dict[str, DayReview]


class ReviewActivityMotionResponse(BaseModel):
    start_time: int
    motion: float
    camera: str
13  frigate/api/defs/tags.py  Normal file
@@ -0,0 +1,13 @@
from enum import Enum


class Tags(Enum):
    app = "App"
    preview = "Preview"
    logs = "Logs"
    media = "Media"
    notifications = "Notifications"
    review = "Review"
    export = "Export"
    events = "Events"
    auth = "Auth"
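
A sketch of how the `Tags` enum is consumed: each router in this PR passes a tag at construction, which groups its endpoints under that heading in the generated Swagger UI at /docs.

```python
from enum import Enum

from fastapi import APIRouter, FastAPI


class Tags(Enum):
    review = "Review"


router = APIRouter(tags=[Tags.review])


@router.get("/review")
def review():
    return []


app = FastAPI()
app.include_router(router)  # the endpoint is listed under "Review" in /docs
```
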
File diff suppressed because it is too large
@@ -1,83 +1,111 @@
 """Export apis."""

 import logging
 import random
 import string
 from pathlib import Path
 from typing import Optional

 import psutil
-from flask import (
-    Blueprint,
-    current_app,
-    jsonify,
-    make_response,
-    request,
-)
+from fastapi import APIRouter, Request
+from fastapi.responses import JSONResponse
 from peewee import DoesNotExist

+from frigate.api.defs.tags import Tags
 from frigate.const import EXPORT_DIR
-from frigate.models import Export, Recordings
-from frigate.record.export import PlaybackFactorEnum, RecordingExporter
+from frigate.models import Export, Previews, Recordings
+from frigate.record.export import (
+    PlaybackFactorEnum,
+    PlaybackSourceEnum,
+    RecordingExporter,
+)

 logger = logging.getLogger(__name__)

-ExportBp = Blueprint("exports", __name__)
+router = APIRouter(tags=[Tags.export])


-@ExportBp.route("/exports")
+@router.get("/exports")
 def get_exports():
     exports = Export.select().order_by(Export.date.desc()).dicts().iterator()
-    return jsonify([e for e in exports])
+    return JSONResponse(content=[e for e in exports])


-@ExportBp.route(
-    "/export/<camera_name>/start/<int:start_time>/end/<int:end_time>", methods=["POST"]
-)
-@ExportBp.route(
-    "/export/<camera_name>/start/<float:start_time>/end/<float:end_time>",
-    methods=["POST"],
-)
-def export_recording(camera_name: str, start_time, end_time):
-    if not camera_name or not current_app.frigate_config.cameras.get(camera_name):
-        return make_response(
-            jsonify(
+@router.post("/export/{camera_name}/start/{start_time}/end/{end_time}")
+def export_recording(
+    request: Request,
+    camera_name: str,
+    start_time: float,
+    end_time: float,
+    body: dict = None,
+):
+    if not camera_name or not request.app.frigate_config.cameras.get(camera_name):
+        return JSONResponse(
+            content=(
                 {"success": False, "message": f"{camera_name} is not a valid camera."}
             ),
-            404,
+            status_code=404,
         )

-    json: dict[str, any] = request.get_json(silent=True) or {}
+    json: dict[str, any] = body or {}
     playback_factor = json.get("playback", "realtime")
+    playback_source = json.get("source", "recordings")
     friendly_name: Optional[str] = json.get("name")

     if len(friendly_name or "") > 256:
-        return make_response(
-            jsonify({"success": False, "message": "File name is too long."}),
-            401,
+        return JSONResponse(
+            content=({"success": False, "message": "File name is too long."}),
+            status_code=401,
         )

     existing_image = json.get("image_path")

-    recordings_count = (
-        Recordings.select()
-        .where(
-            Recordings.start_time.between(start_time, end_time)
-            | Recordings.end_time.between(start_time, end_time)
-            | ((start_time > Recordings.start_time) & (end_time < Recordings.end_time))
-        )
-        .where(Recordings.camera == camera_name)
-        .count()
-    )
-
-    if recordings_count <= 0:
-        return make_response(
-            jsonify(
-                {"success": False, "message": "No recordings found for time range"}
-            ),
-            400,
+    if playback_source == "recordings":
+        recordings_count = (
+            Recordings.select()
+            .where(
+                Recordings.start_time.between(start_time, end_time)
+                | Recordings.end_time.between(start_time, end_time)
+                | (
+                    (start_time > Recordings.start_time)
+                    & (end_time < Recordings.end_time)
+                )
+            )
+            .where(Recordings.camera == camera_name)
+            .count()
         )
+
+        if recordings_count <= 0:
+            return JSONResponse(
+                content=(
+                    {"success": False, "message": "No recordings found for time range"}
+                ),
+                status_code=400,
+            )
+    else:
+        previews_count = (
+            Previews.select()
+            .where(
+                Previews.start_time.between(start_time, end_time)
+                | Previews.end_time.between(start_time, end_time)
+                | ((start_time > Previews.start_time) & (end_time < Previews.end_time))
+            )
+            .where(Previews.camera == camera_name)
+            .count()
+        )
+
+        if previews_count <= 0:
+            return JSONResponse(
+                content=(
+                    {"success": False, "message": "No previews found for time range"}
+                ),
+                status_code=400,
+            )

     export_id = f"{camera_name}_{''.join(random.choices(string.ascii_lowercase + string.digits, k=6))}"
     exporter = RecordingExporter(
-        current_app.frigate_config,
+        request.app.frigate_config,
         export_id,
         camera_name,
         friendly_name,
         existing_image,
@@ -88,60 +116,66 @@ def export_recording(camera_name: str, start_time, end_time):
         if playback_factor in PlaybackFactorEnum.__members__.values()
         else PlaybackFactorEnum.realtime
         ),
+        (
+            PlaybackSourceEnum[playback_source]
+            if playback_source in PlaybackSourceEnum.__members__.values()
+            else PlaybackSourceEnum.recordings
+        ),
     )
     exporter.start()
-    return make_response(
-        jsonify(
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Starting export of recording.",
                 "export_id": export_id,
             }
         ),
-        200,
+        status_code=200,
     )


-@ExportBp.route("/export/<id>/<new_name>", methods=["PATCH"])
-def export_rename(id, new_name: str):
+@router.patch("/export/{event_id}/{new_name}")
+def export_rename(event_id: str, new_name: str):
     try:
-        export: Export = Export.get(Export.id == id)
+        export: Export = Export.get(Export.id == event_id)
     except DoesNotExist:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": False,
                     "message": "Export not found.",
                 }
             ),
-            404,
+            status_code=404,
         )

     export.name = new_name
     export.save()
-    return make_response(
-        jsonify(
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Successfully renamed export.",
             }
         ),
-        200,
+        status_code=200,
     )


-@ExportBp.route("/export/<id>", methods=["DELETE"])
-def export_delete(id: str):
+@router.delete("/export/{event_id}")
+def export_delete(event_id: str):
     try:
-        export: Export = Export.get(Export.id == id)
+        export: Export = Export.get(Export.id == event_id)
     except DoesNotExist:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {
                     "success": False,
                     "message": "Export not found.",
                 }
             ),
-            404,
+            status_code=404,
         )

     files_in_use = []
@@ -158,11 +192,11 @@ def export_delete(id: str):
             continue

     if export.video_path.split("/")[-1] in files_in_use:
-        return make_response(
-            jsonify(
+        return JSONResponse(
+            content=(
                 {"success": False, "message": "Can not delete in progress export."}
             ),
-            400,
+            status_code=400,
         )

     Path(export.video_path).unlink(missing_ok=True)
@@ -171,12 +205,12 @@ def export_delete(id: str):
         Path(export.thumb_path).unlink(missing_ok=True)

     export.delete_instance()
-    return make_response(
-        jsonify(
+    return JSONResponse(
+        content=(
             {
                 "success": True,
                 "message": "Successfully deleted export.",
             }
         ),
-        200,
+        status_code=200,
    )
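
A hypothetical call against the migrated export endpoint; the payload keys mirror the handler above, while the host, port, and `/api` prefix are assumptions about the deployment.

```python
import httpx

resp = httpx.post(
    "http://localhost:5000/api/export/front_door/start/1720000000/end/1720000600",
    json={
        "playback": "realtime",  # PlaybackFactorEnum
        "source": "previews",    # PlaybackSourceEnum, new in this PR
        "name": "driveway clip",
    },
)
print(resp.status_code, resp.json())  # 200 with an "export_id" on success
```
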
117  frigate/api/fastapi_app.py  Normal file
@@ -0,0 +1,117 @@
import logging
from typing import Optional

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from playhouse.sqliteq import SqliteQueueDatabase
from slowapi import _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.middleware import SlowAPIMiddleware
from starlette_context import middleware, plugins
from starlette_context.plugins import Plugin

from frigate.api import app as main_app
from frigate.api import auth, event, export, media, notification, preview, review
from frigate.api.auth import get_jwt_secret, limiter
from frigate.comms.event_metadata_updater import (
    EventMetadataPublisher,
)
from frigate.config import FrigateConfig
from frigate.embeddings import EmbeddingsContext
from frigate.events.external import ExternalEventProcessor
from frigate.ptz.onvif import OnvifController
from frigate.stats.emitter import StatsEmitter
from frigate.storage import StorageMaintainer

logger = logging.getLogger(__name__)


def check_csrf(request: Request):
    if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]:
        pass
    if "origin" in request.headers and "x-csrf-token" not in request.headers:
        return JSONResponse(
            content={"success": False, "message": "Missing CSRF header"},
            status_code=401,
        )


# Used to retrieve the remote-user header: https://starlette-context.readthedocs.io/en/latest/plugins.html#easy-mode
class RemoteUserPlugin(Plugin):
    key = "Remote-User"


def create_fastapi_app(
    frigate_config: FrigateConfig,
    database: SqliteQueueDatabase,
    embeddings: Optional[EmbeddingsContext],
    detected_frames_processor,
    storage_maintainer: StorageMaintainer,
    onvif: OnvifController,
    external_processor: ExternalEventProcessor,
    stats_emitter: StatsEmitter,
    event_metadata_updater: EventMetadataPublisher,
):
    logger.info("Starting FastAPI app")
    app = FastAPI(
        debug=False,
        swagger_ui_parameters={"apisSorter": "alpha", "operationsSorter": "alpha"},
    )

    # update the request_address with the x-forwarded-for header from nginx
    # https://starlette-context.readthedocs.io/en/latest/plugins.html#forwarded-for
    app.add_middleware(
        middleware.ContextMiddleware,
        plugins=(plugins.ForwardedForPlugin(),),
    )

    # Middleware to connect to DB before and close connection after request
    # https://github.com/fastapi/full-stack-fastapi-template/issues/224#issuecomment-737423886
    # https://fastapi.tiangolo.com/tutorial/middleware/#before-and-after-the-response
    @app.middleware("http")
    async def frigate_middleware(request: Request, call_next):
        # Before request
        check_csrf(request)
        if database.is_closed():
            database.connect()

        response = await call_next(request)

        # After request https://stackoverflow.com/a/75487519
        if not database.is_closed():
            database.close()
        return response

    @app.on_event("startup")
    async def startup():
        logger.info("FastAPI started")

    # Rate limiter (used for login endpoint)
    auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit or "")
    app.state.limiter = limiter
    app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
    app.add_middleware(SlowAPIMiddleware)

    # Routes
    # Order of include_router matters: https://fastapi.tiangolo.com/tutorial/path-params/#order-matters
    app.include_router(auth.router)
    app.include_router(review.router)
    app.include_router(main_app.router)
    app.include_router(preview.router)
    app.include_router(notification.router)
    app.include_router(export.router)
    app.include_router(event.router)
    app.include_router(media.router)
    # App Properties
    app.frigate_config = frigate_config
    app.embeddings = embeddings
    app.detected_frames_processor = detected_frames_processor
    app.storage_maintainer = storage_maintainer
    app.camera_error_image = None
    app.onvif = onvif
    app.stats_emitter = stats_emitter
    app.event_metadata_updater = event_metadata_updater
    app.external_processor = external_processor
    app.jwt_token = get_jwt_secret() if frigate_config.auth.enabled else None

    return app
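
A rough sketch of how the returned app can be served. frigate/app.py in this PR imports uvicorn for this purpose, though its exact invocation isn't shown here; the port and the `None` placeholders standing in for the real subsystem objects are assumptions.

```python
import uvicorn

app = create_fastapi_app(
    frigate_config,  # FrigateConfig, assumed already parsed
    database,        # SqliteQueueDatabase instance
    embeddings=None,
    detected_frames_processor=None,
    storage_maintainer=None,
    onvif=None,
    external_processor=None,
    stats_emitter=None,
    event_metadata_updater=None,
)
uvicorn.run(app, host="127.0.0.1", port=5001, log_level="info")
```
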
1283  frigate/api/media.py
File diff suppressed because it is too large
@@ -4,62 +4,62 @@ import logging
 import os

 from cryptography.hazmat.primitives import serialization
-from flask import (
-    Blueprint,
-    current_app,
-    jsonify,
-    make_response,
-    request,
-)
+from fastapi import APIRouter, Request
+from fastapi.responses import JSONResponse
 from peewee import DoesNotExist
 from py_vapid import Vapid01, utils

+from frigate.api.defs.tags import Tags
 from frigate.const import CONFIG_DIR
 from frigate.models import User

 logger = logging.getLogger(__name__)

-NotificationBp = Blueprint("notifications", __name__)
+router = APIRouter(tags=[Tags.notifications])


-@NotificationBp.route("/notifications/pubkey", methods=["GET"])
-def get_vapid_pub_key():
-    if not current_app.frigate_config.notifications.enabled:
-        return make_response(
-            jsonify({"success": False, "message": "Notifications are not enabled."}),
-            400,
+@router.get("/notifications/pubkey")
+def get_vapid_pub_key(request: Request):
+    if not request.app.frigate_config.notifications.enabled:
+        return JSONResponse(
+            content=({"success": False, "message": "Notifications are not enabled."}),
+            status_code=400,
         )

     key = Vapid01.from_file(os.path.join(CONFIG_DIR, "notifications.pem"))
     raw_pub = key.public_key.public_bytes(
         serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint
     )
-    return jsonify(utils.b64urlencode(raw_pub)), 200
+    return JSONResponse(content=utils.b64urlencode(raw_pub), status_code=200)


-@NotificationBp.route("/notifications/register", methods=["POST"])
-def register_notifications():
-    if current_app.frigate_config.auth.enabled:
-        username = request.headers.get("remote-user", type=str) or "admin"
+@router.post("/notifications/register")
+def register_notifications(request: Request, body: dict = None):
+    if request.app.frigate_config.auth.enabled:
+        # FIXME: For FastAPI the remote-user is not being populated
+        username = request.headers.get("remote-user") or "admin"
     else:
         username = "admin"

-    json: dict[str, any] = request.get_json(silent=True) or {}
+    json: dict[str, any] = body or {}
     sub = json.get("sub")

     if not sub:
-        return jsonify(
-            {"success": False, "message": "Subscription must be provided."}
-        ), 400
+        return JSONResponse(
+            content={"success": False, "message": "Subscription must be provided."},
+            status_code=400,
+        )

     try:
         User.update(notification_tokens=User.notification_tokens.append(sub)).where(
             User.username == username
         ).execute()
-        return make_response(
-            jsonify({"success": True, "message": "Successfully saved token."}), 200
+        return JSONResponse(
+            content=({"success": True, "message": "Successfully saved token."}),
+            status_code=200,
         )
     except DoesNotExist:
-        return make_response(
-            jsonify({"success": False, "message": "Could not find user."}), 404
+        return JSONResponse(
+            content=({"success": False, "message": "Could not find user."}),
+            status_code=404,
         )
@@ -5,23 +5,21 @@ import os
 from datetime import datetime, timedelta, timezone

 import pytz
-from flask import (
-    Blueprint,
-    jsonify,
-    make_response,
-)
+from fastapi import APIRouter
+from fastapi.responses import JSONResponse

+from frigate.api.defs.tags import Tags
 from frigate.const import CACHE_DIR, PREVIEW_FRAME_TYPE
 from frigate.models import Previews

 logger = logging.getLogger(__name__)

-PreviewBp = Blueprint("previews", __name__)
+router = APIRouter(tags=[Tags.preview])


-@PreviewBp.route("/preview/<camera_name>/start/<int:start_ts>/end/<int:end_ts>")
-@PreviewBp.route("/preview/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
-def preview_ts(camera_name, start_ts, end_ts):
+@router.get("/preview/{camera_name}/start/{start_ts}/end/{end_ts}")
+def preview_ts(camera_name: str, start_ts: float, end_ts: float):
     """Get all mp4 previews relevant for time period."""
     if camera_name != "all":
         camera_clause = Previews.camera == camera_name
@@ -62,24 +60,20 @@ def preview_ts(camera_name, start_ts, end_ts):
     )

     if not clips:
-        return make_response(
-            jsonify(
-                {
-                    "success": False,
-                    "message": "No previews found.",
-                }
-            ),
-            404,
+        return JSONResponse(
+            content={
+                "success": False,
+                "message": "No previews found.",
+            },
+            status_code=404,
         )

-    return make_response(jsonify(clips), 200)
+    return JSONResponse(content=clips, status_code=200)


-@PreviewBp.route("/preview/<year_month>/<int:day>/<int:hour>/<camera_name>/<tz_name>")
-@PreviewBp.route(
-    "/preview/<year_month>/<float:day>/<float:hour>/<camera_name>/<tz_name>"
-)
-def preview_hour(year_month, day, hour, camera_name, tz_name):
+@router.get("/preview/{year_month}/{day}/{hour}/{camera_name}/{tz_name}")
+def preview_hour(year_month: str, day: int, hour: int, camera_name: str, tz_name: str):
     """Get all mp4 previews relevant for time period given the timezone"""
     parts = year_month.split("-")
     start_date = (
         datetime(int(parts[0]), int(parts[1]), int(day), int(hour), tzinfo=timezone.utc)
@@ -92,11 +86,8 @@ def preview_hour(year_month, day, hour, camera_name, tz_name):
     return preview_ts(camera_name, start_ts, end_ts)


-@PreviewBp.route("/preview/<camera_name>/start/<int:start_ts>/end/<int:end_ts>/frames")
-@PreviewBp.route(
-    "/preview/<camera_name>/start/<float:start_ts>/end/<float:end_ts>/frames"
-)
-def get_preview_frames_from_cache(camera_name: str, start_ts, end_ts):
+@router.get("/preview/{camera_name}/start/{start_ts}/end/{end_ts}/frames")
+def get_preview_frames_from_cache(camera_name: str, start_ts: float, end_ts: float):
     """Get list of cached preview frames"""
     preview_dir = os.path.join(CACHE_DIR, "preview_frames")
     file_start = f"preview_{camera_name}"
@@ -116,4 +107,7 @@ def get_preview_frames_from_cache(camera_name: str, start_ts, end_ts):

         selected_previews.append(file)

-    return jsonify(selected_previews)
+    return JSONResponse(
+        content=selected_previews,
+        status_code=200,
+    )
@@ -1,35 +1,50 @@
 """Review apis."""

+import datetime
 import logging
-from datetime import datetime, timedelta
 from functools import reduce
 from pathlib import Path

 import pandas as pd
-from flask import Blueprint, jsonify, make_response, request
+from fastapi import APIRouter
+from fastapi.params import Depends
+from fastapi.responses import JSONResponse
 from peewee import Case, DoesNotExist, fn, operator
 from playhouse.shortcuts import model_to_dict

+from frigate.api.defs.generic_response import GenericResponse
+from frigate.api.defs.review_body import ReviewModifyMultipleBody
+from frigate.api.defs.review_query_parameters import (
+    ReviewActivityMotionQueryParams,
+    ReviewQueryParams,
+    ReviewSummaryQueryParams,
+)
+from frigate.api.defs.review_responses import (
+    ReviewActivityMotionResponse,
+    ReviewSegmentResponse,
+    ReviewSummaryResponse,
+)
+from frigate.api.defs.tags import Tags
 from frigate.models import Recordings, ReviewSegment
 from frigate.util.builtin import get_tz_modifiers

 logger = logging.getLogger(__name__)

-ReviewBp = Blueprint("reviews", __name__)
+router = APIRouter(tags=[Tags.review])


-@ReviewBp.route("/review")
-def review():
-    cameras = request.args.get("cameras", "all")
-    labels = request.args.get("labels", "all")
-    zones = request.args.get("zones", "all")
-    reviewed = request.args.get("reviewed", type=int, default=0)
-    limit = request.args.get("limit", type=int, default=None)
-    severity = request.args.get("severity", None)
-
-    before = request.args.get("before", type=float, default=datetime.now().timestamp())
-    after = request.args.get(
-        "after", type=float, default=(datetime.now() - timedelta(hours=24)).timestamp()
+@router.get("/review", response_model=list[ReviewSegmentResponse])
+def review(params: ReviewQueryParams = Depends()):
+    cameras = params.cameras
+    labels = params.labels
+    zones = params.zones
+    reviewed = params.reviewed
+    limit = params.limit
+    severity = params.severity
+    before = params.before or datetime.datetime.now().timestamp()
+    after = (
+        params.after
+        or (datetime.datetime.now() - datetime.timedelta(hours=24)).timestamp()
     )

     clauses = [
@@ -91,39 +106,18 @@ def review():
         .iterator()
     )

-    return jsonify([r for r in review])
+    return JSONResponse(content=[r for r in review])


-@ReviewBp.route("/review/event/<id>")
-def get_review_from_event(id: str):
-    try:
-        return model_to_dict(
-            ReviewSegment.get(
-                ReviewSegment.data["detections"].cast("text") % f'*"{id}"*'
-            )
-        )
-    except DoesNotExist:
-        return "Review item not found", 404
-
-
-@ReviewBp.route("/review/<id>")
-def get_review(id: str):
-    try:
-        return model_to_dict(ReviewSegment.get(ReviewSegment.id == id))
-    except DoesNotExist:
-        return "Review item not found", 404
-
-
-@ReviewBp.route("/review/summary")
-def review_summary():
-    tz_name = request.args.get("timezone", default="utc", type=str)
-    hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(tz_name)
-    day_ago = (datetime.now() - timedelta(hours=24)).timestamp()
-    month_ago = (datetime.now() - timedelta(days=30)).timestamp()
+@router.get("/review/summary", response_model=ReviewSummaryResponse)
+def review_summary(params: ReviewSummaryQueryParams = Depends()):
+    hour_modifier, minute_modifier, seconds_offset = get_tz_modifiers(params.timezone)
+    day_ago = (datetime.datetime.now() - datetime.timedelta(hours=24)).timestamp()
+    month_ago = (datetime.datetime.now() - datetime.timedelta(days=30)).timestamp()

-    cameras = request.args.get("cameras", "all")
-    labels = request.args.get("labels", "all")
-    zones = request.args.get("zones", "all")
+    cameras = params.cameras
+    labels = params.labels
+    zones = params.zones

     clauses = [(ReviewSegment.start_time > day_ago)]

@@ -186,18 +180,6 @@ def review_summary():
                         0,
                     )
                 ).alias("reviewed_detection"),
-                fn.SUM(
-                    Case(
-                        None,
-                        [
-                            (
-                                (ReviewSegment.severity == "significant_motion"),
-                                ReviewSegment.has_been_reviewed,
-                            )
-                        ],
-                        0,
-                    )
-                ).alias("reviewed_motion"),
                 fn.SUM(
                     Case(
                         None,
@@ -222,18 +204,6 @@ def review_summary():
                         0,
                     )
                 ).alias("total_detection"),
-                fn.SUM(
-                    Case(
-                        None,
-                        [
-                            (
-                                (ReviewSegment.severity == "significant_motion"),
-                                1,
-                            )
-                        ],
-                        0,
-                    )
-                ).alias("total_motion"),
             )
             .where(reduce(operator.and_, clauses))
             .dicts()
@@ -295,18 +265,6 @@ def review_summary():
                         0,
                     )
                 ).alias("reviewed_detection"),
-                fn.SUM(
-                    Case(
-                        None,
-                        [
-                            (
-                                (ReviewSegment.severity == "significant_motion"),
-                                ReviewSegment.has_been_reviewed,
-                            )
-                        ],
-                        0,
-                    )
-                ).alias("reviewed_motion"),
                 fn.SUM(
                     Case(
                         None,
@@ -331,18 +289,6 @@ def review_summary():
                         0,
                     )
                 ).alias("total_detection"),
-                fn.SUM(
-                    Case(
-                        None,
-                        [
-                            (
-                                (ReviewSegment.severity == "significant_motion"),
-                                1,
-                            )
-                        ],
-                        0,
-                    )
-                ).alias("total_motion"),
             )
             .where(reduce(operator.and_, clauses))
             .group_by(
@@ -358,55 +304,24 @@ def review_summary():
     for e in last_month.dicts().iterator():
         data[e["day"]] = e

-    return jsonify(data)
+    return JSONResponse(content=data)


-@ReviewBp.route("/reviews/viewed", methods=("POST",))
-def set_multiple_reviewed():
-    json: dict[str, any] = request.get_json(silent=True) or {}
-    list_of_ids = json.get("ids", "")
-
-    if not list_of_ids or len(list_of_ids) == 0:
-        return make_response(
-            jsonify({"success": False, "message": "Not a valid list of ids"}), 404
-        )
-
+@router.post("/reviews/viewed", response_model=GenericResponse)
+def set_multiple_reviewed(body: ReviewModifyMultipleBody):
     ReviewSegment.update(has_been_reviewed=True).where(
-        ReviewSegment.id << list_of_ids
+        ReviewSegment.id << body.ids
     ).execute()

-    return make_response(
-        jsonify({"success": True, "message": "Reviewed multiple items"}), 200
+    return JSONResponse(
+        content=({"success": True, "message": "Reviewed multiple items"}),
+        status_code=200,
     )


-@ReviewBp.route("/review/<id>/viewed", methods=("DELETE",))
-def set_not_reviewed(id):
-    try:
-        review: ReviewSegment = ReviewSegment.get(ReviewSegment.id == id)
-    except DoesNotExist:
-        return make_response(
-            jsonify({"success": False, "message": "Review " + id + " not found"}), 404
-        )
-
-    review.has_been_reviewed = False
-    review.save()
-
-    return make_response(
-        jsonify({"success": True, "message": "Reviewed " + id + " not viewed"}), 200
-    )
-
-
-@ReviewBp.route("/reviews/delete", methods=("POST",))
-def delete_reviews():
-    json: dict[str, any] = request.get_json(silent=True) or {}
-    list_of_ids = json.get("ids", "")
-
-    if not list_of_ids or len(list_of_ids) == 0:
-        return make_response(
-            jsonify({"success": False, "message": "Not a valid list of ids"}), 404
-        )
-
+@router.post("/reviews/delete", response_model=GenericResponse)
+def delete_reviews(body: ReviewModifyMultipleBody):
+    list_of_ids = body.ids
     reviews = (
         ReviewSegment.select(
             ReviewSegment.camera,
@@ -446,18 +361,25 @@ def delete_reviews():
     Recordings.delete().where(Recordings.id << recording_ids).execute()
     ReviewSegment.delete().where(ReviewSegment.id << list_of_ids).execute()

-    return make_response(jsonify({"success": True, "message": "Delete reviews"}), 200)
+    return JSONResponse(
+        content=({"success": True, "message": "Delete reviews"}), status_code=200
+    )


-@ReviewBp.route("/review/activity/motion")
-def motion_activity():
+@router.get(
+    "/review/activity/motion", response_model=list[ReviewActivityMotionResponse]
+)
+def motion_activity(params: ReviewActivityMotionQueryParams = Depends()):
     """Get motion and audio activity."""
-    cameras = request.args.get("cameras", "all")
-    before = request.args.get("before", type=float, default=datetime.now().timestamp())
-    after = request.args.get(
-        "after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
+    cameras = params.cameras
+    before = params.before or datetime.datetime.now().timestamp()
+    after = (
+        params.after
+        or (datetime.datetime.now() - datetime.timedelta(hours=1)).timestamp()
     )
+    # get scale in seconds
+    scale = params.scale

     clauses = [(Recordings.start_time > after) & (Recordings.end_time < before)]
     clauses.append((Recordings.motion > 0))

@@ -477,15 +399,12 @@ def motion_activity():
         .iterator()
     )

-    # get scale in seconds
-    scale = request.args.get("scale", type=int, default=30)
-
     # resample data using pandas to get activity on scaled basis
     df = pd.DataFrame(data, columns=["start_time", "motion", "camera"])

     if df.empty:
         logger.warning("No motion data found for the requested time range")
-        return jsonify([])
+        return JSONResponse(content=[])

     df = df.astype(dtype={"motion": "float32"})

@@ -520,68 +439,57 @@ def motion_activity():
     # change types for output
     df.index = df.index.astype(int) // (10**9)
     normalized = df.reset_index().to_dict("records")
-    return jsonify(normalized)
+    return JSONResponse(content=normalized)


-@ReviewBp.route("/review/activity/audio")
-def audio_activity():
-    """Get motion and audio activity."""
-    cameras = request.args.get("cameras", "all")
-    before = request.args.get("before", type=float, default=datetime.now().timestamp())
-    after = request.args.get(
-        "after", type=float, default=(datetime.now() - timedelta(hours=1)).timestamp()
-    )
-
-    clauses = [(Recordings.start_time > after) & (Recordings.end_time < before)]
-
-    if cameras != "all":
-        camera_list = cameras.split(",")
-        clauses.append((Recordings.camera << camera_list))
-
-    all_recordings: list[Recordings] = (
-        Recordings.select(
-            Recordings.start_time,
-            Recordings.duration,
-            Recordings.objects,
-            Recordings.dBFS,
-        )
-        .where(reduce(operator.and_, clauses))
-        .order_by(Recordings.start_time.asc())
-        .iterator()
-    )
-
-    # format is: { timestamp: segment_start_ts, motion: [0-100], audio: [0 - -100] }
-    # periods where active objects / audio was detected will cause audio to be scaled down
-    data: list[dict[str, float]] = []
-
-    for rec in all_recordings:
-        data.append(
-            {
-                "start_time": rec.start_time,
-                "audio": rec.dBFS if rec.objects == 0 else 0,
-            }
-        )
-
-    # get scale in seconds
-    scale = request.args.get("scale", type=int, default=30)
-
-    # resample data using pandas to get activity on scaled basis
-    df = pd.DataFrame(data, columns=["start_time", "audio"])
-    df = df.astype(dtype={"audio": "float16"})
-
-    # set date as datetime index
-    df["start_time"] = pd.to_datetime(df["start_time"], unit="s")
-    df.set_index(["start_time"], inplace=True)
-
-    # normalize data
-    df = df.resample(f"{scale}S").mean().fillna(0.0)
-    df["audio"] = (
-        (df["audio"] - df["audio"].max())
-        / (df["audio"].min() - df["audio"].max())
-        * -100
-    )
-
-    # change types for output
-    df.index = df.index.astype(int) // (10**9)
-    normalized = df.reset_index().to_dict("records")
-    return jsonify(normalized)
+@router.get("/review/event/{event_id}", response_model=ReviewSegmentResponse)
+def get_review_from_event(event_id: str):
+    try:
+        return JSONResponse(
+            model_to_dict(
+                ReviewSegment.get(
+                    ReviewSegment.data["detections"].cast("text") % f'*"{event_id}"*'
+                )
+            )
+        )
+    except DoesNotExist:
+        return JSONResponse(
+            content={"success": False, "message": "Review item not found"},
+            status_code=404,
+        )
+
+
+@router.get("/review/{review_id}", response_model=ReviewSegmentResponse)
+def get_review(review_id: str):
+    try:
+        return JSONResponse(
+            content=model_to_dict(ReviewSegment.get(ReviewSegment.id == review_id))
+        )
+    except DoesNotExist:
+        return JSONResponse(
+            content={"success": False, "message": "Review item not found"},
+            status_code=404,
+        )
+
+
+@router.delete("/review/{review_id}/viewed", response_model=GenericResponse)
+def set_not_reviewed(review_id: str):
+    try:
+        review: ReviewSegment = ReviewSegment.get(ReviewSegment.id == review_id)
+    except DoesNotExist:
+        return JSONResponse(
+            content=(
+                {"success": False, "message": "Review " + review_id + " not found"}
+            ),
+            status_code=404,
+        )
+
+    review.has_been_reviewed = False
+    review.save()
+
+    return JSONResponse(
+        content=(
+            {"success": True, "message": "Set Review " + review_id + " as not viewed"}
+        ),
+        status_code=200,
+    )
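
A standalone sketch of the resampling trick `motion_activity()` relies on: bucket raw per-recording motion values onto a fixed time grid with pandas (sample data is made up).

```python
import pandas as pd

data = [
    {"start_time": 1720000000, "motion": 12.0, "camera": "front"},
    {"start_time": 1720000011, "motion": 40.0, "camera": "front"},
    {"start_time": 1720000031, "motion": 8.0, "camera": "front"},
]
scale = 30  # seconds per bucket

df = pd.DataFrame(data, columns=["start_time", "motion", "camera"])
df["start_time"] = pd.to_datetime(df["start_time"], unit="s")
df = df.set_index("start_time")
# mean motion per 30s bucket, gaps filled with 0.0
resampled = df["motion"].resample(f"{scale}s").mean().fillna(0.0)
# convert the datetime index back to epoch seconds for JSON output
resampled.index = resampled.index.astype(int) // 10**9
print(resampled.reset_index().to_dict("records"))
```
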
377  frigate/app.py
@@ -1,49 +1,48 @@
|
||||
import argparse
|
||||
import datetime
|
||||
import logging
|
||||
import multiprocessing as mp
|
||||
import os
|
||||
import secrets
|
||||
import shutil
|
||||
import signal
|
||||
import sys
|
||||
import traceback
|
||||
from multiprocessing import Queue
|
||||
from multiprocessing.synchronize import Event as MpEvent
|
||||
from types import FrameType
|
||||
from typing import Optional
|
||||
|
||||
import psutil
|
||||
import uvicorn
|
||||
from peewee_migrate import Router
|
||||
from playhouse.sqlite_ext import SqliteExtDatabase
|
||||
from playhouse.sqliteq import SqliteQueueDatabase
|
||||
from pydantic import ValidationError
|
||||
|
||||
from frigate.api.app import create_app
|
||||
import frigate.util as util
|
||||
from frigate.api.auth import hash_password
|
||||
from frigate.api.fastapi_app import create_fastapi_app
|
||||
from frigate.camera import CameraMetrics, PTZMetrics
|
||||
from frigate.comms.config_updater import ConfigPublisher
|
||||
from frigate.comms.dispatcher import Communicator, Dispatcher
|
||||
from frigate.comms.event_metadata_updater import (
|
||||
EventMetadataPublisher,
|
||||
EventMetadataTypeEnum,
|
||||
)
|
||||
from frigate.comms.inter_process import InterProcessCommunicator
|
||||
from frigate.comms.mqtt import MqttClient
|
||||
from frigate.comms.webpush import WebPushClient
|
||||
from frigate.comms.ws import WebSocketClient
|
||||
from frigate.comms.zmq_proxy import ZmqProxy
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.config.config import FrigateConfig
|
||||
from frigate.const import (
|
||||
CACHE_DIR,
|
||||
CLIPS_DIR,
|
||||
CONFIG_DIR,
|
||||
DEFAULT_DB_PATH,
|
||||
EXPORT_DIR,
|
||||
MODEL_CACHE_DIR,
|
||||
RECORD_DIR,
|
||||
)
|
||||
from frigate.db.sqlitevecq import SqliteVecQueueDatabase
|
||||
from frigate.embeddings import EmbeddingsContext, manage_embeddings
|
||||
from frigate.events.audio import listen_to_audio
|
||||
from frigate.events.audio import AudioProcessor
|
||||
from frigate.events.cleanup import EventCleanup
|
||||
from frigate.events.external import ExternalEventProcessor
|
||||
from frigate.events.maintainer import EventProcessor
|
||||
from frigate.log import log_thread
|
||||
from frigate.models import (
|
||||
Event,
|
||||
Export,
|
||||
@@ -58,20 +57,18 @@ from frigate.models import (
|
||||
from frigate.object_detection import ObjectDetectProcess
|
||||
from frigate.object_processing import TrackedObjectProcessor
|
||||
from frigate.output.output import output_frames
|
||||
from frigate.plus import PlusApi
|
||||
from frigate.ptz.autotrack import PtzAutoTrackerThread
|
||||
from frigate.ptz.onvif import OnvifController
|
||||
from frigate.record.cleanup import RecordingCleanup
|
||||
from frigate.record.export import migrate_exports
|
||||
from frigate.record.record import manage_recordings
|
||||
from frigate.review.review import manage_review_segments
|
||||
from frigate.service_manager import ServiceManager
|
||||
from frigate.stats.emitter import StatsEmitter
|
||||
from frigate.stats.util import stats_init
|
||||
from frigate.storage import StorageMaintainer
|
||||
from frigate.timeline import TimelineProcessor
|
||||
from frigate.types import CameraMetricsTypes, PTZMetricsTypes
|
||||
from frigate.util.builtin import empty_and_close_queue, save_default_config
|
||||
from frigate.util.config import migrate_frigate_config
|
||||
from frigate.util.builtin import empty_and_close_queue
|
||||
from frigate.util.object import get_camera_regions_grid
|
||||
from frigate.version import VERSION
|
||||
from frigate.video import capture_camera, track_camera
|
||||
@@ -81,22 +78,19 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FrigateApp:
|
||||
def __init__(self) -> None:
|
||||
def __init__(self, config: FrigateConfig) -> None:
|
||||
self.stop_event: MpEvent = mp.Event()
|
||||
self.detection_queue: Queue = mp.Queue()
|
||||
self.detectors: dict[str, ObjectDetectProcess] = {}
|
||||
self.detection_out_events: dict[str, MpEvent] = {}
|
||||
self.detection_shms: list[mp.shared_memory.SharedMemory] = []
|
||||
self.log_queue: Queue = mp.Queue()
|
||||
self.plus_api = PlusApi()
|
||||
self.camera_metrics: dict[str, CameraMetricsTypes] = {}
|
||||
self.ptz_metrics: dict[str, PTZMetricsTypes] = {}
|
||||
self.camera_metrics: dict[str, CameraMetrics] = {}
|
||||
self.ptz_metrics: dict[str, PTZMetrics] = {}
|
||||
self.processes: dict[str, int] = {}
|
||||
self.embeddings: Optional[EmbeddingsContext] = None
|
||||
self.region_grids: dict[str, list[list[dict[str, int]]]] = {}
|
||||
|
||||
def set_environment_vars(self) -> None:
|
||||
for key, value in self.config.environment_vars.items():
|
||||
os.environ[key] = value
|
||||
self.config = config
|
||||
|
||||
def ensure_dirs(self) -> None:
|
||||
for d in [
|
||||
@@ -113,93 +107,15 @@ class FrigateApp:
|
||||
else:
|
||||
logger.debug(f"Skipping directory: {d}")
|
||||
|
||||
def init_config(self) -> None:
|
||||
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
|
||||
|
||||
# Check if we can use .yaml instead of .yml
|
||||
config_file_yaml = config_file.replace(".yml", ".yaml")
|
||||
if os.path.isfile(config_file_yaml):
|
||||
config_file = config_file_yaml
|
||||
|
||||
if not os.path.isfile(config_file):
|
||||
print("No config file found, saving default config")
|
||||
config_file = config_file_yaml
|
||||
save_default_config(config_file)
|
||||
|
||||
# check if the config file needs to be migrated
|
||||
migrate_frigate_config(config_file)
|
||||
|
||||
self.config = FrigateConfig.parse_file(config_file, plus_api=self.plus_api)
|
||||
|
||||
def init_camera_metrics(self) -> None:
|
||||
# create camera_metrics
|
||||
for camera_name in self.config.cameras.keys():
|
||||
# create camera_metrics
|
||||
self.camera_metrics[camera_name] = {
|
||||
"camera_fps": mp.Value("d", 0.0), # type: ignore[typeddict-item]
|
||||
# issue https://github.com/python/typeshed/issues/8799
|
||||
# from mypy 0.981 onwards
|
||||
"skipped_fps": mp.Value("d", 0.0), # type: ignore[typeddict-item]
|
||||
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "process_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                "detection_fps": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "detection_frame": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "read_start": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ffmpeg_pid": mp.Value("i", 0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "frame_queue": mp.Queue(maxsize=2),
                "capture_process": None,
                "process": None,
                "audio_rms": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                "audio_dBFS": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
            }
            self.ptz_metrics[camera_name] = {
                "ptz_autotracker_enabled": mp.Value(  # type: ignore[typeddict-item]
                    # issue https://github.com/python/typeshed/issues/8799
                    # from mypy 0.981 onwards
                    "i",
                    self.config.cameras[camera_name].onvif.autotracking.enabled,
                ),
                "ptz_tracking_active": mp.Event(),
                "ptz_motor_stopped": mp.Event(),
                "ptz_reset": mp.Event(),
                "ptz_start_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_stop_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_frame_time": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_zoom_level": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_max_zoom": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
                "ptz_min_zoom": mp.Value("d", 0.0),  # type: ignore[typeddict-item]
                # issue https://github.com/python/typeshed/issues/8799
                # from mypy 0.981 onwards
            }
            self.ptz_metrics[camera_name]["ptz_motor_stopped"].set()

    def set_log_levels(self) -> None:
        logging.getLogger().setLevel(self.config.logger.default.value.upper())
        for log, level in self.config.logger.logs.items():
            logging.getLogger(log).setLevel(level.value.upper())

        if "werkzeug" not in self.config.logger.logs:
            logging.getLogger("werkzeug").setLevel("ERROR")

        if "ws4py" not in self.config.logger.logs:
            logging.getLogger("ws4py").setLevel("ERROR")
            self.camera_metrics[camera_name] = CameraMetrics()
            self.ptz_metrics[camera_name] = PTZMetrics(
                autotracker_enabled=self.config.cameras[
                    camera_name
                ].onvif.autotracking.enabled
            )

    def init_queues(self) -> None:
        # Queue for cameras to push tracked objects to
@@ -232,13 +148,6 @@ class FrigateApp:
        except PermissionError:
            logger.error("Unable to write to /config to save DB state")

        # Migrate DB location
        old_db_path = DEFAULT_DB_PATH
        if not os.path.isfile(self.config.database.path) and os.path.isfile(
            old_db_path
        ):
            os.rename(old_db_path, self.config.database.path)

        # Migrate DB schema
        migrate_db = SqliteExtDatabase(self.config.database.path)

@@ -287,7 +196,7 @@ class FrigateApp:
        self.processes["go2rtc"] = proc.info["pid"]

    def init_recording_manager(self) -> None:
        recording_process = mp.Process(
        recording_process = util.Process(
            target=manage_recordings,
            name="recording_manager",
            args=(self.config,),
@@ -299,7 +208,7 @@ class FrigateApp:
        logger.info(f"Recording process started: {recording_process.pid}")

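This hunk and several below swap mp.Process for util.Process. The compare view does not include frigate/util's Process class itself, so the following is only a hedged sketch of the kind of multiprocessing.Process subclass such a swap implies; the logging setup in run() is an assumption for illustration, not Frigate's actual implementation.

import logging
import multiprocessing as mp


class Process(mp.Process):
    # Hypothetical stand-in for util.Process: same constructor surface as
    # mp.Process, plus per-process initialization before the target runs.
    def run(self) -> None:
        logging.basicConfig(level=logging.INFO)  # assumed per-process setup
        logging.getLogger(self.name).info("process %s starting", self.name)
        super().run()  # then execute the configured target as usual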
    def init_review_segment_manager(self) -> None:
        review_segment_process = mp.Process(
        review_segment_process = util.Process(
            target=manage_review_segments,
            name="review_segment_manager",
            args=(self.config,),
@@ -312,12 +221,9 @@ class FrigateApp:

    def init_embeddings_manager(self) -> None:
        if not self.config.semantic_search.enabled:
            self.embeddings = None
            return

        # Create a client for other processes to use
        self.embeddings = EmbeddingsContext()
        embedding_process = mp.Process(
        embedding_process = util.Process(
            target=manage_embeddings,
            name="embeddings_manager",
            args=(self.config,),
@@ -331,7 +237,7 @@ class FrigateApp:
    def bind_database(self) -> None:
        """Bind db to the main process."""
        # NOTE: all db accessing processes need to be created before the db can be bound to the main process
        self.db = SqliteQueueDatabase(
        self.db = SqliteVecQueueDatabase(
            self.config.database.path,
            pragmas={
                "auto_vacuum": "FULL",  # Does not defragment database
@@ -341,6 +247,7 @@ class FrigateApp:
            timeout=max(
                60, 10 * len([c for c in self.config.cameras.values() if c.enabled])
            ),
            load_vec_extension=self.config.semantic_search.enabled,
        )
        models = [
            Event,
@@ -364,7 +271,12 @@ class FrigateApp:
        except PermissionError:
            logger.error("Unable to write to /config to save export state")

        migrate_exports(self.config.ffmpeg, self.config.cameras.keys())
        migrate_exports(self.config.ffmpeg, list(self.config.cameras.keys()))

    def init_embeddings_client(self) -> None:
        if self.config.semantic_search.enabled:
            # Create a client for other processes to use
            self.embeddings = EmbeddingsContext(self.db)

    def init_external_event_processor(self) -> None:
        self.external_event_processor = ExternalEventProcessor(self.config)
@@ -372,20 +284,10 @@ class FrigateApp:
    def init_inter_process_communicator(self) -> None:
        self.inter_process_communicator = InterProcessCommunicator()
        self.inter_config_updater = ConfigPublisher()
        self.inter_zmq_proxy = ZmqProxy()

    def init_web_server(self) -> None:
        self.flask_app = create_app(
            self.config,
            self.db,
            self.embeddings,
            self.detected_frames_processor,
            self.storage_maintainer,
            self.onvif_controller,
            self.external_event_processor,
            self.plus_api,
            self.stats_emitter,
        self.event_metadata_updater = EventMetadataPublisher(
            EventMetadataTypeEnum.regenerate_description
        )
        self.inter_zmq_proxy = ZmqProxy()

    def init_onvif(self) -> None:
        self.onvif_controller = OnvifController(self.config, self.ptz_metrics)
@@ -418,7 +320,9 @@ class FrigateApp:
        largest_frame = max(
            [
                det.model.height * det.model.width * 3
                for (name, det) in self.config.detectors.items()
                if det.model is not None
                else 320
                for det in self.config.detectors.values()
            ]
        )
        shm_in = mp.shared_memory.SharedMemory(
@@ -468,7 +372,7 @@ class FrigateApp:
        self.detected_frames_processor.start()

    def start_video_output_processor(self) -> None:
        output_processor = mp.Process(
        output_processor = util.Process(
            target=output_frames,
            name="output_processor",
            args=(self.config,),
@@ -485,6 +389,7 @@ class FrigateApp:

        # create or update region grids for each camera
        for camera in self.config.cameras.values():
            assert camera.name is not None
            self.region_grids[camera.name] = get_camera_regions_grid(
                camera.name,
                camera.detect,
@@ -497,7 +402,7 @@ class FrigateApp:
                logger.info(f"Camera processor not started for disabled camera {name}")
                continue

            camera_process = mp.Process(
            camera_process = util.Process(
                target=track_camera,
                name=f"camera_processor:{name}",
                args=(
@@ -512,43 +417,40 @@ class FrigateApp:
                    self.ptz_metrics[name],
                    self.region_grids[name],
                ),
                daemon=True,
            )
            camera_process.daemon = True
            self.camera_metrics[name]["process"] = camera_process
            self.camera_metrics[name].process = camera_process
            camera_process.start()
            logger.info(f"Camera processor started for {name}: {camera_process.pid}")

    def start_camera_capture_processes(self) -> None:
        shm_frame_count = self.shm_frame_count()

        for name, config in self.config.cameras.items():
            if not self.config.cameras[name].enabled:
                logger.info(f"Capture process not started for disabled camera {name}")
                continue

            capture_process = mp.Process(
            capture_process = util.Process(
                target=capture_camera,
                name=f"camera_capture:{name}",
                args=(name, config, self.shm_frame_count, self.camera_metrics[name]),
                args=(name, config, shm_frame_count, self.camera_metrics[name]),
            )
            capture_process.daemon = True
            self.camera_metrics[name]["capture_process"] = capture_process
            self.camera_metrics[name].capture_process = capture_process
            capture_process.start()
            logger.info(f"Capture process started for {name}: {capture_process.pid}")

    def start_audio_processors(self) -> None:
        self.audio_process = None
        if len([c for c in self.config.cameras.values() if c.audio.enabled]) > 0:
            self.audio_process = mp.Process(
                target=listen_to_audio,
                name="audio_capture",
                args=(
                    self.config,
                    self.camera_metrics,
                ),
            )
            self.audio_process.daemon = True
            self.audio_process.start()
            self.processes["audio_detector"] = self.audio_process.pid or 0
            logger.info(f"Audio process started: {self.audio_process.pid}")
    def start_audio_processor(self) -> None:
        audio_cameras = [
            c
            for c in self.config.cameras.values()
            if c.enabled and c.audio.enabled_in_config
        ]

        if audio_cameras:
            proc = AudioProcessor(audio_cameras, self.camera_metrics).start(wait=True)
            self.processes["audio_detector"] = proc.pid or 0

    def start_timeline_processor(self) -> None:
        self.timeline_processor = TimelineProcessor(
@@ -565,7 +467,7 @@ class FrigateApp:
        self.event_processor.start()

    def start_event_cleanup(self) -> None:
        self.event_cleanup = EventCleanup(self.config, self.stop_event)
        self.event_cleanup = EventCleanup(self.config, self.stop_event, self.db)
        self.event_cleanup.start()

    def start_record_cleanup(self) -> None:
@@ -590,7 +492,7 @@ class FrigateApp:
        self.frigate_watchdog = FrigateWatchdog(self.detectors, self.stop_event)
        self.frigate_watchdog.start()

    def check_shm(self) -> None:
    def shm_frame_count(self) -> int:
        total_shm = round(shutil.disk_usage("/dev/shm").total / pow(2, 20), 1)

        # required for log files + nginx cache
@@ -600,27 +502,29 @@ class FrigateApp:
        min_req_shm += 8

        available_shm = total_shm - min_req_shm
        cam_total_frame_size = 0
        cam_total_frame_size = 0.0

        for camera in self.config.cameras.values():
            if camera.enabled:
            if camera.enabled and camera.detect.width and camera.detect.height:
                cam_total_frame_size += round(
                    (camera.detect.width * camera.detect.height * 1.5 + 270480)
                    / 1048576,
                    1,
                )

        self.shm_frame_count = min(50, int(available_shm / (cam_total_frame_size)))
        shm_frame_count = min(50, int(available_shm / (cam_total_frame_size)))

        logger.debug(
            f"Calculated total camera size {available_shm} / {cam_total_frame_size} :: {self.shm_frame_count} frames for each camera in SHM"
            f"Calculated total camera size {available_shm} / {cam_total_frame_size} :: {shm_frame_count} frames for each camera in SHM"
        )

        if self.shm_frame_count < 10:
        if shm_frame_count < 10:
            logger.warning(
                f"The current SHM size of {total_shm}MB is too small, recommend increasing it to at least {round(min_req_shm + cam_total_frame_size)}MB."
                f"The current SHM size of {total_shm}MB is too small, recommend increasing it to at least {round(min_req_shm + cam_total_frame_size * 10)}MB."
            )

        return shm_frame_count

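To make the arithmetic in shm_frame_count concrete, here is a small worked example with illustrative numbers (one 1280x720 detect stream; the 30 MB minimum reservation is assumed, since the hunk above truncates before the base value is shown):

# Illustrative numbers only.
total_shm = 64.0        # MB reported for /dev/shm
min_req_shm = 30.0      # assumed reservation for logs + nginx cache
available_shm = total_shm - min_req_shm  # 34.0 MB

# Per-frame cost for a 1280x720 YUV frame plus overhead, as in the diff:
frame_mb = round((1280 * 720 * 1.5 + 270480) / 1048576, 1)  # ~1.6 MB

shm_frame_count = min(50, int(available_shm / frame_mb))  # int(34 / 1.6) -> 21
print(shm_frame_count)  # 21, above the <10 threshold that triggers the warning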
    def init_auth(self) -> None:
        if self.config.auth.enabled:
            if User.select().count() == 0:
@@ -632,6 +536,7 @@ class FrigateApp:
                    {
                        User.username: "admin",
                        User.password_hash: password_hash,
                        User.notification_tokens: [],
                    }
                ).execute()

@@ -648,7 +553,11 @@ class FrigateApp:
                password_hash = hash_password(
                    password, iterations=self.config.auth.hash_iterations
                )
                User.replace(username="admin", password_hash=password_hash).execute()
                User.replace(
                    username="admin",
                    password_hash=password_hash,
                    notification_tokens=[],
                ).execute()

                logger.info("********************************************************")
                logger.info("********************************************************")
@@ -657,97 +566,64 @@ class FrigateApp:
                logger.info("********************************************************")
                logger.info("********************************************************")

    @log_thread()
    def start(self) -> None:
        parser = argparse.ArgumentParser(
            prog="Frigate",
            description="An NVR with realtime local object detection for IP cameras.",
        )
        parser.add_argument("--validate-config", action="store_true")
        args = parser.parse_args()

        logger.info(f"Starting Frigate ({VERSION})")

        try:
            self.ensure_dirs()
            try:
                self.init_config()
            except Exception as e:
                print("*************************************************************")
                print("*************************************************************")
                print("*** Your config file is not valid! ***")
                print("*** Please check the docs at ***")
                print("*** https://docs.frigate.video/configuration/index ***")
                print("*************************************************************")
                print("*************************************************************")
                print("*** Config Validation Errors ***")
                print("*************************************************************")
                if isinstance(e, ValidationError):
                    for error in e.errors():
                        location = ".".join(str(item) for item in error["loc"])
                        print(f"{location}: {error['msg']}")
                else:
                    print(e)
                    print(traceback.format_exc())
                print("*************************************************************")
                print("*** End Config Validation Errors ***")
                print("*************************************************************")
                sys.exit(1)
            if args.validate_config:
                print("*************************************************************")
                print("*** Your config file is valid. ***")
                print("*************************************************************")
                sys.exit(0)
            self.set_environment_vars()
            self.set_log_levels()
            self.init_queues()
            self.init_database()
            self.init_onvif()
            self.init_recording_manager()
            self.init_review_segment_manager()
            self.init_embeddings_manager()
            self.init_go2rtc()
            self.bind_database()
            self.check_db_data_migrations()
            self.init_inter_process_communicator()
            self.init_dispatcher()
        except Exception as e:
            print(e)
            sys.exit(1)
        # Ensure global state.
        self.ensure_dirs()

        # Start frigate services.
        self.init_camera_metrics()
        self.init_queues()
        self.init_database()
        self.init_onvif()
        self.init_recording_manager()
        self.init_review_segment_manager()
        self.init_go2rtc()
        self.start_detectors()
        self.init_embeddings_manager()
        self.bind_database()
        self.check_db_data_migrations()
        self.init_inter_process_communicator()
        self.init_dispatcher()
        self.init_embeddings_client()
        self.start_video_output_processor()
        self.start_ptz_autotracker()
        self.init_historical_regions()
        self.start_detected_frames_processor()
        self.start_camera_processors()
        self.check_shm()
        self.start_camera_capture_processes()
        self.start_audio_processors()
        self.start_audio_processor()
        self.start_storage_maintainer()
        self.init_external_event_processor()
        self.start_stats_emitter()
        self.init_web_server()
        self.start_timeline_processor()
        self.start_event_processor()
        self.start_event_cleanup()
        self.start_record_cleanup()
        self.start_watchdog()

        self.init_auth()

        # Flask only listens for SIGINT, so we need to catch SIGTERM and send SIGINT
        def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
            os.kill(os.getpid(), signal.SIGINT)

        signal.signal(signal.SIGTERM, receiveSignal)

        try:
            self.flask_app.run(host="127.0.0.1", port=5001, debug=False, threaded=True)
        except KeyboardInterrupt:
            pass

        logger.info("Flask has exited...")

        self.stop()
            uvicorn.run(
                create_fastapi_app(
                    self.config,
                    self.db,
                    self.embeddings,
                    self.detected_frames_processor,
                    self.storage_maintainer,
                    self.onvif_controller,
                    self.external_event_processor,
                    self.stats_emitter,
                    self.event_metadata_updater,
                ),
                host="127.0.0.1",
                port=5001,
                log_level="error",
            )
        finally:
            self.stop()

    def stop(self) -> None:
        logger.info("Stopping...")
@@ -762,29 +638,23 @@ class FrigateApp:
            ReviewSegment.end_time == None
        ).execute()

        # stop the audio process
        if self.audio_process is not None:
            self.audio_process.terminate()
            self.audio_process.join()

        # ensure the capture processes are done
        for camera in self.camera_metrics.keys():
            capture_process = self.camera_metrics[camera]["capture_process"]
        for camera, metrics in self.camera_metrics.items():
            capture_process = metrics.capture_process
            if capture_process is not None:
                logger.info(f"Waiting for capture process for {camera} to stop")
                capture_process.terminate()
                capture_process.join()

        # ensure the camera processors are done
        for camera in self.camera_metrics.keys():
            camera_process = self.camera_metrics[camera]["process"]
        for camera, metrics in self.camera_metrics.items():
            camera_process = metrics.process
            if camera_process is not None:
                logger.info(f"Waiting for process for {camera} to stop")
                camera_process.terminate()
                camera_process.join()
                logger.info(f"Closing frame queue for {camera}")
                frame_queue = self.camera_metrics[camera]["frame_queue"]
                empty_and_close_queue(frame_queue)
                empty_and_close_queue(metrics.frame_queue)

        # ensure the detectors are done
        for detector in self.detectors.values():
@@ -823,11 +693,12 @@ class FrigateApp:

        # Save embeddings stats to disk
        if self.embeddings:
            self.embeddings.save_stats()
            self.embeddings.stop()

        # Stop Communicators
        self.inter_process_communicator.stop()
        self.inter_config_updater.stop()
        self.event_metadata_updater.stop()
        self.inter_zmq_proxy.stop()

        while len(self.detection_shms) > 0:
@@ -835,4 +706,6 @@ class FrigateApp:
            shm.close()
            shm.unlink()

        ServiceManager.current().shutdown(wait=True)

        os._exit(os.EX_OK)

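The start() hunk above replaces the Flask development server with uvicorn serving a FastAPI application built by create_fastapi_app. As a self-contained sketch of that serving pattern (the factory below is a stand-in for illustration, not Frigate's create_fastapi_app):

import uvicorn
from fastapi import FastAPI


def create_app() -> FastAPI:
    # Stand-in for Frigate's create_fastapi_app(config, db, ...); hypothetical.
    app = FastAPI()

    @app.get("/health")
    def health() -> dict:
        return {"status": "ok"}

    return app


if __name__ == "__main__":
    # Mirrors the call in the diff: bind locally on 5001, keep uvicorn's logs quiet.
    uvicorn.run(create_app(), host="127.0.0.1", port=5001, log_level="error")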
68 frigate/camera/__init__.py Normal file
@@ -0,0 +1,68 @@
import multiprocessing as mp
from multiprocessing.sharedctypes import Synchronized
from multiprocessing.synchronize import Event
from typing import Optional


class CameraMetrics:
    camera_fps: Synchronized
    detection_fps: Synchronized
    detection_frame: Synchronized
    process_fps: Synchronized
    skipped_fps: Synchronized
    read_start: Synchronized
    audio_rms: Synchronized
    audio_dBFS: Synchronized

    frame_queue: mp.Queue

    process: Optional[mp.Process]
    capture_process: Optional[mp.Process]
    ffmpeg_pid: Synchronized

    def __init__(self):
        self.camera_fps = mp.Value("d", 0)
        self.detection_fps = mp.Value("d", 0)
        self.detection_frame = mp.Value("d", 0)
        self.process_fps = mp.Value("d", 0)
        self.skipped_fps = mp.Value("d", 0)
        self.read_start = mp.Value("d", 0)
        self.audio_rms = mp.Value("d", 0)
        self.audio_dBFS = mp.Value("d", 0)

        self.frame_queue = mp.Queue(maxsize=2)

        self.process = None
        self.capture_process = None
        self.ffmpeg_pid = mp.Value("i", 0)


class PTZMetrics:
    autotracker_enabled: Synchronized

    start_time: Synchronized
    stop_time: Synchronized
    frame_time: Synchronized
    zoom_level: Synchronized
    max_zoom: Synchronized
    min_zoom: Synchronized

    tracking_active: Event
    motor_stopped: Event
    reset: Event

    def __init__(self, *, autotracker_enabled: bool):
        self.autotracker_enabled = mp.Value("i", autotracker_enabled)

        self.start_time = mp.Value("d", 0)
        self.stop_time = mp.Value("d", 0)
        self.frame_time = mp.Value("d", 0)
        self.zoom_level = mp.Value("d", 0)
        self.max_zoom = mp.Value("d", 0)
        self.min_zoom = mp.Value("d", 0)

        self.tracking_active = mp.Event()
        self.motor_stopped = mp.Event()
        self.reset = mp.Event()

        self.motor_stopped.set()
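These classes are what let the app.py hunks above trade dict lookups like camera_metrics[name]["capture_process"] for attribute access. A brief usage sketch against the definitions above (the values are illustrative):

from frigate.camera import CameraMetrics, PTZMetrics

metrics = CameraMetrics()
metrics.process_fps.value = 5.0     # Synchronized values are read/written via .value
metrics.frame_queue.put("frame-1")  # bounded queue shared with the capture process

ptz = PTZMetrics(autotracker_enabled=True)  # keyword-only, per the signature above
assert ptz.motor_stopped.is_set()           # __init__ leaves the motor marked stopped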
@@ -6,6 +6,7 @@ import logging
from abc import ABC, abstractmethod
from typing import Any, Callable, Optional

from frigate.camera import PTZMetrics
from frigate.comms.config_updater import ConfigPublisher
from frigate.config import BirdseyeModeEnum, FrigateConfig
from frigate.const import (
@@ -14,12 +15,14 @@ from frigate.const import (
    INSERT_PREVIEW,
    REQUEST_REGION_GRID,
    UPDATE_CAMERA_ACTIVITY,
    UPDATE_EMBEDDINGS_REINDEX_PROGRESS,
    UPDATE_EVENT_DESCRIPTION,
    UPDATE_MODEL_STATE,
    UPSERT_REVIEW_SEGMENT,
)
from frigate.models import Event, Previews, Recordings, ReviewSegment
from frigate.ptz.onvif import OnvifCommandEnum, OnvifController
from frigate.types import PTZMetricsTypes
from frigate.types import ModelStatusTypesEnum
from frigate.util.object import get_camera_regions_grid
from frigate.util.services import restart_frigate

@@ -53,7 +56,7 @@ class Dispatcher:
        config: FrigateConfig,
        config_updater: ConfigPublisher,
        onvif: OnvifController,
        ptz_metrics: dict[str, PTZMetricsTypes],
        ptz_metrics: dict[str, PTZMetrics],
        communicators: list[Communicator],
    ) -> None:
        self.config = config
@@ -61,6 +64,9 @@ class Dispatcher:
        self.onvif = onvif
        self.ptz_metrics = ptz_metrics
        self.comms = communicators
        self.camera_activity = {}
        self.model_state = {}
        self.embeddings_reindex = {}

        self._camera_settings_handlers: dict[str, Callable] = {
            "audio": self._on_audio_command,
@@ -82,36 +88,25 @@ class Dispatcher:
        for comm in self.comms:
            comm.subscribe(self._receive)

        self.camera_activity = {}

    def _receive(self, topic: str, payload: str) -> Optional[Any]:
        """Handle receiving of payload from communicators."""
        if topic.endswith("set"):

        def handle_camera_command(command_type, camera_name, command, payload):
            try:
                # example /cam_name/detect/set payload=ON|OFF
                if topic.count("/") == 2:
                    camera_name = topic.split("/")[-3]
                    command = topic.split("/")[-2]
                if command_type == "set":
                    self._camera_settings_handlers[command](camera_name, payload)
                elif topic.count("/") == 1:
                    command = topic.split("/")[-2]
                    self._global_settings_handlers[command](payload)
            except IndexError:
                logger.error(f"Received invalid set command: {topic}")
                return
        elif topic.endswith("ptz"):
            try:
                # example /cam_name/ptz payload=MOVE_UP|MOVE_DOWN|STOP...
                camera_name = topic.split("/")[-2]
                self._on_ptz_command(camera_name, payload)
            except IndexError:
                logger.error(f"Received invalid ptz command: {topic}")
                return
        elif topic == "restart":
                elif command_type == "ptz":
                    self._on_ptz_command(camera_name, payload)
            except KeyError:
                logger.error(f"Invalid command type or handler: {command_type}")

        def handle_restart():
            restart_frigate()
        elif topic == INSERT_MANY_RECORDINGS:

        def handle_insert_many_recordings():
            Recordings.insert_many(payload).execute()
        elif topic == REQUEST_REGION_GRID:

        def handle_request_region_grid():
            camera = payload
            grid = get_camera_regions_grid(
                camera,
@@ -119,28 +114,57 @@ class Dispatcher:
                max(self.config.model.width, self.config.model.height),
            )
            return grid
        elif topic == INSERT_PREVIEW:

        def handle_insert_preview():
            Previews.insert(payload).execute()
        elif topic == UPSERT_REVIEW_SEGMENT:
            (
                ReviewSegment.insert(payload)
                .on_conflict(
                    conflict_target=[ReviewSegment.id],
                    update=payload,
                )
                .execute()
            )
        elif topic == CLEAR_ONGOING_REVIEW_SEGMENTS:
            ReviewSegment.update(end_time=datetime.datetime.now().timestamp()).where(
                ReviewSegment.end_time == None

        def handle_upsert_review_segment():
            ReviewSegment.insert(payload).on_conflict(
                conflict_target=[ReviewSegment.id],
                update=payload,
            ).execute()
        elif topic == UPDATE_CAMERA_ACTIVITY:

        def handle_clear_ongoing_review_segments():
            ReviewSegment.update(end_time=datetime.datetime.now().timestamp()).where(
                ReviewSegment.end_time.is_null(True)
            ).execute()

        def handle_update_camera_activity():
            self.camera_activity = payload
        elif topic == UPDATE_EVENT_DESCRIPTION:

        def handle_update_event_description():
            event: Event = Event.get(Event.id == payload["id"])
            event.data["description"] = payload["description"]
            event.save()
        elif topic == "onConnect":
            self.publish(
                "event_update",
                json.dumps({"id": event.id, "description": event.data["description"]}),
            )

        def handle_update_model_state():
            if payload:
                model = payload["model"]
                state = payload["state"]
                self.model_state[model] = ModelStatusTypesEnum[state]
                self.publish("model_state", json.dumps(self.model_state))

        def handle_model_state():
            self.publish("model_state", json.dumps(self.model_state.copy()))

        def handle_update_embeddings_reindex_progress():
            self.embeddings_reindex = payload
            self.publish(
                "embeddings_reindex_progress",
                json.dumps(payload),
            )

        def handle_embeddings_reindex_progress():
            self.publish(
                "embeddings_reindex_progress",
                json.dumps(self.embeddings_reindex.copy()),
            )

        def handle_on_connect():
            camera_status = self.camera_activity.copy()

            for camera in camera_status.keys():
@@ -155,6 +179,51 @@ class Dispatcher:
            }

            self.publish("camera_activity", json.dumps(camera_status))
            self.publish("model_state", json.dumps(self.model_state.copy()))
            self.publish(
                "embeddings_reindex_progress",
                json.dumps(self.embeddings_reindex.copy()),
            )

        # Dictionary mapping topic to handlers
        topic_handlers = {
            INSERT_MANY_RECORDINGS: handle_insert_many_recordings,
            REQUEST_REGION_GRID: handle_request_region_grid,
            INSERT_PREVIEW: handle_insert_preview,
            UPSERT_REVIEW_SEGMENT: handle_upsert_review_segment,
            CLEAR_ONGOING_REVIEW_SEGMENTS: handle_clear_ongoing_review_segments,
            UPDATE_CAMERA_ACTIVITY: handle_update_camera_activity,
            UPDATE_EVENT_DESCRIPTION: handle_update_event_description,
            UPDATE_MODEL_STATE: handle_update_model_state,
            UPDATE_EMBEDDINGS_REINDEX_PROGRESS: handle_update_embeddings_reindex_progress,
            "restart": handle_restart,
            "embeddingsReindexProgress": handle_embeddings_reindex_progress,
            "modelState": handle_model_state,
            "onConnect": handle_on_connect,
        }

        if topic.endswith("set") or topic.endswith("ptz"):
            try:
                parts = topic.split("/")
                if len(parts) == 3 and topic.endswith("set"):
                    # example /cam_name/detect/set payload=ON|OFF
                    camera_name = parts[-3]
                    command = parts[-2]
                    handle_camera_command("set", camera_name, command, payload)
                elif len(parts) == 2 and topic.endswith("set"):
                    command = parts[-2]
                    self._global_settings_handlers[command](payload)
                elif len(parts) == 2 and topic.endswith("ptz"):
                    # example /cam_name/ptz payload=MOVE_UP|MOVE_DOWN|STOP...
                    camera_name = parts[-2]
                    handle_camera_command("ptz", camera_name, "", payload)
            except IndexError:
                logger.error(
                    f"Received invalid {topic.split('/')[-1]} command: {topic}"
                )
                return
        elif topic in topic_handlers:
            return topic_handlers[topic]()
        else:
            self.publish(topic, payload, retain=False)

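The refactored _receive replaces a long if/elif chain with closures over payload plus a topic-to-handler dictionary. A minimal, standalone sketch of that dispatch pattern:

from typing import Any, Callable, Optional


def receive(topic: str, payload: Any) -> Optional[Any]:
    # Handlers close over `payload`, so the table maps topics to zero-arg callables.
    def handle_restart() -> None:
        print("restart requested")

    def handle_echo() -> Any:
        return payload

    topic_handlers: dict[str, Callable[[], Optional[Any]]] = {
        "restart": handle_restart,
        "echo": handle_echo,
    }

    if topic in topic_handlers:
        return topic_handlers[topic]()
    return None  # unknown topics would be republished, as in the diff


print(receive("echo", {"id": 1}))  # -> {'id': 1}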
@@ -247,16 +316,16 @@ class Dispatcher:
                    "Autotracking must be enabled in the config to be turned on via MQTT."
                )
                return
            if not self.ptz_metrics[camera_name]["ptz_autotracker_enabled"].value:
            if not self.ptz_metrics[camera_name].autotracker_enabled.value:
                logger.info(f"Turning on ptz autotracker for {camera_name}")
                self.ptz_metrics[camera_name]["ptz_autotracker_enabled"].value = True
                self.ptz_metrics[camera_name]["ptz_start_time"].value = 0
                self.ptz_metrics[camera_name].autotracker_enabled.value = True
                self.ptz_metrics[camera_name].start_time.value = 0
                ptz_autotracker_settings.enabled = True
        elif payload == "OFF":
            if self.ptz_metrics[camera_name]["ptz_autotracker_enabled"].value:
            if self.ptz_metrics[camera_name].autotracker_enabled.value:
                logger.info(f"Turning off ptz autotracker for {camera_name}")
                self.ptz_metrics[camera_name]["ptz_autotracker_enabled"].value = False
                self.ptz_metrics[camera_name]["ptz_start_time"].value = 0
                self.ptz_metrics[camera_name].autotracker_enabled.value = False
                self.ptz_metrics[camera_name].start_time.value = 0
                ptz_autotracker_settings.enabled = False

        self.publish(f"{camera_name}/ptz_autotracker/state", payload, retain=True)

65 frigate/comms/embeddings_updater.py Normal file
@@ -0,0 +1,65 @@
"""Facilitates communication between processes."""

from enum import Enum
from typing import Callable

import zmq

SOCKET_REP_REQ = "ipc:///tmp/cache/embeddings"


class EmbeddingsRequestEnum(Enum):
    embed_description = "embed_description"
    embed_thumbnail = "embed_thumbnail"
    generate_search = "generate_search"


class EmbeddingsResponder:
    def __init__(self) -> None:
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.REP)
        self.socket.bind(SOCKET_REP_REQ)

    def check_for_request(self, process: Callable) -> None:
        while True:  # load all messages that are queued
            has_message, _, _ = zmq.select([self.socket], [], [], 0.1)

            if not has_message:
                break

            try:
                (topic, value) = self.socket.recv_json(flags=zmq.NOBLOCK)

                response = process(topic, value)

                if response is not None:
                    self.socket.send_json(response)
                else:
                    self.socket.send_json([])
            except zmq.ZMQError:
                break

    def stop(self) -> None:
        self.socket.close()
        self.context.destroy()


class EmbeddingsRequestor:
    """Simplifies sending data to EmbeddingsResponder and getting a reply."""

    def __init__(self) -> None:
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.REQ)
        self.socket.connect(SOCKET_REP_REQ)

    def send_data(self, topic: str, data: any) -> str:
        """Sends data and then waits for reply."""
        try:
            self.socket.send_json((topic, data))
            return self.socket.recv_json()
        except zmq.ZMQError:
            return ""

    def stop(self) -> None:
        self.socket.close()
        self.context.destroy()
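A self-contained sketch of the REQ/REP round trip these two classes wrap, using an in-process endpoint instead of the ipc:///tmp/cache/embeddings socket so it runs anywhere:

import zmq

context = zmq.Context()

rep = context.socket(zmq.REP)
rep.bind("inproc://embeddings-demo")

req = context.socket(zmq.REQ)
req.connect("inproc://embeddings-demo")

# Requestor side: send (topic, data) and block for the reply.
req.send_json(("generate_search", "person in the yard"))

# Responder side: receive, "process", and reply (the real responder
# polls with zmq.select instead of blocking).
topic, value = rep.recv_json()
rep.send_json({"topic": topic, "results": []})

print(req.recv_json())  # -> {'topic': 'generate_search', 'results': []}

req.close()
rep.close()
context.destroy()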
51 frigate/comms/event_metadata_updater.py Normal file
@@ -0,0 +1,51 @@
"""Facilitates communication between processes."""

import logging
from enum import Enum
from typing import Optional

from frigate.events.types import RegenerateDescriptionEnum

from .zmq_proxy import Publisher, Subscriber

logger = logging.getLogger(__name__)


class EventMetadataTypeEnum(str, Enum):
    all = ""
    regenerate_description = "regenerate_description"


class EventMetadataPublisher(Publisher):
    """Simplifies sending event metadata."""

    topic_base = "event_metadata/"

    def __init__(self, topic: EventMetadataTypeEnum) -> None:
        topic = topic.value
        super().__init__(topic)

    def publish(self, payload: tuple[str, RegenerateDescriptionEnum]) -> None:
        super().publish(payload)


class EventMetadataSubscriber(Subscriber):
    """Simplifies receiving event metadata."""

    topic_base = "event_metadata/"

    def __init__(self, topic: EventMetadataTypeEnum) -> None:
        topic = topic.value
        super().__init__(topic)

    def check_for_update(
        self, timeout: float = 1
    ) -> Optional[tuple[EventMetadataTypeEnum, str, RegenerateDescriptionEnum]]:
        return super().check_for_update(timeout)

    def _return_object(self, topic: str, payload: any) -> any:
        if payload is None:
            return (None, None, None)
        topic = EventMetadataTypeEnum[topic[len(self.topic_base) :]]
        event_id, source = payload
        return (topic, event_id, RegenerateDescriptionEnum(source))
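_return_object recovers the enum member by stripping the shared topic_base prefix from the wire topic. A tiny illustration of that reverse lookup (it works because each member's name matches its value):

from enum import Enum


class EventMetadataTypeEnum(str, Enum):
    all = ""
    regenerate_description = "regenerate_description"


topic_base = "event_metadata/"
wire_topic = "event_metadata/regenerate_description"

# Same prefix-stripping lookup used in _return_object above.
member = EventMetadataTypeEnum[wire_topic[len(topic_base):]]
print(member)  # EventMetadataTypeEnum.regenerate_description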
@@ -65,8 +65,11 @@ class InterProcessRequestor:

    def send_data(self, topic: str, data: any) -> any:
        """Sends data and then waits for reply."""
        self.socket.send_json((topic, data))
        return self.socket.recv_json()
        try:
            self.socket.send_json((topic, data))
            return self.socket.recv_json()
        except zmq.ZMQError:
            return ""

    def stop(self) -> None:
        self.socket.close()

Some files were not shown because too many files have changed in this diff.