Forked from GitHub/frigate
Compare commits — 282 commits (dependabot … v0.15.0-be)

(The compare table lists only abbreviated commit SHAs; the author, message, and date columns were not captured, so the commit list is omitted here.)
@@ -42,6 +42,7 @@ codeproject
colormap
colorspace
comms
coro
ctypeslib
CUDA
Cuvid

@@ -59,6 +60,7 @@ dsize
dtype
ECONNRESET
edgetpu
fastapi
faststart
fflags
ffprobe

@@ -212,6 +214,7 @@ rcond
RDONLY
rebranded
referer
reindex
Reolink
restream
restreamed

@@ -236,6 +239,7 @@ sleeptime
SNDMORE
socs
sqliteq
sqlitevecq
ssdlite
statm
stimeout

@@ -270,6 +274,7 @@ unraid
unreviewed
userdata
usermod
uvicorn
vaapi
vainfo
variations
@@ -52,7 +52,8 @@
"csstools.postcss",
"blanu.vscode-styled-jsx",
"bradlc.vscode-tailwindcss",
"charliermarsh.ruff"
"charliermarsh.ruff",
"eamodio.gitlens"
],
"settings": {
"remote.autoForwardPorts": false,
@@ -90,6 +90,9 @@ body:
- HassOS Addon
- Docker Compose
- Docker CLI
- Proxmox via Docker
- Proxmox via TTeck Script
- Windows WSL2
validations:
required: true
- type: dropdown

@@ -102,7 +105,7 @@ body:
- TensorRT
- RKNN
- Other
- CPU (no Coral)
- CPU (no coral)
validations:
required: true
- type: dropdown
.github/DISCUSSION_TEMPLATE/config-support.yml (vendored, 11 changed lines)

@@ -76,6 +76,17 @@ body:
- HassOS Addon
- Docker Compose
- Docker CLI
- Proxmox via Docker
- Proxmox via TTeck Script
- Windows WSL2
validations:
required: true
- type: textarea
id: docker
attributes:
label: docker-compose file or Docker CLI command
description: This will be automatically formatted into code, so no need for backticks.
render: yaml
validations:
required: true
- type: dropdown
.github/DISCUSSION_TEMPLATE/detector-support.yml (vendored, 25 changed lines)

@@ -48,28 +48,6 @@ body:
render: shell
validations:
required: true
- type: textarea
id: go2rtclogs
attributes:
label: Relevant go2rtc log output
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
render: shell
validations:
required: true
- type: dropdown
id: os
attributes:
label: Operating system
options:
- HassOS
- Debian
- Other Linux
- Proxmox
- UNRAID
- Windows
- Other
validations:
required: true
- type: dropdown
id: install-method
attributes:

@@ -78,6 +56,9 @@ body:
- HassOS Addon
- Docker Compose
- Docker CLI
- Proxmox via Docker
- Proxmox via TTeck Script
- Windows WSL2
validations:
required: true
- type: dropdown
.github/DISCUSSION_TEMPLATE/general-support.yml (vendored, 25 changed lines)

@@ -68,20 +68,6 @@ body:
label: Frigate stats
description: Output from frigate's /api/stats endpoint
render: json
- type: dropdown
id: os
attributes:
label: Operating system
options:
- HassOS
- Debian
- Other Linux
- Proxmox
- UNRAID
- Windows
- Other
validations:
required: true
- type: dropdown
id: install-method
attributes:

@@ -90,6 +76,17 @@ body:
- HassOS Addon
- Docker Compose
- Docker CLI
- Proxmox via Docker
- Proxmox via TTeck Script
- Windows WSL2
validations:
required: true
- type: textarea
id: docker
attributes:
label: docker-compose file or Docker CLI command
description: This will be automatically formatted into code, so no need for backticks.
render: yaml
validations:
required: true
- type: dropdown
@@ -24,12 +24,6 @@ body:
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
validations:
required: true
- type: input
attributes:
label: In which browser(s) are you experiencing the issue with?
placeholder: Google Chrome 88.0.4324.150
description: >
Provide the full name and don't forget to add the version!
- type: textarea
id: config
attributes:

@@ -70,20 +64,6 @@ body:
render: shell
validations:
required: true
- type: dropdown
id: os
attributes:
label: Operating system
options:
- HassOS
- Debian
- Other Linux
- Proxmox
- UNRAID
- Windows
- Other
validations:
required: true
- type: dropdown
id: install-method
attributes:

@@ -92,6 +72,22 @@ body:
- HassOS Addon
- Docker Compose
- Docker CLI
- Proxmox via Docker
- Proxmox via TTeck Script
- Windows WSL2
validations:
required: true
- type: dropdown
id: object-detector
attributes:
label: Object Detector
options:
- Coral
- OpenVino
- TensorRT
- RKNN
- Other
- CPU (no coral)
validations:
required: true
- type: dropdown
.github/pull_request_template.md (vendored, new file, 31 lines)

@@ -0,0 +1,31 @@
## Proposed change

<!--
Describe what this pull request does and how it will benefit users of Frigate.
Please describe in detail any considerations, breaking changes, etc. that are
made in this pull request.
-->

## Type of change

- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code

## Additional information

- This PR fixes or closes issue: fixes #
- This PR is related to issue:

## Checklist

<!--
Put an `x` in the boxes that apply.
-->

- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] The code has been formatted using Ruff (`ruff format frigate`)
.github/workflows/ci.yml (vendored, 89 changed lines)

@@ -6,6 +6,8 @@ on:
branches:
- dev
- master
paths-ignore:
- 'docs/**'

# only run the latest commit to avoid cache overwrites
concurrency:

@@ -155,6 +157,28 @@ jobs:
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
arm64_extra_builds:
runs-on: ubuntu-latest
name: ARM Extra Build
needs:
- arm64_build
steps:
- name: Check out code
uses: actions/checkout@v4
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Rockchip build
uses: docker/bake-action@v3
with:
push: true
targets: rk
files: docker/rockchip/rk.hcl
set: |
rk.tags=${{ steps.setup.outputs.image-name }}-rk
*.cache-from=type=gha
combined_extra_builds:
runs-on: ubuntu-latest
name: Combined Extra Builds

@@ -179,57 +203,18 @@ jobs:
h8l.tags=${{ steps.setup.outputs.image-name }}-h8l
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l,mode=max
#- name: AMD/ROCm general build
#  env:
#    AMDGPU: gfx
#    HSA_OVERRIDE: 0
#  uses: docker/bake-action@v3
#  with:
#    push: true
#    targets: rocm
#    files: docker/rocm/rocm.hcl
#    set: |
#      rocm.tags=${{ steps.setup.outputs.image-name }}-rocm
#      *.cache-from=type=gha
#- name: AMD/ROCm gfx900
#  env:
#    AMDGPU: gfx900
#    HSA_OVERRIDE: 1
#    HSA_OVERRIDE_GFX_VERSION: 9.0.0
#  uses: docker/bake-action@v3
#  with:
#    push: true
#    targets: rocm
#    files: docker/rocm/rocm.hcl
#    set: |
#      rocm.tags=${{ steps.setup.outputs.image-name }}-rocm-gfx900
#      *.cache-from=type=gha
#- name: AMD/ROCm gfx1030
#  env:
#    AMDGPU: gfx1030
#    HSA_OVERRIDE: 1
#    HSA_OVERRIDE_GFX_VERSION: 10.3.0
#  uses: docker/bake-action@v3
#  with:
#    push: true
#    targets: rocm
#    files: docker/rocm/rocm.hcl
#    set: |
#      rocm.tags=${{ steps.setup.outputs.image-name }}-rocm-gfx1030
#      *.cache-from=type=gha
#- name: AMD/ROCm gfx1100
#  env:
#    AMDGPU: gfx1100
#    HSA_OVERRIDE: 1
#    HSA_OVERRIDE_GFX_VERSION: 11.0.0
#  uses: docker/bake-action@v3
#  with:
#    push: true
#    targets: rocm
#    files: docker/rocm/rocm.hcl
#    set: |
#      rocm.tags=${{ steps.setup.outputs.image-name }}-rocm-gfx1100
#      *.cache-from=type=gha
- name: AMD/ROCm general build
env:
AMDGPU: gfx
HSA_OVERRIDE: 0
uses: docker/bake-action@v3
with:
push: true
targets: rocm
files: docker/rocm/rocm.hcl
set: |
rocm.tags=${{ steps.setup.outputs.image-name }}-rocm
*.cache-from=type=gha
# The majority of users running arm64 are rpi users, so the rpi
# build should be the primary arm64 image
assemble_default_build:

@@ -244,7 +229,7 @@ jobs:
with:
string: ${{ github.repository }}
- name: Log in to the Container registry
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
with:
registry: ghcr.io
username: ${{ github.actor }}
.github/workflows/pull_request.yml (vendored, 5 changed lines)

@@ -1,6 +1,9 @@
name: On pull request

on: pull_request
on:
pull_request:
paths-ignore:
- 'docs/**'

env:
DEFAULT_PYTHON: 3.9
.github/workflows/release.yml (vendored, 6 changed lines)

@@ -16,7 +16,7 @@ jobs:
with:
string: ${{ github.repository }}
- name: Log in to the Container registry
uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
with:
registry: ghcr.io
username: ${{ github.actor }}

@@ -34,14 +34,14 @@ jobs:
STABLE_TAG=${BASE}:stable
PULL_TAG=${BASE}:${BUILD_TAG}
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${VERSION_TAG}
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${VERSION_TAG}-${variant}
done

# stable tag
if [[ "${BUILD_TYPE}" == "stable" ]]; then
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${STABLE_TAG}
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${STABLE_TAG}-${variant}
done
fi
.gitignore (vendored, 3 changed lines)

@@ -1,5 +1,6 @@
.DS_Store
*.pyc
__pycache__
.mypy_cache
*.swp
debug
.vscode/*
@@ -4,6 +4,7 @@ from statistics import mean

import numpy as np

import frigate.util as util
from frigate.config import DetectorTypeEnum
from frigate.object_detection import (
ObjectDetectProcess,

@@ -90,7 +91,7 @@ edgetpu_process_2 = ObjectDetectProcess(
)

for x in range(0, 10):
camera_process = mp.Process(
camera_process = util.Process(
target=start, args=(x, 300, detection_queue, events[str(x)])
)
camera_process.daemon = True
@@ -50,7 +50,7 @@ RUN PYTHON_VERSION=$(python3 --version 2>&1 | awk '{print $2}' | cut -d. -f1,2)
RUN . /etc/environment && \
git clone https://github.com/hailo-ai/hailort.git /opt/hailort && \
cd /opt/hailort && \
git checkout v4.17.0 && \
git checkout v4.18.0 && \
cmake -H. -Bbuild -DCMAKE_BUILD_TYPE=Release -DHAILO_BUILD_PYBIND=1 -DPYBIND11_PYTHON_VERSION=${PYTHON_VERSION} && \
cmake --build build --config release --target libhailort && \
cmake --build build --config release --target _pyhailort && \

@@ -91,7 +91,7 @@ RUN pip3 install -U /deps/hailo-wheels/*.whl
RUN . /etc/environment && \
mv /usr/local/lib/python${PYTHON_VERSION}/dist-packages/hailo_platform/pyhailort/libhailort.so /usr/lib/${CC} && \
cd /usr/lib/${CC}/ && \
ln -s libhailort.so libhailort.so.4.17.0
ln -s libhailort.so libhailort.so.4.18.0

# Copy base files from the rootfs stage
COPY --from=rootfs / /
@@ -2,7 +2,7 @@

# Update package list and install dependencies
sudo apt-get update
sudo apt-get install -y build-essential cmake git wget linux-modules-extra-$(uname -r)
sudo apt-get install -y build-essential cmake git wget

arch=$(uname -m)

@@ -13,7 +13,7 @@ else
fi

# Clone the HailoRT driver repository
git clone --depth 1 --branch v4.17.0 https://github.com/hailo-ai/hailort-drivers.git
git clone --depth 1 --branch v4.18.0 https://github.com/hailo-ai/hailort-drivers.git

# Build and install the HailoRT driver
cd hailort-drivers/linux/pcie

@@ -23,13 +23,26 @@ sudo make install
# Load the Hailo PCI driver
sudo modprobe hailo_pci

if [ $? -ne 0 ]; then
echo "Unable to load hailo_pci module, common reasons for this are:"
echo "- Key was rejected by service: Secure Boot is enabling disallowing install."
echo "- Permissions are not setup correctly."
exit 1
fi

# Download and install the firmware
cd ../../
./download_firmware.sh
sudo mv hailo8_fw.4.17.0.bin /lib/firmware/hailo/hailo8_fw.bin

# verify the firmware folder is present
if [ ! -d /lib/firmware/hailo ]; then
sudo mkdir /lib/firmware/hailo
fi
sudo mv hailo8_fw.4.18.0.bin /lib/firmware/hailo/hailo8_fw.bin

# Install udev rules
sudo cp ./linux/pcie/51-hailo-udev.rules /etc/udev/rules.d/
sudo udevadm control --reload-rules && sudo udevadm trigger

echo "HailoRT driver installation complete."
echo "reboot your system to load the firmware!"
@@ -30,6 +30,16 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
--mount=type=cache,target=/root/.ccache \
/deps/build_nginx.sh

FROM wget AS sqlite-vec
ARG DEBIAN_FRONTEND

# Build sqlite_vec from source
COPY docker/main/build_sqlite_vec.sh /deps/build_sqlite_vec.sh
RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
--mount=type=bind,source=docker/main/build_sqlite_vec.sh,target=/deps/build_sqlite_vec.sh \
--mount=type=cache,target=/root/.ccache \
/deps/build_sqlite_vec.sh

FROM scratch AS go2rtc
ARG TARGETARCH
WORKDIR /rootfs/usr/local/go2rtc/bin

@@ -163,20 +173,18 @@ RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
COPY docker/main/requirements.txt /requirements.txt
RUN pip3 install -r /requirements.txt

# Build pysqlite3 from source to support ChromaDB
# Build pysqlite3 from source
COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh
RUN /build_pysqlite3.sh

COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt

COPY docker/main/requirements-wheels-post.txt /requirements-wheels-post.txt
RUN pip3 wheel --no-deps --wheel-dir=/wheels-post -r /requirements-wheels-post.txt

# Collect deps in a single layer
FROM scratch AS deps-rootfs
COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
COPY --from=sqlite-vec /usr/local/lib/ /usr/local/lib/
COPY --from=go2rtc /rootfs/ /
COPY --from=libusb-build /usr/local/lib /usr/local/lib
COPY --from=tempio /rootfs/ /

@@ -197,12 +205,11 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
ENV NVIDIA_VISIBLE_DEVICES=all
ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"

# Turn off Chroma Telemetry: https://docs.trychroma.com/telemetry#opting-out
ENV ANONYMIZED_TELEMETRY=False
# Allow resetting the chroma database
ENV ALLOW_RESET=True
# Disable tokenizer parallelism warning
# https://stackoverflow.com/questions/62691279/how-to-disable-tokenizers-parallelism-true-false-warning/72926996#72926996
ENV TOKENIZERS_PARALLELISM=true
# https://github.com/huggingface/transformers/issues/27214
ENV TRANSFORMERS_NO_ADVISORY_WARNINGS=1

ENV PATH="/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
ENV LIBAVFORMAT_VERSION_MAJOR=60

@@ -215,14 +222,6 @@ RUN --mount=type=bind,from=wheels,source=/wheels,target=/deps/wheels \
python3 -m pip install --upgrade pip && \
pip3 install -U /deps/wheels/*.whl

# We have to uninstall this dependency specifically
# as it will break onnxruntime-openvino
RUN pip3 uninstall -y onnxruntime

RUN --mount=type=bind,from=wheels,source=/wheels-post,target=/deps/wheels \
python3 -m pip install --upgrade pip && \
pip3 install -U /deps/wheels/*.whl

COPY --from=deps-rootfs / /

RUN ldconfig

@@ -239,7 +238,7 @@ ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
ENTRYPOINT ["/init"]
CMD []

HEALTHCHECK --start-period=120s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
HEALTHCHECK --start-period=300s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1

# Frigate deps with Node.js and NPM for devcontainer
docker/main/build_sqlite_vec.sh (new executable file, 31 lines)

@@ -0,0 +1,31 @@
#!/bin/bash

set -euxo pipefail

SQLITE_VEC_VERSION="0.1.3"

cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list
sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list
apt-get update
apt-get -yqq build-dep sqlite3 gettext git

mkdir /tmp/sqlite_vec
# Grab the sqlite_vec source code.
wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VERSION}.tar.gz
tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec

cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION}

mkdir -p vendor
wget -O sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip
unzip sqlite-amalgamation.zip
mv sqlite-amalgamation-3450300/* vendor/
rmdir sqlite-amalgamation-3450300
rm sqlite-amalgamation.zip

# build loadable module
make loadable

# install it
cp dist/vec0.* /usr/local/lib
@@ -8,11 +8,13 @@ apt-get -qq install --no-install-recommends -y \
apt-transport-https \
gnupg \
wget \
lbzip2 \
procps vainfo \
unzip locales tzdata libxml2 xz-utils \
python3.9 \
python3-pip \
curl \
lsof \
jq \
nethogs

@@ -40,35 +42,34 @@ apt-get -qq install --no-install-recommends --no-install-suggests -y \
# btbn-ffmpeg -> amd64
if [[ "${TARGETARCH}" == "amd64" ]]; then
mkdir -p /usr/lib/ffmpeg/5.0
mkdir -p /usr/lib/ffmpeg/6.0
mkdir -p /usr/lib/ffmpeg/7.0
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux64-gpl-5.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-08-31-12-50/ffmpeg-n6.1.2-2-gb534cc666e-linux64-gpl-6.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/6.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/6.0/doc /usr/lib/ffmpeg/6.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linux64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
fi

# ffmpeg -> arm64
if [[ "${TARGETARCH}" == "arm64" ]]; then
mkdir -p /usr/lib/ffmpeg/5.0
mkdir -p /usr/lib/ffmpeg/6.0
mkdir -p /usr/lib/ffmpeg/7.0
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2024-08-31-12-50/ffmpeg-n6.1.2-2-gb534cc666e-linuxarm64-gpl-6.1.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/6.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/6.0/doc /usr/lib/ffmpeg/6.0/bin/ffplay
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linuxarm64-gpl-7.0.tar.xz"
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
fi

# arch specific packages
if [[ "${TARGETARCH}" == "amd64" ]]; then
# use debian bookworm for hwaccel packages
# use debian bookworm for amd / intel-i965 driver packages
echo 'deb https://deb.debian.org/debian bookworm main contrib non-free' >/etc/apt/sources.list.d/debian-bookworm.list
apt-get -qq update
apt-get -qq install --no-install-recommends --no-install-suggests -y \
intel-opencl-icd intel-media-va-driver-non-free i965-va-driver \
libmfx-gen1.2 libmfx1 onevpl-tools intel-gpu-tools \
i965-va-driver intel-gpu-tools onevpl-tools \
libva-drm2 \
mesa-va-drivers radeontop

@@ -76,12 +77,26 @@ if [[ "${TARGETARCH}" == "amd64" ]]; then
apt-get -qq install --no-install-recommends --no-install-suggests -y \
i965-va-driver-shaders

# intel packages use zst compression so we need to update dpkg
apt-get install -y dpkg

rm -f /etc/apt/sources.list.d/debian-bookworm.list

# use intel apt intel packages
wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" | tee /etc/apt/sources.list.d/intel-gpu-jammy.list
apt-get -qq update
apt-get -qq install --no-install-recommends --no-install-suggests -y \
intel-opencl-icd intel-level-zero-gpu intel-media-va-driver-non-free \
libmfx1 libmfxgen1 libvpl2

rm -f /usr/share/keyrings/intel-graphics.gpg
rm -f /etc/apt/sources.list.d/intel-gpu-jammy.list
fi

if [[ "${TARGETARCH}" == "arm64" ]]; then
apt-get -qq install --no-install-recommends --no-install-suggests -y \
libva-drm2 mesa-va-drivers
libva-drm2 mesa-va-drivers radeontop
fi

# install vulkan
@@ -1,3 +0,0 @@
# ONNX
onnxruntime-openvino == 1.18.* ; platform_machine == 'x86_64'
onnxruntime == 1.18.* ; platform_machine == 'aarch64'

@@ -1,8 +1,12 @@
click == 8.1.*
Flask == 3.0.*
Flask_Limiter == 3.8.*
# FastAPI
starlette-context == 0.3.6
fastapi == 0.115.*
uvicorn == 0.30.*
slowapi == 0.1.*
imutils == 0.5.*
joserfc == 1.0.*
pathvalidate == 3.2.*
markupsafe == 2.1.*
mypy == 1.6.1
numpy == 1.26.*

@@ -12,15 +16,13 @@ paho-mqtt == 2.1.*
pandas == 2.2.*
peewee == 3.17.*
peewee_migrate == 1.13.*
psutil == 5.9.*
psutil == 6.1.*
pydantic == 2.8.*
git+https://github.com/fbcotter/py3nvml#egg=py3nvml
PyYAML == 6.0.*
pytz == 2024.1
pytz == 2024.*
pyzmq == 26.2.*
ruamel.yaml == 0.18.*
tzlocal == 5.2
types-PyYAML == 6.0.*
requests == 2.32.*
types-requests == 2.32.*
scipy == 1.13.*

@@ -29,14 +31,15 @@ setproctitle == 1.3.*
ws4py == 0.5.*
unidecode == 1.3.*
# OpenVino & ONNX
openvino == 2024.1.*
openvino == 2024.3.*
onnxruntime-openvino == 1.19.* ; platform_machine == 'x86_64'
onnxruntime == 1.19.* ; platform_machine == 'aarch64'
# Embeddings
chromadb == 0.5.0
onnx_clip == 4.0.*
transformers == 4.45.*
# Generative AI
google-generativeai == 0.6.*
ollama == 0.2.*
openai == 1.30.*
google-generativeai == 0.8.*
ollama == 0.3.*
openai == 1.51.*
# push notifications
py-vapid == 1.9.*
pywebpush == 2.0.*
@@ -1 +0,0 @@
chroma

@@ -1 +0,0 @@
chroma-pipeline

@@ -1,4 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash

exec logutil-service /dev/shm/logs/chroma

@@ -1 +0,0 @@
longrun

@@ -1,28 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Take down the S6 supervision tree when the service exits

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

declare exit_code_container
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
readonly exit_code_container
readonly exit_code_service="${1}"
readonly exit_code_signal="${2}"
readonly service="ChromaDB"

echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"

if [[ "${exit_code_service}" -eq 256 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
fi
elif [[ "${exit_code_service}" -ne 0 ]]; then
if [[ "${exit_code_container}" -eq 0 ]]; then
echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
fi
fi

exec /run/s6/basedir/bin/halt

@@ -1 +0,0 @@
chroma-log

@@ -1,27 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Start the Frigate service

set -o errexit -o nounset -o pipefail

# Logs should be sent to stdout so that s6 can collect them

# Tell S6-Overlay not to restart this service
s6-svc -O .

search_enabled=`python3 /usr/local/semantic_search/get_search_settings.py | jq -r .enabled`

# Replace the bash process with the Frigate process, redirecting stderr to stdout
exec 2>&1

if [[ "$search_enabled" == 'true' ]]; then
echo "[INFO] Starting ChromaDB..."
exec /usr/local/chroma run --path /config/chroma --host 127.0.0.1
else
while true
do
sleep 9999
continue
done
exit 0
fi

@@ -1 +0,0 @@
120000

@@ -1 +0,0 @@
longrun
@@ -4,7 +4,7 @@

set -o errexit -o nounset -o pipefail

dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync /dev/shm/logs/chroma)
dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync)

mkdir -p "${dirs[@]}"
chown nobody:nogroup "${dirs[@]}"
@@ -1,14 +0,0 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-s
__import__("pysqlite3")

import re
import sys

sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")

from chromadb.cli.cli import app

if __name__ == "__main__":
sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
sys.exit(app())
@@ -6,16 +6,19 @@ import shutil
import sys
from pathlib import Path

import yaml
from ruamel.yaml import YAML

sys.path.insert(0, "/opt/frigate")
from frigate.const import BIRDSEYE_PIPE  # noqa: E402
from frigate.ffmpeg_presets import (  # noqa: E402
parse_preset_hardware_acceleration_encode,
from frigate.const import (
BIRDSEYE_PIPE,
DEFAULT_FFMPEG_VERSION,
INCLUDED_FFMPEG_VERSIONS,
)
from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode

sys.path.remove("/opt/frigate")

yaml = YAML()

FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith("FRIGATE_")}
# read docker secret files as env vars too

@@ -38,7 +41,7 @@ try:
raw_config = f.read()

if config_file.endswith((".yaml", ".yml")):
config: dict[str, any] = yaml.safe_load(raw_config)
config: dict[str, any] = yaml.load(raw_config)
elif config_file.endswith(".json"):
config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:

@@ -110,13 +113,11 @@ else:
path = config.get("ffmpeg", {}).get("path", "default")
if path == "default":
if shutil.which("ffmpeg") is None:
ffmpeg_path = "/usr/lib/ffmpeg/6.0/bin/ffmpeg"
ffmpeg_path = f"/usr/lib/ffmpeg/{DEFAULT_FFMPEG_VERSION}/bin/ffmpeg"
else:
ffmpeg_path = "ffmpeg"
elif path == "6.0":
ffmpeg_path = "/usr/lib/ffmpeg/6.0/bin/ffmpeg"
elif path == "5.0":
ffmpeg_path = "/usr/lib/ffmpeg/5.0/bin/ffmpeg"
elif path in INCLUDED_FFMPEG_VERSIONS:
ffmpeg_path = f"/usr/lib/ffmpeg/{path}/bin/ffmpeg"
else:
ffmpeg_path = f"{path}/bin/ffmpeg"
@@ -104,6 +104,8 @@ http {

add_header Cache-Control "no-store";
expires off;

keepalive_disable safari;
}

location /stream/ {

@@ -224,7 +226,7 @@ http {

location ~* /api/.*\.(jpg|jpeg|png|webp|gif)$ {
include auth_request.conf;
rewrite ^/api/(.*)$ $1 break;
rewrite ^/api/(.*)$ /$1 break;
proxy_pass http://frigate_api;
include proxy.conf;
}
@@ -3,7 +3,9 @@
import json
import os

import yaml
from ruamel.yaml import YAML

yaml = YAML()

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

@@ -17,7 +19,7 @@ try:
raw_config = f.read()

if config_file.endswith((".yaml", ".yml")):
config: dict[str, any] = yaml.safe_load(raw_config)
config: dict[str, any] = yaml.load(raw_config)
elif config_file.endswith(".json"):
config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
@@ -1,28 +0,0 @@
"""Prints the semantic_search config as json to stdout."""

import json
import os

import yaml

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

# Check if we can use .yaml instead of .yml
config_file_yaml = config_file.replace(".yml", ".yaml")
if os.path.isfile(config_file_yaml):
config_file = config_file_yaml

try:
with open(config_file) as f:
raw_config = f.read()

if config_file.endswith((".yaml", ".yml")):
config: dict[str, any] = yaml.safe_load(raw_config)
elif config_file.endswith(".json"):
config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
config: dict[str, any] = {}

search_config: dict[str, any] = config.get("semantic_search", {"enabled": False})

print(json.dumps(search_config))
@@ -24,3 +24,4 @@ RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffmpeg
RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffprobe
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.1-5/ffmpeg /usr/lib/ffmpeg/6.0/bin/
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.1-5/ffprobe /usr/lib/ffmpeg/6.0/bin/
ENV PATH="/usr/lib/ffmpeg/6.0/bin/:${PATH}"
@@ -23,11 +23,11 @@ COPY docker/rocm/rocm-pin-600 /etc/apt/preferences.d/

RUN apt-get update

RUN apt-get -y install --no-install-recommends migraphx
RUN apt-get -y install --no-install-recommends migraphx hipfft roctracer
RUN apt-get -y install --no-install-recommends migraphx-dev

RUN mkdir -p /opt/rocm-dist/opt/rocm-$ROCM/lib
RUN cd /opt/rocm-$ROCM/lib && cp -dpr libMIOpen*.so* libamd*.so* libhip*.so* libhsa*.so* libmigraphx*.so* librocm*.so* librocblas*.so* /opt/rocm-dist/opt/rocm-$ROCM/lib/
RUN cd /opt/rocm-$ROCM/lib && cp -dpr libMIOpen*.so* libamd*.so* libhip*.so* libhsa*.so* libmigraphx*.so* librocm*.so* librocblas*.so* libroctracer*.so* librocfft*.so* /opt/rocm-dist/opt/rocm-$ROCM/lib/
RUN cd /opt/rocm-dist/opt/ && ln -s rocm-$ROCM rocm

RUN mkdir -p /opt/rocm-dist/etc/ld.so.conf.d/

@@ -69,7 +69,11 @@ RUN apt-get -y install libnuma1

WORKDIR /opt/frigate/
COPY --from=rootfs / /
COPY docker/rocm/rootfs/ /

COPY docker/rocm/requirements-wheels-rocm.txt /requirements.txt
RUN python3 -m pip install --upgrade pip \
&& pip3 uninstall -y onnxruntime-openvino \
&& pip3 install -r /requirements.txt

#######################################################################
FROM scratch AS rocm-dist

@@ -79,6 +83,7 @@ ARG AMDGPU

COPY --from=rocm /opt/rocm-$ROCM/bin/rocminfo /opt/rocm-$ROCM/bin/migraphx-driver /opt/rocm-$ROCM/bin/
COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*$AMDGPU* /opt/rocm-$ROCM/share/miopen/db/
COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*gfx908* /opt/rocm-$ROCM/share/miopen/db/
COPY --from=rocm /opt/rocm-$ROCM/lib/rocblas/library/*$AMDGPU* /opt/rocm-$ROCM/lib/rocblas/library/
COPY --from=rocm /opt/rocm-dist/ /
COPY --from=debian-build /opt/rocm/lib/migraphx.cpython-39-x86_64-linux-gnu.so /opt/rocm-$ROCM/lib/

@@ -101,6 +106,3 @@ ENV HSA_OVERRIDE_GFX_VERSION=$HSA_OVERRIDE_GFX_VERSION
#######################################################################
FROM rocm-prelim-hsa-override$HSA_OVERRIDE as rocm-deps

# Request yolov8 download at startup
ENV DOWNLOAD_YOLOV8=1
docker/rocm/requirements-wheels-rocm.txt (new file, 1 line)

@@ -0,0 +1 @@
onnxruntime-rocm @ https://github.com/NickM-27/frigate-onnxruntime-rocm/releases/download/v1.0.0/onnxruntime_rocm-1.17.3-cp39-cp39-linux_x86_64.whl
@@ -1,20 +0,0 @@
#!/command/with-contenv bash
# shellcheck shell=bash
# Compile YoloV8 ONNX files into ROCm MIGraphX files

OVERRIDE=$(cd /opt/frigate && python3 -c 'import frigate.detectors.plugins.rocm as rocm; print(rocm.auto_override_gfx_version())')

if ! test -z "$OVERRIDE"; then
echo "Using HSA_OVERRIDE_GFX_VERSION=${OVERRIDE}"
export HSA_OVERRIDE_GFX_VERSION=$OVERRIDE
fi

for onnx in /config/model_cache/yolov8/*.onnx
do
mxr="${onnx%.onnx}.mxr"
if ! test -f $mxr; then
echo "processing $onnx into $mxr"
/opt/rocm/bin/migraphx-driver compile $onnx --optimize --gpu --enable-offload-copy --binary -o $mxr
fi
done

@@ -1 +0,0 @@
oneshot

@@ -1 +0,0 @@
/etc/s6-overlay/s6-rc.d/compile-rocm-models/run
@@ -3,8 +3,6 @@
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
ARG DEBIAN_FRONTEND=noninteractive

ARG TRT_BASE=nvcr.io/nvidia/tensorrt:23.03-py3

# Make this a separate target so it can be built/cached optionally
FROM wheels as trt-wheels
ARG DEBIAN_FRONTEND

@@ -15,7 +13,7 @@ COPY docker/tensorrt/requirements-amd64.txt /requirements-tensorrt.txt
RUN mkdir -p /trt-wheels && pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt

# Build CuDNN
FROM ${TRT_BASE} AS cudnn-deps
FROM wget AS cudnn-deps

ARG COMPUTE_LEVEL
@@ -9,6 +9,6 @@ nvidia-cuda-runtime-cu11 == 11.8.*; platform_machine == 'x86_64'
nvidia-cublas-cu11 == 11.11.3.6; platform_machine == 'x86_64'
nvidia-cudnn-cu11 == 8.6.0.*; platform_machine == 'x86_64'
nvidia-cufft-cu11==10.*; platform_machine == 'x86_64'
onnx==1.14.0; platform_machine == 'x86_64'
onnxruntime-gpu==1.17.*; platform_machine == 'x86_64'
onnx==1.16.*; platform_machine == 'x86_64'
onnxruntime-gpu==1.18.*; platform_machine == 'x86_64'
protobuf==3.20.3; platform_machine == 'x86_64'
@@ -1,5 +1,10 @@
# Website

This website is built using [Docusaurus 2](https://v2.docusaurus.io/), a modern static website generator.
This website is built using [Docusaurus 3.5](https://docusaurus.io/docs), a modern static website generator.

For installation and contributing instructions, please follow the [Contributing Docs](https://docs.frigate.video/development/contributing).

# Development

1. Run `npm i` to install dependencies
2. Run `npm run start` to start the website
@@ -183,7 +183,7 @@ To do this:
3. Give `go2rtc` execute permission.
4. Restart Frigate and the custom version will be used, you can verify by checking go2rtc logs.

## Validating your config.yaml file updates
## Validating your config.yml file updates

When frigate starts up, it checks whether your config file is valid, and if it is not, the process exits. To minimize interruptions when updating your config, you have three options -- you can edit the config via the WebUI which has built in validation, use the config API, or you can validate on the command line using the frigate docker container.

@@ -211,5 +211,5 @@ docker run \
--entrypoint python3 \
ghcr.io/blakeblackshear/frigate:stable \
-u -m frigate \
--validate_config
--validate-config
```
@@ -26,7 +26,7 @@ In the event that you are locked out of your instance, you can tell Frigate to r

## Login failure rate limiting

In order to limit the risk of brute force attacks, rate limiting is available for login failures. This is implemented with Flask-Limiter, and the string notation for valid values is available in [the documentation](https://flask-limiter.readthedocs.io/en/stable/configuration.html#rate-limit-string-notation).
In order to limit the risk of brute force attacks, rate limiting is available for login failures. This is implemented with SlowApi, and the string notation for valid values is available in [the documentation](https://limits.readthedocs.io/en/stable/quickstart.html#examples).

For example, `1/second;5/minute;20/hour` will rate limit the login endpoint when failures occur more than:
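As a rough illustration of where that rate-limit string goes, a config sketch might look like the following. The `failed_login_rate_limit` key under `auth` is an assumption here; confirm the exact option name against the authentication docs for your Frigate version.

```yaml
auth:
  # hypothetical key name; the value uses the rate-limit string notation described above
  failed_login_rate_limit: "1/second;5/minute;20/hour"
```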
@@ -9,6 +9,12 @@ This page makes use of presets of FFmpeg args. For more information on presets,

:::

:::note

Many cameras support encoding options which greatly affect the live view experience, see the [Live view](/configuration/live) page for more info.

:::

## MJPEG Cameras

Note that mjpeg cameras require encoding the video into h264 for recording, and restream roles. This will use significantly more CPU than if the cameras supported h264 feeds directly. It is recommended to use the restream role to create an h264 restream and then use that as the source for ffmpeg.
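To make that recommendation concrete, a sketch of an MJPEG camera restreamed to H.264 via go2rtc could look roughly like this. The camera name, URL, and port are placeholders; check the restream and go2rtc docs for the exact source syntax supported by your version.

```yaml
go2rtc:
  streams:
    mjpeg_cam:
      # transcode the raw MJPEG feed to H.264 so it can be recorded efficiently
      - "ffmpeg:http://192.168.1.10/video.mjpg#video=h264"

cameras:
  mjpeg_cam:
    ffmpeg:
      inputs:
        # consume the restreamed H.264 feed instead of the MJPEG source
        - path: rtsp://127.0.0.1:8554/mjpeg_cam
          roles:
            - detect
            - record
```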
@@ -79,29 +79,41 @@ cameras:

If the ONVIF connection is successful, PTZ controls will be available in the camera's WebUI.

:::tip

If your ONVIF camera does not require authentication credentials, you may still need to specify an empty string for `user` and `password`, eg: `user: ""` and `password: ""`, as shown in the sketch below.

:::

An ONVIF-capable camera that supports relative movement within the field of view (FOV) can also be configured to automatically track moving objects and keep them in the center of the frame. For autotracking setup, see the [autotracking](autotracking.md) docs.
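A minimal sketch of that tip (the camera name, host, and port are placeholders; the keys assume the standard camera-level `onvif` section):

```yaml
cameras:
  ptz_cam:
    onvif:
      host: 192.168.1.20
      port: 8000
      # camera requires no ONVIF authentication, but empty strings may still be needed
      user: ""
      password: ""
```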
## ONVIF PTZ camera recommendations

This list of working and non-working PTZ cameras is based on user feedback.

| Brand or specific camera | PTZ Controls | Autotracking | Notes |
| ------------------------ | :----------: | :----------: | ----- |
| Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
| Amcrest ASH21 | ❌ | ❌ | No ONVIF support |
| Ctronics PTZ | ✅ | ❌ | |
| Dahua | ✅ | ✅ | |
| Foscam R5 | ✅ | ❌ | |
| Hanwha XNP-6550RH | ✅ | ❌ | |
| Hikvision | ✅ | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
| Reolink 511WA | ✅ | ❌ | Zoom only |
| Reolink E1 Pro | ✅ | ❌ | |
| Reolink E1 Zoom | ✅ | ❌ | |
| Reolink RLC-823A 16x | ✅ | ❌ | |
| Sunba 405-D20X | ✅ | ❌ | |
| Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
| Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
| Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |

| Brand or specific camera | PTZ Controls | Autotracking | Notes |
| ---------------------------- | :----------: | :----------: | ----- |
| Amcrest | ✅ | ✅ | ⛔️ Generally, Amcrest should work, but some older models (like the common IP2M-841) don't support autotracking |
| Amcrest ASH21 | ✅ | ❌ | ONVIF service port: 80 |
| Amcrest IP4M-S2112EW-AI | ✅ | ❌ | FOV relative movement not supported. |
| Amcrest IP5M-1190EW | ✅ | ❌ | ONVIF Port: 80. FOV relative movement not supported. |
| Ctronics PTZ | ✅ | ❌ | |
| Dahua | ✅ | ✅ | |
| Dahua DH-SD2A500HB | ✅ | ❌ | |
| Foscam R5 | ✅ | ❌ | |
| Hanwha XNP-6550RH | ✅ | ❌ | |
| Hikvision | ✅ | ❌ | Incomplete ONVIF support (MoveStatus won't update even on latest firmware) - reported with HWP-N4215IH-DE and DS-2DE3304W-DE, but likely others |
| Hikvision DS-2DE3A404IWG-E/W | ✅ | ✅ | |
| Reolink 511WA | ✅ | ❌ | Zoom only |
| Reolink E1 Pro | ✅ | ❌ | |
| Reolink E1 Zoom | ✅ | ❌ | |
| Reolink RLC-823A 16x | ✅ | ❌ | |
| Speco O8P32X | ✅ | ❌ | |
| Sunba 405-D20X | ✅ | ❌ | |
| Tapo | ✅ | ❌ | Many models supported, ONVIF Service Port: 2020 |
| Uniview IPC672LR-AX4DUPK | ✅ | ❌ | Firmware says FOV relative movement is supported, but camera doesn't actually move when sending ONVIF commands |
| Uniview IPC6612SR-X33-VG | ✅ | ✅ | Leave `calibrate_on_startup` as `False`. A user has reported that zooming with `absolute` is working. |
| Vikylin PTZ-2804X-I2 | ❌ | ❌ | Incomplete ONVIF support |

## Setting up camera groups
@@ -3,7 +3,7 @@ id: genai
|
||||
title: Generative AI
|
||||
---
|
||||
|
||||
Generative AI can be used to automatically generate descriptions based on the thumbnails of your tracked objects. This helps with [Semantic Search](/configuration/semantic_search) in Frigate by providing detailed text descriptions as a basis of the search query.
|
||||
Generative AI can be used to automatically generate descriptive text based on the thumbnails of your tracked objects. This helps with [Semantic Search](/configuration/semantic_search) in Frigate to provide more context about your tracked objects.
|
||||
|
||||
Semantic Search must be enabled to use Generative AI. Descriptions are accessed via the _Explore_ view in the Frigate UI by clicking on a tracked object's thumbnail.
|
||||
|
||||
@@ -29,11 +29,21 @@ cameras:
|
||||
|
||||
## Ollama
|
||||
|
||||
[Ollama](https://ollama.com/) allows you to self-host large language models and keep everything running locally. It provides a nice API over [llama.cpp](https://github.com/ggerganov/llama.cpp). It is highly recommended to host this server on a machine with an Nvidia graphics card, or on a Apple silicon Mac for best performance. Most of the 7b parameter 4-bit vision models will fit inside 8GB of VRAM. There is also a [docker container](https://hub.docker.com/r/ollama/ollama) available.
|
||||
:::warning
|
||||
|
||||
Using Ollama on CPU is not recommended, high inference times make using generative AI impractical.
|
||||
|
||||
:::
|
||||
|
||||
[Ollama](https://ollama.com/) allows you to self-host large language models and keep everything running locally. It provides a nice API over [llama.cpp](https://github.com/ggerganov/llama.cpp). It is highly recommended to host this server on a machine with an Nvidia graphics card, or on a Apple silicon Mac for best performance.
|
||||
|
||||
Most of the 7b parameter 4-bit vision models will fit inside 8GB of VRAM. There is also a [docker container](https://hub.docker.com/r/ollama/ollama) available.
|
||||
|
||||
Parallel requests also come with some caveats. See the [Ollama documentation](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-does-ollama-handle-concurrent-requests).
|
||||
|
||||
### Supported Models
|
||||
|
||||
You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`.
|
||||
You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`. Note that Frigate will not automatically download the model you specify in your config, you must download the model to your local instance of Ollama first i.e. by running `ollama pull llava:7b` on your Ollama server/Docker container. Note that the model specified in Frigate's config must match the downloaded model tag.
|
||||
|
||||
:::note
|
||||
|
||||
@@ -48,7 +58,7 @@ genai:
|
||||
enabled: True
|
||||
provider: ollama
|
||||
base_url: http://localhost:11434
|
||||
model: llava
|
||||
model: llava:7b
|
||||
```
|
||||
|
||||
## Google Gemini
|
||||
@@ -100,12 +110,40 @@ genai:
|
||||
model: gpt-4o
|
||||
```
|
||||
|
||||
## Azure OpenAI
|
||||
|
||||
Microsoft offers several vision models through Azure OpenAI. A subscription is required.
|
||||
|
||||
### Supported Models
|
||||
|
||||
You must use a vision capable model with Frigate. Current model variants can be found [in their documentation](https://learn.microsoft.com/en-us/azure/ai-services/openai/concepts/models). At the time of writing, this includes `gpt-4o` and `gpt-4-turbo`.
|
||||
|
||||
### Create Resource and Get API Key
|
||||
|
||||
To start using Azure OpenAI, you must first [create a resource](https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource). You'll need your API key and resource URL, which must include the `api-version` parameter (see the example below). The model field is not required in your configuration as the model is part of the deployment name you chose when deploying the resource.
|
||||
|
||||
### Configuration
|
||||
|
||||
```yaml
|
||||
genai:
|
||||
enabled: True
|
||||
provider: azure_openai
|
||||
base_url: https://example-endpoint.openai.azure.com/openai/deployments/gpt-4o/chat/completions?api-version=2023-03-15-preview
|
||||
api_key: "{FRIGATE_OPENAI_API_KEY}"
|
||||
```
|
||||
|
||||
## Usage and Best Practices
|
||||
|
||||
Frigate's thumbnail search excels at identifying specific details about tracked objects – for example, using an "image caption" approach to find a "person wearing a yellow vest," "a white dog running across the lawn," or "a red car on a residential street." To enhance this further, Frigate’s default prompts are designed to ask your AI provider about the intent behind the object's actions, rather than just describing its appearance.
|
||||
|
||||
While generating simple descriptions of detected objects is useful, understanding intent provides a deeper layer of insight. Instead of just recognizing "what" is in a scene, Frigate’s default prompts aim to infer "why" it might be there or "what" it could do next. Descriptions tell you what’s happening, but intent gives context. For instance, a person walking toward a door might seem like a visitor, but if they’re moving quickly after hours, you can infer a potential break-in attempt. Detecting a person loitering near a door at night can trigger an alert sooner than simply noting "a person standing by the door," helping you respond based on the situation’s context.
|
||||
|
||||
## Custom Prompts
|
||||
|
||||
Frigate sends multiple frames from the tracked object along with a prompt to your Generative AI provider asking it to generate a description. The default prompt is as follows:
|
||||
|
||||
```
|
||||
Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background.
|
||||
Analyze the sequence of images containing the {label}. Focus on the likely intent or behavior of the {label} based on its actions and movement, rather than describing its appearance or the surroundings. Consider what the {label} is doing, why, and what it might do next.
|
||||
```
|
||||
|
||||
:::tip
|
||||
@@ -122,22 +160,30 @@ genai:
|
||||
provider: ollama
|
||||
base_url: http://localhost:11434
|
||||
model: llava
|
||||
prompt: "Describe the {label} in these images from the {camera} security camera."
|
||||
prompt: "Analyze the {label} in these images from the {camera} security camera. Focus on the actions, behavior, and potential intent of the {label}, rather than just describing its appearance."
|
||||
object_prompts:
|
||||
person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc)."
|
||||
car: "Label the primary vehicle in these images with just the name of the company if it is a delivery vehicle, or the color make and model."
|
||||
person: "Examine the main person in these images. What are they doing and what might their actions suggest about their intent (e.g., approaching a door, leaving an area, standing still)? Do not describe the surroundings or static details."
|
||||
car: "Observe the primary vehicle in these images. Focus on its movement, direction, or purpose (e.g., parking, approaching, circling). If it's a delivery vehicle, mention the company."
|
||||
```
|
||||
|
||||
Prompts can also be overridden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire.
|
||||
Prompts can also be overridden at the camera level to provide a more detailed prompt to the model about your specific camera, if you desire. By default, descriptions will be generated for all tracked objects and all zones, but you can optionally specify `objects` and `required_zones` to only generate descriptions for certain tracked objects or zones.
|
||||
|
||||
Optionally, you can generate the description using a snapshot (if enabled) by setting `use_snapshot` to `True`. By default, this is set to `False`, which sends the thumbnails collected over the object's lifetime to the model. Using a snapshot provides the AI with a higher-resolution image (typically downscaled by the AI itself), but the trade-off is that only a single image is used, which might limit the model's ability to determine object movement or direction.
|
||||
|
||||
```yaml
|
||||
cameras:
|
||||
front_door:
|
||||
genai:
|
||||
prompt: "Describe the {label} in these images from the {camera} security camera at the front door of a house, aimed outward toward the street."
|
||||
use_snapshot: True
|
||||
prompt: "Analyze the {label} in these images from the {camera} security camera at the front door. Focus on the actions and potential intent of the {label}."
|
||||
object_prompts:
|
||||
person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc). If delivering a package, include the company the package is from."
|
||||
cat: "Describe the cat in these images (color, size, tail). Indicate whether or not the cat is by the flower pots. If the cat is chasing a mouse, make up a name for the mouse."
|
||||
person: "Examine the person in these images. What are they doing, and how might their actions suggest their purpose (e.g., delivering something, approaching, leaving)? If they are carrying or interacting with a package, include details about its source or destination."
|
||||
cat: "Observe the cat in these images. Focus on its movement and intent (e.g., wandering, hunting, interacting with objects). If the cat is near the flower pots or engaging in any specific actions, mention it."
|
||||
objects:
|
||||
- person
|
||||
- cat
|
||||
required_zones:
|
||||
- steps
|
||||
```
|
||||
|
||||
### Experiment with prompts
|
||||
|
||||
@@ -65,24 +65,37 @@ Or map in all the `/dev/video*` devices.
|
||||
|
||||
## Intel-based CPUs
|
||||
|
||||
:::info
|
||||
|
||||
**Recommended hwaccel Preset**
|
||||
|
||||
| CPU Generation | Intel Driver | Recommended Preset | Notes |
|
||||
| -------------- | ------------ | ------------------ | ----------------------------------- |
|
||||
| gen1 - gen7 | i965 | preset-vaapi | qsv is not supported |
|
||||
| gen8 - gen12 | iHD | preset-vaapi | preset-intel-qsv-* can also be used |
|
||||
| gen13+ | iHD / Xe | preset-intel-qsv-* | |
|
||||
| Intel Arc GPU | iHD / Xe | preset-intel-qsv-* | |
|
||||
|
||||
:::
|
||||
|
||||
:::note
|
||||
|
||||
The default driver is `iHD`. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the `frigate.yaml` for HA OS users](advanced.md#environment_vars).
|
||||
|
||||
See [The Intel Docs](https://www.intel.com/content/www/us/en/support/articles/000005505/processors.html) to figure out what generation your CPU is.
|
||||
|
||||
:::
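For example, a minimal docker compose fragment setting this variable could look like the sketch below (service name and image tag are placeholders):

```yaml
services:
  frigate:
    image: ghcr.io/blakeblackshear/frigate:stable
    environment:
      # force the older i965 VAAPI driver instead of the default iHD
      LIBVA_DRIVER_NAME: i965
```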
|
||||
|
||||
### Via VAAPI
|
||||
|
||||
VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams. VAAPI is recommended for all generations of Intel-based CPUs.
|
||||
VAAPI supports automatic profile selection so it will work automatically with both H.264 and H.265 streams.
|
||||
|
||||
```yaml
|
||||
ffmpeg:
|
||||
hwaccel_args: preset-vaapi
|
||||
```
|
||||
|
||||
:::note
|
||||
|
||||
With some of the processors, like the J4125, the default driver `iHD` doesn't seem to work correctly for hardware acceleration. You may need to change the driver to `i965` by adding the following environment variable `LIBVA_DRIVER_NAME=i965` to your docker-compose file or [in the `frigate.yaml` for HA OS users](advanced.md#environment_vars).
|
||||
|
||||
:::
|
||||
|
||||
### Via Quicksync (>=10th Generation only)
|
||||
|
||||
If VAAPI does not work for you, you can try QSV if your processor supports it. QSV must be set specifically based on the video encoding of the stream.
|
||||
### Via Quicksync
|
||||
|
||||
#### H.264 streams
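As an illustration, assuming the bundled `preset-intel-qsv-h264` preset from the table above, a config for an H.264 stream could look like:

```yaml
ffmpeg:
  hwaccel_args: preset-intel-qsv-h264
```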
|
||||
|
||||
@@ -370,7 +383,7 @@ Make sure to follow the [Rockchip specific installation instructions](/frigate/i
|
||||
|
||||
### Configuration
|
||||
|
||||
Add one of the following FFmpeg presets to your `config.yaml` to enable hardware video processing:
|
||||
Add one of the following FFmpeg presets to your `config.yml` to enable hardware video processing:
|
||||
|
||||
```yaml
|
||||
# if you try to decode a h264 encoded stream
|
||||
|
||||
@@ -11,11 +11,21 @@ Frigate intelligently uses three different streaming technologies to display you
|
||||
|
||||
The jsmpeg live view will use more browser and client GPU resources. Using go2rtc is highly recommended and will provide a superior experience.
|
||||
|
||||
| Source | Latency | Frame Rate | Resolution | Audio | Requires go2rtc | Other Limitations |
|
||||
| ------ | ------- | ------------------------------------- | ---------- | ---------------------------- | --------------- | ------------------------------------------------------------------------------------ |
|
||||
| jsmpeg | low | same as `detect -> fps`, capped at 10 | 720p | no | no | resolution is configurable, but go2rtc is recommended if you want higher resolutions |
|
||||
| mse | low | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only |
|
||||
| webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config, doesn't support h.265 |
|
||||
| Source | Frame Rate | Resolution | Audio | Requires go2rtc | Notes |
|
||||
| ------ | ------------------------------------- | ---------- | ---------------------------- | --------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| jsmpeg | same as `detect -> fps`, capped at 10 | 720p | no | no | Resolution is configurable, but go2rtc is recommended if you want higher resolutions and better frame rates. jsmpeg is Frigate's default without go2rtc configured. |
|
||||
| mse | native | native | yes (depends on audio codec) | yes | iPhone requires iOS 17.1+, Firefox is h.264 only. This is Frigate's default when go2rtc is configured. |
|
||||
| webrtc | native | native | yes (depends on audio codec) | yes | Requires extra configuration, doesn't support h.265. Frigate attempts to use WebRTC when MSE fails or when using a camera's two-way talk feature. |
|
||||
|
||||
### Camera Settings Recommendations
|
||||
|
||||
If you are using go2rtc, you should adjust the following settings in your camera's firmware for the best experience with Live view:
|
||||
|
||||
- Video codec: **H.264** - provides the most compatible video codec with all Live view technologies and browsers. Avoid any kind of "smart codec" or "+" codec like _H.264+_ or _H.265+_, as these non-standard codecs remove keyframes (see below).
|
||||
- Audio codec: **AAC** - provides the most compatible audio codec with all Live view technologies and browsers that support audio.
|
||||
- I-frame interval (sometimes called the keyframe interval, the interframe space, or the GOP length): match your camera's frame rate, or choose "1x" (for interframe space on Reolink cameras). For example, if your stream outputs 20fps, your i-frame interval should be 20 (or 1x on Reolink). Values higher than the frame rate will cause the stream to take longer to begin playback. See [this page](https://gardinal.net/understanding-the-keyframe-interval/) for more on keyframes.
|
||||
|
||||
The default video and audio codec on your camera may not always be compatible with your browser, which is why setting them to H.264 and AAC is recommended. See the [go2rtc docs](https://github.com/AlexxIT/go2rtc?tab=readme-ov-file#codecs-madness) for codec support information.
|
||||
|
||||
### Audio Support
|
||||
|
||||
@@ -32,6 +42,15 @@ go2rtc:
|
||||
- "ffmpeg:http_cam#audio=opus" # <- copy of the stream which transcodes audio to the missing codec (usually will be opus)
|
||||
```
|
||||
|
||||
If your camera does not have audio and you are having problems with Live view, you should have go2rtc send video only:
|
||||
|
||||
```yaml
|
||||
go2rtc:
|
||||
streams:
|
||||
no_audio_camera:
|
||||
- ffmpeg:rtsp://192.168.1.5:554/live0#video=copy
|
||||
```
|
||||
|
||||
### Setting Stream For Live UI
|
||||
|
||||
There may be some cameras that you would prefer to use the sub stream for live view, but the main stream for recording. This can be done via `live -> stream_name`.
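For example, a sketch using placeholder camera and stream names:

```yaml
go2rtc:
  streams:
    front_door:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=0
    front_door_sub:
      - rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2

cameras:
  front_door:
    live:
      # use the lower resolution sub stream for the Live UI
      stream_name: front_door_sub
```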
|
||||
|
||||
@@ -92,10 +92,16 @@ motion:
|
||||
lightning_threshold: 0.8
|
||||
```
|
||||
|
||||
:::tip
|
||||
:::warning
|
||||
|
||||
Some cameras like doorbell cameras may have missed detections when someone walks directly in front of the camera and the lightning_threshold causes motion detection to be re-calibrated. In this case, it may be desirable to increase the `lightning_threshold` to ensure these objects are not missed.
|
||||
|
||||
:::
|
||||
|
||||
:::note
|
||||
|
||||
Lightning threshold does not stop motion based recordings from being saved.
|
||||
|
||||
:::
|
||||
|
||||
Large changes in motion like PTZ moves and camera switches between Color and IR mode should result in no motion detection. This is done via the `lightning_threshold` configuration. It is defined as the percentage of the image used to detect lightning or other substantial changes where motion detection needs to recalibrate. Increasing this value will make motion detection more likely to consider lightning or IR mode changes as valid motion. Decreasing this value will make motion detection more likely to ignore large amounts of motion such as a person approaching a doorbell camera.
|
||||
|
||||
@@ -5,10 +5,17 @@ title: Object Detectors
|
||||
|
||||
# Supported Hardware
|
||||
|
||||
:::info
|
||||
|
||||
Frigate supports multiple different detectors that work on different types of hardware:
|
||||
|
||||
**Most Hardware**
|
||||
- [Coral EdgeTPU](#edge-tpu-detector): The Google Coral EdgeTPU is available in USB and m.2 format, allowing for a wide range of compatibility with devices.
- [Hailo](#hailo-8l): The Hailo8 AI Acceleration module is available in m.2 format with a HAT for RPi devices, offering a wide range of compatibility with devices.
|
||||
|
||||
**AMD**
|
||||
- [ROCm](#amdrocm-gpu-detector): ROCm can run on AMD Discrete GPUs to provide efficient object detection.
|
||||
- [ONNX](#onnx): ROCm will automatically be detected and used as a detector in the `-rocm` Frigate image when a supported ONNX model is configured.
|
||||
|
||||
**Intel**
|
||||
- [OpenVino](#openvino-detector): OpenVino can run on Intel Arc GPUs, Intel integrated GPUs, and Intel CPUs to provide efficient object detection.
|
||||
@@ -16,42 +23,19 @@ Frigate supports multiple different detectors that work on different types of ha
|
||||
|
||||
**Nvidia**
|
||||
- [TensortRT](#nvidia-tensorrt-detector): TensorRT can run on Nvidia GPUs, using one of many default models.
|
||||
- [ONNX](#onnx): TensorRT will automatically be detected and used as a detector in the `-tensorrt` Frigate image when a supported ONNX is configured.
|
||||
- [ONNX](#onnx): TensorRT will automatically be detected and used as a detector in the `-tensorrt` Frigate image when a supported ONNX model is configured.
|
||||
|
||||
**Rockchip**
|
||||
- [RKNN](#rockchip-platform): RKNN models can run on Rockchip devices with included NPUs.
|
||||
|
||||
# Officially Supported Detectors
|
||||
|
||||
Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `openvino`, `tensorrt`, `rknn`, and `hailo8l`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras.
|
||||
|
||||
## CPU Detector (not recommended)
|
||||
|
||||
The CPU detector type runs a TensorFlow Lite model utilizing the CPU without hardware acceleration. It is recommended to use a hardware accelerated detector type instead for better performance. To configure a CPU based detector, set the `"type"` attribute to `"cpu"`.
|
||||
|
||||
:::tip
|
||||
|
||||
If you do not have GPU or Edge TPU hardware, using the [OpenVINO Detector](#openvino-detector) is often more efficient than using the CPU detector.
|
||||
**For Testing**
|
||||
- [CPU Detector (not recommended for actual use)](#cpu-detector-not-recommended): Use a CPU to run a tflite model; this is not recommended, and in most cases OpenVINO can be used in CPU mode with better results.
|
||||
|
||||
:::
|
||||
|
||||
The number of threads used by the interpreter can be specified using the `"num_threads"` attribute, and defaults to `3`.
|
||||
# Officially Supported Detectors
|
||||
|
||||
A TensorFlow Lite model is provided in the container at `/cpu_model.tflite` and is used by this detector type by default. To provide your own model, bind mount the file into the container and provide the path with `model.path`.
|
||||
|
||||
```yaml
|
||||
detectors:
|
||||
cpu1:
|
||||
type: cpu
|
||||
num_threads: 3
|
||||
model:
|
||||
path: "/custom_model.tflite"
|
||||
cpu2:
|
||||
type: cpu
|
||||
num_threads: 3
|
||||
```
|
||||
|
||||
When using CPU detectors, you can add one CPU detector per camera. Adding more detectors than the number of cameras should not improve performance.
|
||||
Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `hailo8l`, `onnx`, `openvino`, `rknn`, `rocm`, and `tensorrt`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras.
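As a sketch, two USB Corals could be configured as separate detectors like this (the `usb:0`/`usb:1` device strings are assumptions that depend on your hardware):

```yaml
detectors:
  coral1:
    type: edgetpu
    device: usb:0
  coral2:
    type: edgetpu
    device: usb:1
```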
|
||||
|
||||
## Edge TPU Detector
|
||||
|
||||
@@ -183,7 +167,7 @@ This detector also supports YOLOX. Frigate does not come with any YOLOX models p
|
||||
|
||||
#### YOLO-NAS
|
||||
|
||||
[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) [](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb).
|
||||
[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) [](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb).
|
||||
|
||||
:::warning
|
||||
|
||||
@@ -312,6 +296,121 @@ model:
|
||||
height: 320
|
||||
```
|
||||
|
||||
## AMD/ROCm GPU detector
|
||||
|
||||
### Setup
|
||||
|
||||
The `rocm` detector supports running YOLO-NAS models on AMD GPUs. Use a frigate docker image with `-rocm` suffix, for example `ghcr.io/blakeblackshear/frigate:stable-rocm`.
|
||||
|
||||
### Docker settings for GPU access
|
||||
|
||||
ROCm needs access to the `/dev/kfd` and `/dev/dri` devices. When docker or frigate is not run as root, the `video` (and possibly `render` and `ssl/_ssl`) groups should also be added.
|
||||
|
||||
When running docker directly the following flags should be added for device access:
|
||||
|
||||
```bash
|
||||
$ docker run --device=/dev/kfd --device=/dev/dri \
|
||||
...
|
||||
```
|
||||
|
||||
When using docker compose:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
frigate:
|
||||
---
|
||||
devices:
|
||||
- /dev/dri
|
||||
- /dev/kfd
|
||||
```
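If the container is not run as root, one way to grant the group permissions mentioned above is via `group_add` (a sketch; some distributions may require numeric GIDs instead of group names):

```yaml
services:
  frigate:
    group_add:
      - video
      - render
```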
|
||||
|
||||
For reference on recommended settings see [running ROCm/pytorch in Docker](https://rocm.docs.amd.com/projects/install-on-linux/en/develop/how-to/3rd-party/pytorch-install.html#using-docker-with-pytorch-pre-installed).
|
||||
|
||||
### Docker settings for overriding the GPU chipset
|
||||
|
||||
Your GPU might work just fine without any special configuration, but in many cases manual settings are needed. The AMD/ROCm software stack comes with a limited set of GPU drivers, and for newer or missing models you will have to override the chipset version to an older/generic version to get things working.
|
||||
|
||||
AMD/ROCm also does not "officially" support integrated GPUs. It still works with most of them just fine, but requires special settings: you have to configure the `HSA_OVERRIDE_GFX_VERSION` environment variable. See the [ROCm bug report](https://github.com/ROCm/ROCm/issues/1743) for context and examples.
|
||||
|
||||
For the rocm frigate build there is some automatic detection:
|
||||
|
||||
- gfx90c -> 9.0.0
|
||||
- gfx1031 -> 10.3.0
|
||||
- gfx1103 -> 11.0.0
|
||||
|
||||
If you have something else, you might need to override `HSA_OVERRIDE_GFX_VERSION` at Docker launch. Suppose the version you want is `9.0.0`; then you should configure it from the command line as:
|
||||
|
||||
```bash
|
||||
$ docker run -e HSA_OVERRIDE_GFX_VERSION=9.0.0 \
|
||||
...
|
||||
```
|
||||
|
||||
When using docker compose:
|
||||
|
||||
```yaml
|
||||
services:
|
||||
frigate:
|
||||
...
|
||||
environment:
|
||||
HSA_OVERRIDE_GFX_VERSION: "9.0.0"
|
||||
```
|
||||
|
||||
Figuring out what version you need can be complicated, as you can't tell the chipset name and driver from the AMD brand name.
|
||||
|
||||
- First, make sure the ROCm environment is running properly by running `/opt/rocm/bin/rocminfo` in the Frigate container; it should list both the CPU and the GPU with their properties.
- Find the chipset version you have (gfxNNN) in the output of `rocminfo` (see below).
- Use a search engine to find out what `HSA_OVERRIDE_GFX_VERSION` you need for the given gfx name (search for "gfxNNN ROCm HSA_OVERRIDE_GFX_VERSION").
- Override `HSA_OVERRIDE_GFX_VERSION` with the relevant value.
- If things are not working, check the Frigate docker logs.
|
||||
|
||||
#### Figuring out if AMD/ROCm is working and found your GPU
|
||||
|
||||
```bash
|
||||
$ docker exec -it frigate /opt/rocm/bin/rocminfo
|
||||
```
|
||||
|
||||
#### Figuring out your AMD GPU chipset version:
|
||||
|
||||
We unset the `HSA_OVERRIDE_GFX_VERSION` to prevent an existing override from messing up the result:
|
||||
|
||||
```bash
|
||||
$ docker exec -it frigate /bin/bash -c '(unset HSA_OVERRIDE_GFX_VERSION && /opt/rocm/bin/rocminfo |grep gfx)'
|
||||
```
|
||||
|
||||
### Supported Models
|
||||
|
||||
There is no default model provided; the following formats are supported:
|
||||
|
||||
#### YOLO-NAS
|
||||
|
||||
[YOLO-NAS](https://github.com/Deci-AI/super-gradients/blob/master/YOLONAS.md) models are supported, but not included by default. You can build and download a compatible model with pre-trained weights using [this notebook](https://github.com/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb) [](https://colab.research.google.com/github/blakeblackshear/frigate/blob/dev/notebooks/YOLO_NAS_Pretrained_Export.ipynb).
|
||||
|
||||
:::warning
|
||||
|
||||
The pre-trained YOLO-NAS weights from DeciAI are subject to their license and can't be used commercially. For more information, see: https://docs.deci.ai/super-gradients/latest/LICENSE.YOLONAS.html
|
||||
|
||||
:::
|
||||
|
||||
The input image size in this notebook is set to 320x320. This results in lower CPU usage and faster inference times without impacting performance in most cases due to the way Frigate crops video frames to areas of interest before running detection. The notebook and config can be updated to 640x640 if desired.
|
||||
|
||||
After placing the downloaded onnx model in your config folder, you can use the following configuration:
|
||||
|
||||
```yaml
|
||||
detectors:
|
||||
rocm:
|
||||
type: rocm
|
||||
|
||||
model:
|
||||
model_type: yolonas
|
||||
width: 320 # <--- should match whatever was set in notebook
|
||||
height: 320 # <--- should match whatever was set in notebook
|
||||
input_pixel_format: bgr
|
||||
path: /config/yolo_nas_s.onnx
|
||||
labelmap_path: /labelmap/coco-80.txt
|
||||
```
|
||||
|
||||
Note that the labelmap uses a subset of the complete COCO label set that has only 80 objects.
|
||||
|
||||
## ONNX
|
||||
|
||||
ONNX is an open format for building machine learning models. Frigate supports running ONNX models on CPU, OpenVINO, and TensorRT. On startup, Frigate will automatically try to use a GPU if one is available.
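A minimal sketch of an ONNX detector with a YOLO-NAS model (the model path and dimensions are placeholders and must match the model you provide):

```yaml
detectors:
  onnx:
    type: onnx

model:
  model_type: yolonas
  width: 320
  height: 320
  input_pixel_format: bgr
  path: /config/yolo_nas_s.onnx
  labelmap_path: /labelmap/coco-80.txt
```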
|
||||
@@ -364,6 +463,34 @@ model:
|
||||
|
||||
Note that the labelmap uses a subset of the complete COCO label set that has only 80 objects.
|
||||
|
||||
## CPU Detector (not recommended)
|
||||
|
||||
The CPU detector type runs a TensorFlow Lite model utilizing the CPU without hardware acceleration. It is recommended to use a hardware accelerated detector type instead for better performance. To configure a CPU based detector, set the `"type"` attribute to `"cpu"`.
|
||||
|
||||
:::danger
|
||||
|
||||
The CPU detector is not recommended for general use. If you do not have GPU or Edge TPU hardware, using the [OpenVINO Detector](#openvino-detector) in CPU mode is often more efficient than using the CPU detector.
|
||||
|
||||
:::
|
||||
|
||||
The number of threads used by the interpreter can be specified using the `"num_threads"` attribute, and defaults to `3`.
|
||||
|
||||
A TensorFlow Lite model is provided in the container at `/cpu_model.tflite` and is used by this detector type by default. To provide your own model, bind mount the file into the container and provide the path with `model.path`.
|
||||
|
||||
```yaml
|
||||
detectors:
|
||||
cpu1:
|
||||
type: cpu
|
||||
num_threads: 3
|
||||
model:
|
||||
path: "/custom_model.tflite"
|
||||
cpu2:
|
||||
type: cpu
|
||||
num_threads: 3
|
||||
```
|
||||
|
||||
When using CPU detectors, you can add one CPU detector per camera. Adding more detectors than the number of cameras should not improve performance.
|
||||
|
||||
## Deepstack / CodeProject.AI Server Detector
|
||||
|
||||
The Deepstack / CodeProject.AI Server detector for Frigate allows you to integrate Deepstack and CodeProject.AI object detection capabilities into Frigate. CodeProject.AI and DeepStack are open-source AI platforms that can be run on various devices such as the Raspberry Pi, Nvidia Jetson, and other compatible hardware. It is important to note that the integration is performed over the network, so the inference times may not be as fast as native Frigate detectors, but it still provides an efficient and reliable solution for object detection and tracking.
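A sketch of pointing Frigate at such a server over the network (the `api_url` below is a placeholder for your Deepstack or CodeProject.AI endpoint):

```yaml
detectors:
  deepstack:
    type: deepstack
    api_url: http://deepstack-host:5000/v1/vision/detection
    api_timeout: 0.1 # seconds
```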
|
||||
@@ -475,7 +602,9 @@ $ cat /sys/kernel/debug/rknpu/load
|
||||
|
||||
## Hailo-8l
|
||||
|
||||
This detector is available if you are using the Raspberry Pi 5 with Hailo-8L AI Kit. This has not been tested using the Hailo-8L with other hardware.
|
||||
This detector is available for use with the Hailo-8 AI Acceleration Module.
|
||||
|
||||
See the [installation docs](../frigate/installation.md#hailo-8l) for information on configuring the hailo8.
|
||||
|
||||
### Configuration
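A minimal sketch of the detector section (the `device` value is an assumption for an M.2/PCIe module):

```yaml
detectors:
  hailo8l:
    type: hailo8l
    device: PCIe
```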
|
||||
|
||||
|
||||
24
docs/docs/configuration/pwa.md
Normal file
@@ -0,0 +1,24 @@
|
||||
---
|
||||
id: pwa
|
||||
title: Installing Frigate App
|
||||
---
|
||||
|
||||
Frigate supports being installed as a [Progressive Web App](https://web.dev/explore/progressive-web-apps) on Desktop, Android, and iOS.
|
||||
|
||||
This adds features including the ability to deep link directly into the app.
|
||||
|
||||
## Requirements
|
||||
|
||||
In order to install Frigate as a PWA, the following requirements must be met:
|
||||
|
||||
- Frigate must be accessed via a secure context (localhost, secure https, etc.)
|
||||
- On Android, Firefox, Chrome, Edge, Opera, and Samsung Internet Browser all support installing PWAs.
|
||||
- On iOS 16.4 and later, PWAs can be installed from the Share menu in Safari, Chrome, Edge, Firefox, and Orion.
|
||||
|
||||
## Installation
|
||||
|
||||
Installation varies slightly based on the device that is being used:
|
||||
|
||||
- Desktop: Use the install button typically found in the right edge of the address bar
|
||||
- Android: Use the `Install as App` button in the more options menu
|
||||
- iOS: Use the `Add to Homescreen` button in the share menu
|
||||
@@ -154,7 +154,7 @@ Footage can be exported from Frigate by right-clicking (desktop) or long pressin
|
||||
|
||||
### Time-lapse export
|
||||
|
||||
Time lapse exporting is available only via the [HTTP API](../integrations/api.md#post-apiexportcamerastartstart-timestampendend-timestamp).
|
||||
Time lapse exporting is available only via the [HTTP API](../integrations/api/export-recording-export-camera-name-start-start-time-end-end-time-post.api.mdx).
|
||||
|
||||
When exporting a time-lapse the default speed-up is 25x with 30 FPS. This means that every 25 seconds of (real-time) recording is condensed into 1 second of time-lapse video (always without audio) with a smoothness of 30 FPS.
|
||||
|
||||
|
||||
@@ -138,6 +138,16 @@ model:
|
||||
# Optional: Label name modifications. These are merged into the standard labelmap.
|
||||
labelmap:
|
||||
2: vehicle
|
||||
# Optional: Map of object labels to their attribute labels (default: depends on model)
|
||||
attributes_map:
|
||||
person:
|
||||
- amazon
|
||||
- face
|
||||
car:
|
||||
- amazon
|
||||
- fedex
|
||||
- license_plate
|
||||
- ups
|
||||
|
||||
# Optional: Audio Events Configuration
|
||||
# NOTE: Can be overridden at the camera level
|
||||
@@ -324,6 +334,9 @@ review:
|
||||
- car
|
||||
- person
|
||||
# Optional: required zones for an object to be marked as an alert (default: none)
|
||||
# NOTE: when settings required zones globally, this zone must exist on all cameras
|
||||
# or the config will be considered invalid. In that case the required_zones
|
||||
# should be configured at the camera level.
|
||||
required_zones:
|
||||
- driveway
|
||||
# Optional: detections configuration
|
||||
@@ -333,12 +346,20 @@ review:
|
||||
- car
|
||||
- person
|
||||
# Optional: required zones for an object to be marked as a detection (default: none)
|
||||
# NOTE: when settings required zones globally, this zone must exist on all cameras
|
||||
# or the config will be considered invalid. In that case the required_zones
|
||||
# should be configured at the camera level.
|
||||
required_zones:
|
||||
- driveway
|
||||
|
||||
# Optional: Motion configuration
|
||||
# NOTE: Can be overridden at the camera level
|
||||
motion:
|
||||
# Optional: enables detection for the camera (default: True)
|
||||
# NOTE: Motion detection is required for object detection,
|
||||
# setting this to False and leaving detect enabled
|
||||
# will result in an error on startup.
|
||||
enabled: False
|
||||
# Optional: The threshold passed to cv2.threshold to determine if a pixel is different enough to be counted as motion. (default: shown below)
|
||||
# Increasing this value will make motion detection less sensitive and decreasing it will make motion detection more sensitive.
|
||||
# The value should be between 1 and 255.
|
||||
@@ -497,6 +518,9 @@ semantic_search:
|
||||
enabled: False
|
||||
# Optional: Re-index embeddings database from historical tracked objects (default: shown below)
|
||||
reindex: False
|
||||
# Optional: Set the model size used for embeddings. (default: shown below)
|
||||
# NOTE: small model runs on CPU and large model runs on GPU
|
||||
model_size: "small"
|
||||
|
||||
# Optional: Configuration for AI generated tracked object descriptions
|
||||
# NOTE: Semantic Search must be enabled for this to do anything.
|
||||
@@ -716,6 +740,8 @@ cameras:
|
||||
genai:
|
||||
# Optional: Enable AI description generation (default: shown below)
|
||||
enabled: False
|
||||
# Optional: Use the object snapshot instead of thumbnails for description generation (default: shown below)
|
||||
use_snapshot: False
|
||||
# Optional: The default prompt for generating descriptions. Can use replacement
|
||||
# variables like "label", "sub_label", "camera" to make more dynamic. (default: shown below)
|
||||
prompt: "Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background."
|
||||
@@ -723,6 +749,12 @@ cameras:
|
||||
# Format: {label}: {prompt}
|
||||
object_prompts:
|
||||
person: "My special person prompt."
|
||||
# Optional: objects to generate descriptions for (default: all objects that are tracked)
|
||||
objects:
|
||||
- person
|
||||
- cat
|
||||
# Optional: Restrict generation to objects that entered any of the listed zones (default: none, all zones qualify)
|
||||
required_zones: []
|
||||
|
||||
# Optional
|
||||
ui:
|
||||
@@ -786,7 +818,7 @@ camera_groups:
|
||||
- side_cam
|
||||
- front_doorbell_cam
|
||||
# Required: icon used for group
|
||||
icon: car
|
||||
icon: LuCar
|
||||
# Required: index of this group
|
||||
order: 0
|
||||
```
|
||||
|
||||
@@ -41,8 +41,6 @@ review:
|
||||
|
||||
By default all detections that do not qualify as an alert qualify as a detection. However, detections can further be filtered to only include certain labels or certain zones.
|
||||
|
||||
By default a review item will only be marked as an alert if a person or car is detected. This can be configured to include any object or audio label using the following config:
|
||||
|
||||
```yaml
|
||||
# can be overridden at the camera level
|
||||
review:
|
||||
|
||||
@@ -5,10 +5,18 @@ title: Using Semantic Search
|
||||
|
||||
Semantic Search in Frigate allows you to find tracked objects within your review items using either the image itself, a user-defined text description, or an automatically generated one. This feature works by creating _embeddings_ — numerical vector representations — for both the images and text descriptions of your tracked objects. By comparing these embeddings, Frigate assesses their similarities to deliver relevant search results.
|
||||
|
||||
Frigate has support for two models to create embeddings, both of which run locally: [OpenAI CLIP](https://openai.com/research/clip) and [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). Embeddings are then saved to a local instance of [ChromaDB](https://trychroma.com).
|
||||
Frigate has support for [Jina AI's CLIP model](https://huggingface.co/jinaai/jina-clip-v1) to create embeddings, which runs locally. Embeddings are then saved to Frigate's database.
|
||||
|
||||
Semantic Search is accessed via the _Explore_ view in the Frigate UI.
|
||||
|
||||
## Minimum System Requirements
|
||||
|
||||
Semantic Search works by running a large AI model locally on your system. Small or underpowered systems like a Raspberry Pi will not run Semantic Search reliably or at all.
|
||||
|
||||
A minimum of 8GB of RAM is required to use Semantic Search. A GPU is not strictly required but will provide a significant performance increase over CPU-only systems.
|
||||
|
||||
For best performance, 16GB or more of RAM and a dedicated GPU are recommended.
|
||||
|
||||
## Configuration
|
||||
|
||||
Semantic search is disabled by default, and must be enabled in your config file before it can be used. Semantic Search is a global configuration setting.
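A minimal sketch of enabling it globally (both options appear in the full configuration reference):

```yaml
semantic_search:
  enabled: True
  reindex: False
```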
|
||||
@@ -27,18 +35,34 @@ If you are enabling the Search feature for the first time, be advised that Friga
|
||||
|
||||
:::
|
||||
|
||||
### OpenAI CLIP
|
||||
### Jina AI CLIP
|
||||
|
||||
This model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in Chroma. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.
|
||||
The vision model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on tracked objects to encode the thumbnail image and store it in the database. When searching for tracked objects via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "Find Similar" in the tracked object detail pane, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.
|
||||
|
||||
### all-MiniLM-L6-v2
|
||||
The text model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Search page when clicking on the gray tracked object chip at the top left of each review item. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.
|
||||
|
||||
This is a sentence embedding model that has been fine tuned on over 1 billion sentence pairs. This model is used to embed tracked object descriptions and perform searches against them. Descriptions can be created, viewed, and modified on the Search page when clicking on the gray tracked object chip at the top left of each review item. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate tracked object descriptions.
|
||||
Differently weighted CLIP models are available and can be selected by setting the `model_size` config option:
|
||||
|
||||
## Usage
|
||||
:::tip
|
||||
|
||||
The CLIP models are downloaded in ONNX format, which means they will be accelerated using GPU hardware when available. This depends on the Docker build that is used. See [the object detector docs](../configuration/object_detectors.md) for more information.
|
||||
|
||||
:::
|
||||
|
||||
```yaml
|
||||
semantic_search:
|
||||
enabled: True
|
||||
model_size: small
|
||||
```
|
||||
|
||||
- Configuring the `large` model employs the full Jina model and will automatically run on the GPU if applicable.
|
||||
- Configuring the `small` model employs a quantized version of the model that uses much less RAM and runs faster on CPU with a very negligible difference in embedding quality.
|
||||
|
||||
## Usage and Best Practices
|
||||
|
||||
1. Semantic search is used in conjunction with the other filters available on the Search page. Use a combination of traditional filtering and semantic search for the best results.
|
||||
2. The comparison between text and image embedding distances generally means that results matching `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" filter to help find what you are looking for.
|
||||
3. Make your search language and tone closely match your descriptions. If you are using thumbnail search, phrase your query as an image caption.
|
||||
4. Semantic search on thumbnails tends to return better results when matching large subjects that take up most of the frame. Small things like "cat" tend to not work well.
|
||||
5. Experiment! Find a tracked object you want to test and start typing keywords to see what works for you.
|
||||
2. Use the thumbnail search type when searching for particular objects in the scene. Use the description search type when attempting to discern the intent of your object.
|
||||
3. Because of how the AI models Frigate uses have been trained, the comparison between text and image embedding distances generally means that with multi-modal (`thumbnail` and `description`) searches, results matching `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" setting to help find what you are looking for. Note that if you are generating descriptions for specific objects or zones only, this may cause search results to prioritize the objects with descriptions even if the ones without them are more relevant.
|
||||
4. Make your search language and tone closely match exactly what you're looking for. If you are using thumbnail search, **phrase your query as an image caption**. Searching for "red car" may not work as well as "red sedan driving down a residential street on a sunny day".
|
||||
5. Semantic search on thumbnails tends to return better results when matching large subjects that take up most of the frame. Small things like "cat" tend to not work well.
|
||||
6. Experiment! Find a tracked object you want to test and start typing keywords and phrases to see what works for you.
|
||||
|
||||
@@ -3,7 +3,7 @@ id: snapshots
|
||||
title: Snapshots
|
||||
---
|
||||
|
||||
Frigate can save a snapshot image to `/media/frigate/clips` for each object that is detected named as `<camera>-<id>.jpg`. They are also accessible [via the api](../integrations/api.md#get-apieventsidsnapshotjpg)
|
||||
Frigate can save a snapshot image to `/media/frigate/clips` for each object that is detected named as `<camera>-<id>.jpg`. They are also accessible [via the api](../integrations/api/event-snapshot-events-event-id-snapshot-jpg-get.api.mdx)
|
||||
|
||||
For users with Frigate+ enabled, snapshots are accessible in the UI in the Frigate+ pane to allow for quick submission to the Frigate+ service.
|
||||
|
||||
|
||||
@@ -193,7 +193,7 @@ npm run test
|
||||
#### 1. Installation
|
||||
|
||||
```console
|
||||
npm install
|
||||
cd docs && npm install
|
||||
```
|
||||
|
||||
#### 2. Local Development
|
||||
|
||||
@@ -69,6 +69,7 @@ Inference speeds vary greatly depending on the CPU, GPU, or VPU used, some known
|
||||
| Intel i5 7500 | ~ 15 ms | Inference speeds on CPU were ~ 260 ms |
|
||||
| Intel i5 1135G7 | 10 - 15 ms | |
|
||||
| Intel i5 12600K | ~ 15 ms | Inference speeds on CPU were ~ 35 ms |
|
||||
| Intel Arc A750 | ~ 4 ms | |
|
||||
|
||||
### TensorRT - Nvidia GPU
|
||||
|
||||
@@ -87,6 +88,10 @@ Inference speeds will vary greatly depending on the GPU and the model used.
|
||||
| Quadro P400 2GB | 20 - 25 ms |
|
||||
| Quadro P2000 | ~ 12 ms |
|
||||
|
||||
#### AMD GPUs
|
||||
|
||||
With the [rocm](../configuration/object_detectors.md#amdrocm-gpu-detector) detector Frigate can take advantage of many AMD GPUs.
|
||||
|
||||
### Community Supported:
|
||||
|
||||
#### Nvidia Jetson
|
||||
|
||||
@@ -112,8 +112,8 @@ For other installations, follow these steps for installation:
|
||||
|
||||
1. Install the driver from the [Hailo GitHub repository](https://github.com/hailo-ai/hailort-drivers). A convenient script for Linux is available to clone the repository, build the driver, and install it.
|
||||
2. Copy or download [this script](https://github.com/blakeblackshear/frigate/blob/41c9b13d2fffce508b32dfc971fa529b49295fbd/docker/hailo8l/user_installation.sh).
|
||||
3. Ensure it has execution permissions with `sudo chmod +x install_hailo8l_driver.sh`
|
||||
4. Run the script with `./install_hailo8l_driver.sh`
|
||||
3. Ensure it has execution permissions with `sudo chmod +x user_installation.sh`
|
||||
4. Run the script with `./user_installation.sh`
|
||||
|
||||
#### Setup
|
||||
|
||||
@@ -250,10 +250,7 @@ The community supported docker image tags for the current stable version are:
|
||||
- `stable-tensorrt-jp5` - Frigate build optimized for nvidia Jetson devices running Jetpack 5
|
||||
- `stable-tensorrt-jp4` - Frigate build optimized for nvidia Jetson devices running Jetpack 4.6
|
||||
- `stable-rk` - Frigate build for SBCs with Rockchip SoC
|
||||
- `stable-rocm` - Frigate build for [AMD GPUs and iGPUs](../configuration/object_detectors.md#amdrocm-gpu-detector), all drivers
|
||||
- `stable-rocm-gfx900` - AMD gfx900 driver only
|
||||
- `stable-rocm-gfx1030` - AMD gfx1030 driver only
|
||||
- `stable-rocm-gfx1100` - AMD gfx1100 driver only
|
||||
- `stable-rocm` - Frigate build for [AMD GPUs](../configuration/object_detectors.md#amdrocm-gpu-detector)
|
||||
- `stable-h8l` - Frigate build for the Hailo-8L M.2 PICe Raspberry Pi 5 hat
|
||||
|
||||
## Home Assistant Addon
|
||||
|
||||
@@ -13,7 +13,7 @@ Use of the bundled go2rtc is optional. You can still configure FFmpeg to connect
|
||||
|
||||
# Setup a go2rtc stream
|
||||
|
||||
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. If you set the stream name under go2rtc to match the name of your camera, it will automatically be mapped and you will get additional live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.9.4#module-streams), not just rtsp.
|
||||
First, you will want to configure go2rtc to connect to your camera stream by adding the stream you want to use for live view in your Frigate config file. For the best experience, you should set the stream name under go2rtc to match the name of your camera so that Frigate will automatically map it and be able to use better live view options for the camera. Avoid changing any other parts of your config at this step. Note that go2rtc supports [many different stream types](https://github.com/AlexxIT/go2rtc/tree/v1.9.4#module-streams), not just rtsp.
|
||||
|
||||
```yaml
|
||||
go2rtc:
|
||||
@@ -22,7 +22,7 @@ go2rtc:
|
||||
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
|
||||
```
|
||||
|
||||
The easiest live view to get working is MSE. After adding this to the config, restart Frigate and try to watch the live stream by selecting MSE in the dropdown after clicking on the camera.
|
||||
After adding this to the config, restart Frigate and try to watch the live stream for a single camera by clicking on it from the dashboard. It should look much clearer and smoother than the original jsmpeg stream.
|
||||
|
||||
|
||||
### What if my video doesn't play?
|
||||
@@ -46,7 +46,7 @@ The easiest live view to get working is MSE. After adding this to the config, re
|
||||
streams:
|
||||
back:
|
||||
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
|
||||
- "ffmpeg:back#video=h264"
|
||||
- "ffmpeg:back#video=h264#hardware"
|
||||
```
|
||||
|
||||
- Switch to FFmpeg if needed:
|
||||
@@ -58,9 +58,8 @@ The easiest live view to get working is MSE. After adding this to the config, re
|
||||
- ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
|
||||
```
|
||||
|
||||
- If you can see the video but do not have audio, this is most likely because your
|
||||
camera's audio stream is not AAC.
|
||||
- If possible, update your camera's audio settings to AAC.
|
||||
- If you can see the video but do not have audio, this is most likely because your camera's audio stream codec is not AAC.
|
||||
- If possible, update your camera's audio settings to AAC in your camera's firmware.
|
||||
- If your cameras do not support AAC audio, you will need to tell go2rtc to re-encode the audio to AAC on demand if you want audio. This will use additional CPU and add some latency. To add AAC audio on demand, you can update your go2rtc config as follows:
|
||||
```yaml
|
||||
go2rtc:
|
||||
@@ -77,7 +76,7 @@ camera's audio stream is not AAC.
|
||||
streams:
|
||||
back:
|
||||
- rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2
|
||||
- "ffmpeg:back#video=h264#audio=aac"
|
||||
- "ffmpeg:back#video=h264#audio=aac#hardware"
|
||||
```
|
||||
|
||||
When using the ffmpeg module, you would add AAC audio like this:
|
||||
@@ -86,7 +85,7 @@ camera's audio stream is not AAC.
|
||||
go2rtc:
|
||||
streams:
|
||||
back:
|
||||
- "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac"
|
||||
- "ffmpeg:rtsp://user:password@10.0.10.10:554/cam/realmonitor?channel=1&subtype=2#video=copy#audio=copy#audio=aac#hardware"
|
||||
```
|
||||
|
||||
:::warning
|
||||
@@ -102,4 +101,4 @@ section.
|
||||
## Next steps
|
||||
|
||||
1. If the stream you added to go2rtc is also used by Frigate for the `record` or `detect` role, you can migrate your config to pull from the RTSP restream to reduce the number of connections to your camera as shown [here](/configuration/restream#reduce-connections-to-camera).
|
||||
1. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats.
|
||||
2. You may also prefer to [setup WebRTC](/configuration/live#webrtc-extra-configuration) for slightly lower latency than MSE. Note that WebRTC only supports h264 and specific audio formats and may require opening ports on your router.
|
||||
|
||||
@@ -3,25 +3,38 @@ id: reverse_proxy
|
||||
title: Setting up a reverse proxy
|
||||
---
|
||||
|
||||
This guide outlines the basic configuration steps needed to expose your Frigate UI to the internet.
|
||||
A common way of accomplishing this is to use a reverse proxy webserver between your router and your Frigate instance.
|
||||
A reverse proxy accepts HTTP requests from the public internet and redirects them transparently to internal webserver(s) on your network.
|
||||
This guide outlines the basic configuration steps needed to set up a reverse proxy in front of your Frigate instance.
|
||||
|
||||
The suggested steps are:
|
||||
A reverse proxy is typically needed if you want to set up Frigate on a custom URL, on a subdomain, or on a host serving multiple sites. It could also be used to set up your own authentication provider or for more advanced HTTP routing.
|
||||
|
||||
- **Configure** a 'proxy' HTTP webserver (such as [Apache2](https://httpd.apache.org/docs/current/) or [NPM](https://github.com/NginxProxyManager/nginx-proxy-manager)) and only expose ports 80/443 from this webserver to the internet
|
||||
- **Encrypt** content from the proxy webserver by installing SSL (such as with [Let's Encrypt](https://letsencrypt.org/)). Note that SSL is then not required on your Frigate webserver as the proxy encrypts all requests for you
|
||||
- **Restrict** access to your Frigate instance at the proxy using, for example, password authentication
|
||||
Before setting up a reverse proxy, check if any of the built-in functionality in Frigate suits your needs:
|
||||
|Topic|Docs|
|
||||
|-|-|
|
||||
|TLS|Please see the `tls` [configuration option](../configuration/tls.md)|
|
||||
|Authentication|Please see the [authentication](../configuration/authentication.md) documentation|
|
||||
|IPv6|[Enabling IPv6](../configuration/advanced.md#enabling-ipv6)|
|
||||
|
||||
**Note about TLS**
|
||||
When using a reverse proxy, the TLS session is usually terminated at the proxy, sending the internal request over plain HTTP. If this is the desired behavior, TLS must first be disabled in Frigate, or you will encounter an HTTP 400 error: "The plain HTTP request was sent to HTTPS port."
|
||||
To disable TLS, set the following in your Frigate configuration:
|
||||
```yml
|
||||
tls:
|
||||
enabled: false
|
||||
```
|
||||
|
||||
:::warning
|
||||
A reverse proxy can be used to secure access to an internal webserver but the user will be entirely reliant
|
||||
on the steps they have taken. You must ensure you are following security best practices.
|
||||
This page does not attempt to outline the specific steps needed to secure your internal website.
|
||||
A reverse proxy can be used to secure access to an internal web server, but the user will be entirely reliant on the steps they have taken. You must ensure you are following security best practices.
|
||||
This page does not attempt to outline the specific steps needed to secure your internal website.
|
||||
Please use your own knowledge to assess and vet the reverse proxy software before you install anything on your system.
|
||||
:::
|
||||
|
||||
There are several technologies available to implement reverse proxies. This document currently suggests one, using Apache2,
|
||||
and the community is invited to document others through a contribution to this page.
|
||||
## Proxies
|
||||
|
||||
There are many solutions available to implement reverse proxies, and the community is invited to help document others through a contribution to this page.
|
||||
|
||||
* [Apache2](#apache2-reverse-proxy)
|
||||
* [Nginx](#nginx-reverse-proxy)
|
||||
* [Traefik](#traefik-reverse-proxy)
|
||||
|
||||
## Apache2 Reverse Proxy
|
||||
|
||||
@@ -141,3 +154,26 @@ The settings below enabled connection upgrade, sets up logging (optional) and pr
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## Traefik Reverse Proxy
|
||||
|
||||
This example shows how to add a `label` to the Frigate Docker compose file, enabling Traefik to automatically discover your Frigate instance.
|
||||
Before using the example below, you must first set up Traefik with the [Docker provider](https://doc.traefik.io/traefik/providers/docker/).
|
||||
|
||||
```yml
|
||||
services:
|
||||
frigate:
|
||||
container_name: frigate
|
||||
image: ghcr.io/blakeblackshear/frigate:stable
|
||||
...
|
||||
...
|
||||
labels:
|
||||
- "traefik.enable=true"
|
||||
- "traefik.http.services.frigate.loadbalancer.server.port=8971"
|
||||
- "traefik.http.routers.frigate.rule=Host(`traefik.example.com`)"
|
||||
```
|
||||
|
||||
The above configuration will create a "service" in Traefik, automatically adding your container's IP on port 8971 as a backend.
|
||||
It will also add a router, routing requests to "traefik.example.com" to your local container.
|
||||
|
||||
Note that with this approach, you don't need to expose any ports for the Frigate instance since all traffic will be routed over the internal Docker network.
|
||||
|
||||
@@ -1,534 +0,0 @@
|
||||
---
|
||||
id: api
|
||||
title: HTTP API
|
||||
---
|
||||
|
||||
A web server is available on port 5000 with the following endpoints.
|
||||
|
||||
## Management & Information
|
||||
|
||||
### `GET /api/config`
|
||||
|
||||
A json representation of your configuration
|
||||
|
||||
### `POST /api/restart`
|
||||
|
||||
Restarts Frigate process.
|
||||
|
||||
### `GET /api/stats`
|
||||
|
||||
Contains some granular debug info that can be used for sensors in Home Assistant.
|
||||
|
||||
Sample response:
|
||||
|
||||
```json
|
||||
{
|
||||
/* Per Camera Stats */
|
||||
"cameras": {
|
||||
"back": {
|
||||
/***************
|
||||
* Frames per second being consumed from your camera. If this is higher
|
||||
* than it is supposed to be, you should set -r FPS in your input_args.
|
||||
* camera_fps = process_fps + skipped_fps
|
||||
***************/
|
||||
"camera_fps": 5.0,
|
||||
/***************
|
||||
* Number of times detection is run per second. This can be higher than
|
||||
* your camera FPS because Frigate often looks at the same frame multiple times
|
||||
* or in multiple locations
|
||||
***************/
|
||||
"detection_fps": 1.5,
|
||||
/***************
|
||||
* PID for the ffmpeg process that consumes this camera
|
||||
***************/
|
||||
"capture_pid": 27,
|
||||
/***************
|
||||
* PID for the process that runs detection for this camera
|
||||
***************/
|
||||
"pid": 34,
|
||||
/***************
|
||||
* Frames per second being processed by Frigate.
|
||||
***************/
|
||||
"process_fps": 5.1,
|
||||
/***************
|
||||
* Frames per second skip for processing by Frigate.
|
||||
***************/
|
||||
"skipped_fps": 0.0
|
||||
}
|
||||
},
|
||||
/***************
|
||||
* Sum of detection_fps across all cameras and detectors.
|
||||
* This should be the sum of all detection_fps values from cameras.
|
||||
***************/
|
||||
"detection_fps": 5.0,
|
||||
/* Detectors Stats */
|
||||
"detectors": {
|
||||
"coral": {
|
||||
/***************
|
||||
* Timestamp when object detection started. If this value stays non-zero and constant
|
||||
* for a long time, that means the detection process is stuck.
|
||||
***************/
|
||||
"detection_start": 0.0,
|
||||
/***************
|
||||
* Time spent running object detection in milliseconds.
|
||||
***************/
|
||||
"inference_speed": 10.48,
|
||||
/***************
|
||||
* PID for the shared process that runs object detection on the Coral.
|
||||
***************/
|
||||
"pid": 25321
|
||||
}
|
||||
},
|
||||
"service": {
|
||||
/* Uptime in seconds */
|
||||
"uptime": 10,
|
||||
"version": "0.10.1-8883709",
|
||||
"latest_version": "0.10.1",
|
||||
/* Storage data in MB for important locations */
|
||||
"storage": {
|
||||
"/media/frigate/clips": {
|
||||
"total": 1000,
|
||||
"used": 700,
|
||||
"free": 300,
|
||||
"mnt_type": "ext4"
|
||||
},
|
||||
"/media/frigate/recordings": {
|
||||
"total": 1000,
|
||||
"used": 700,
|
||||
"free": 300,
|
||||
"mnt_type": "ext4"
|
||||
},
|
||||
"/tmp/cache": {
|
||||
"total": 256,
|
||||
"used": 100,
|
||||
"free": 156,
|
||||
"mnt_type": "tmpfs"
|
||||
},
|
||||
"/dev/shm": {
|
||||
"total": 256,
|
||||
"used": 100,
|
||||
"free": 156,
|
||||
"mnt_type": "tmpfs"
|
||||
}
|
||||
}
|
||||
},
|
||||
"cpu_usages": {
|
||||
"pid": {
|
||||
"cmdline": "ffmpeg...",
|
||||
"cpu": "5.0",
|
||||
"cpu_average": "3.0",
|
||||
"mem": "0.5"
|
||||
}
|
||||
},
|
||||
"gpu_usages": {
|
||||
"gpu-type": {
|
||||
"gpu": "17%",
|
||||
"mem": "18%"
|
||||
}
|
||||
}
|
||||
}
|
||||
```
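
As an illustration, a minimal Python sketch that polls this endpoint and prints per-camera frame rates. The host name `frigate.local` is an assumption about your setup; port 5000 is the API port described above.

```python
import requests

# Assumed host; port 5000 is the Frigate API port described above.
BASE_URL = "http://frigate.local:5000"

stats = requests.get(f"{BASE_URL}/api/stats", timeout=10).json()

for name, camera in stats["cameras"].items():
    # camera_fps should roughly equal process_fps + skipped_fps
    print(
        f"{name}: camera={camera['camera_fps']} process={camera['process_fps']} "
        f"skipped={camera['skipped_fps']} detection={camera['detection_fps']}"
    )

print("total detection fps:", stats["detection_fps"])
```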
|
||||
|
||||
### `GET /api/version`
|
||||
|
||||
Version info
|
||||
|
||||
### `GET /api/ffprobe`
|
||||
|
||||
Get ffprobe output for camera feed paths.
|
||||
|
||||
| param | Type | Description |
|
||||
| ------- | ------ | ---------------------------------- |
|
||||
| `paths` | string | `,` separated list of camera paths |
|
||||
|
||||
### `GET /api/<camera_name>/ptz/info`
|
||||
|
||||
Get PTZ info for the camera.
|
||||
|
||||
## Camera Media
|
||||
|
||||
### `GET /api/<camera_name>`
|
||||
|
||||
An mjpeg stream for debugging. Keep in mind the mjpeg endpoint is for debugging only and will put additional load on the system when in use.
|
||||
|
||||
Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------------------------ |
|
||||
| `fps` | int | Frame rate |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `zones` | int | Draw the zones on the image (0 or 1) |
|
||||
| `mask` | int | Overlay the mask on the image (0 or 1) |
|
||||
| `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) |
|
||||
| `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) |
|
||||
|
||||
You can access a higher resolution mjpeg stream by appending `h=height-in-pixels` to the endpoint. For example `/api/back?h=1080`. You can also increase the FPS by appending `fps=frame-rate` to the URL such as `/api/back?fps=10` or both with `?fps=10&h=1000`.
|
||||
|
||||
### `GET /api/<camera_name>/latest.jpg[?h=300]`
|
||||
|
||||
The most recent frame that Frigate has finished processing. It is a full resolution image by default.
|
||||
|
||||
Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------------------------ |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `zones` | int | Draw the zones on the image (0 or 1) |
|
||||
| `mask` | int | Overlay the mask on the image (0 or 1) |
|
||||
| `motion` | int | Draw blue boxes for areas with detected motion (0 or 1) |
|
||||
| `regions` | int | Draw green boxes for areas where object detection was run (0 or 1) |
|
||||
| `quality` | int | Jpeg encoding quality (0-100). Defaults to 70. |
|
||||
|
||||
Example parameters:
|
||||
|
||||
- `h=300`: resizes the image to 300 pixels tall
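
A hedged Python sketch fetching a resized latest frame with bounding boxes drawn. The host and camera name are assumptions; the query parameters are the ones listed above.

```python
import requests

# Assumed host and camera name; query params are documented above.
url = "http://frigate.local:5000/api/back/latest.jpg"
params = {"h": 300, "bbox": 1, "quality": 80}

resp = requests.get(url, params=params, timeout=10)
resp.raise_for_status()

# Save the returned JPEG to disk.
with open("back_latest.jpg", "wb") as f:
    f.write(resp.content)
```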
|
||||
|
||||
### `GET /api/<camera_name>/<label>/thumbnail.jpg`
|
||||
|
||||
Returns the thumbnail from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest thumbnail regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/<label>/clip.mp4`
|
||||
|
||||
Returns the clip from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest clip regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/<label>/snapshot.jpg`
|
||||
|
||||
Returns the snapshot image from the latest tracked object for the given camera and label combo. Using `any` as the label will return the latest snapshot regardless of type.
|
||||
|
||||
### `GET /api/<camera_name>/grid.jpg`
|
||||
|
||||
Returns the latest camera image with the regions grid overlaid.
|
||||
|
||||
| param | Type | Description |
|
||||
| ------------ | ----- | ------------------------------------------------------------------------------------------ |
|
||||
| `color` | str | The color of the grid (red,green,blue,black,white). Defaults to "green". |
|
||||
| `font_scale` | float | Font scale. Can be used to increase font size on high resolution cameras. Defaults to 0.5. |
|
||||
|
||||
### `GET /clips/<camera>-<id>.jpg`
|
||||
|
||||
JPG snapshot for the given camera and event id.
|
||||
|
||||
## Events
|
||||
|
||||
### `GET /api/events`
|
||||
|
||||
Events from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| -------------------- | ----- | ----------------------------------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `zones` | str | , separated list of zones |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
| `has_snapshot` | int | Filter to events that have snapshots (0 or 1) |
|
||||
| `has_clip` | int | Filter to events that have clips (0 or 1) |
|
||||
| `include_thumbnails` | int | Include thumbnails in the response (0 or 1) |
|
||||
| `in_progress` | int | Limit to events in progress (0 or 1) |
|
||||
| `time_range` | str | Time range in format after,before (00:00,24:00) |
|
||||
| `timezone` | str | Timezone to use for time range |
|
||||
| `min_score` | float | Minimum score of the event |
|
||||
| `max_score` | float | Maximum score of the event |
|
||||
| `is_submitted` | int | Filter events that are submitted to Frigate+ (0 or 1) |
|
||||
| `min_length` | float | Minimum length of the event |
|
||||
| `max_length` | float | Maximum length of the event |
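
For example, a small Python sketch querying recent person events that have clips. The host name and the camera/label filter values are assumptions; the parameters come from the table above.

```python
import requests

# Assumed host; filter values are illustrative.
resp = requests.get(
    "http://frigate.local:5000/api/events",
    params={
        "cameras": "back",        # , separated list of cameras
        "labels": "person",       # , separated list of labels
        "has_clip": 1,            # only events that have clips
        "limit": 10,
        "include_thumbnails": 0,
    },
    timeout=10,
)

for event in resp.json():
    print(event["id"], event["label"], event.get("sub_label"))
```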
|
||||
|
||||
### `GET /api/events/summary`
|
||||
|
||||
Returns summary data for events in the database. Used by the Home Assistant integration.
|
||||
|
||||
### `GET /api/events/<id>`
|
||||
|
||||
Returns data for a single event.
|
||||
|
||||
### `DELETE /api/events/<id>`
|
||||
|
||||
Permanently deletes the event along with any clips/snapshots.
|
||||
|
||||
### `POST /api/events/<id>/retain`
|
||||
|
||||
Sets retain to true for the event id.
|
||||
|
||||
### `POST /api/events/<id>/plus`
|
||||
|
||||
Submits the snapshot of the event to Frigate+ for labeling.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------------------- | ---- | ---------------------------------- |
|
||||
| `include_annotation` | int | Submit annotation to Frigate+ too. |
|
||||
|
||||
### `PUT /api/events/<id>/false_positive`
|
||||
|
||||
Submits the snapshot of the event to Frigate+ for labeling and adds the detection as a false positive.
|
||||
|
||||
### `DELETE /api/events/<id>/retain`
|
||||
|
||||
Sets retain to false for the event id (event may be deleted quickly after removing).
|
||||
|
||||
### `POST /api/events/<id>/sub_label`
|
||||
|
||||
Set a sub label for an event. For example to update `person` -> `person's name` if they were recognized with facial recognition.
|
||||
Sub labels must be 100 characters or shorter.
|
||||
|
||||
```json
|
||||
{
|
||||
"subLabel": "some_string",
|
||||
"subLabelScore": 0.79
|
||||
}
|
||||
```
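
A minimal Python sketch posting the body above. The event id and sub label are placeholders, and the host name is an assumption.

```python
import requests

# Placeholder event id and sub label; assumed host.
event_id = "1682970645.13116-1ug7ns"
resp = requests.post(
    f"http://frigate.local:5000/api/events/{event_id}/sub_label",
    json={"subLabel": "some_string", "subLabelScore": 0.79},
    timeout=10,
)
print(resp.status_code, resp.json())
```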
|
||||
|
||||
### `GET /api/events/<id>/thumbnail.jpg`
|
||||
|
||||
Returns a thumbnail for the event id optimized for notifications. Works while the event is in progress and after completion. Passing `?format=android` will convert the thumbnail to 2:1 aspect ratio.
|
||||
|
||||
### `GET /api/events/<id>/clip.mp4`
|
||||
|
||||
Returns the clip for the event id. Works after the event has ended.
|
||||
|
||||
### `GET /api/events/<id>/snapshot-clean.png`
|
||||
|
||||
Returns the clean snapshot image for the event id. Only works if `snapshots` and `clean_copy` are enabled in the config.
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | ------------------ |
|
||||
| `download` | bool | Download the image |
|
||||
|
||||
### `GET /api/events/<id>/snapshot.jpg`
|
||||
|
||||
Returns the snapshot image for the event id. Works while the event is in progress and after completion.
|
||||
|
||||
Accepts the following query string parameters, but they are only applied when an event is in progress. After the event is completed, the saved snapshot is returned from disk without modification:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ------------------------------------------------- |
|
||||
| `h` | int | Height in pixels |
|
||||
| `bbox` | int | Show bounding boxes for detected objects (0 or 1) |
|
||||
| `timestamp` | int | Print the timestamp in the upper left (0 or 1) |
|
||||
| `crop`      | int  | Crop the snapshot (0 or 1)                        |
|
||||
| `quality` | int | Jpeg encoding quality (0-100). Defaults to 70. |
|
||||
| `download` | bool | Download the image |
|
||||
|
||||
### `POST /api/events/<camera_name>/<label>/create`
|
||||
|
||||
Create a manual event with a given `label` (ex: doorbell press) to capture a specific event besides an object being detected.
|
||||
|
||||
:::warning
|
||||
|
||||
Recording retention config still applies to manual events. If Frigate is configured with `mode: motion`, the manual event will only keep recording segments where motion occurred.
|
||||
|
||||
:::
|
||||
|
||||
**Optional Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"sub_label": "some_string", // add sub label to event
|
||||
"duration": 30, // predetermined length of event (default: 30 seconds) or can be to null for indeterminate length event
|
||||
"include_recording": true, // whether the event should save recordings along with the snapshot that is taken
|
||||
"draw": {
|
||||
// optional annotations that will be drawn on the snapshot
|
||||
"boxes": [
|
||||
{
|
||||
"box": [0.5, 0.5, 0.25, 0.25], // box consists of x, y, width, height which are on a scale between 0 - 1
|
||||
"color": [255, 0, 0], // color of the box, default is red
|
||||
"score": 100 // optional score associated with the box
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Success Response:**
|
||||
|
||||
```json
|
||||
{
|
||||
"event_id": "1682970645.13116-1ug7ns",
|
||||
"message": "Successfully created event.",
|
||||
"success": true
|
||||
}
|
||||
```
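
A hedged Python sketch creating a 30-second manual event with a recording, using the optional body shown above. The host, camera name, label, and sub label are assumptions.

```python
import requests

# Assumed host, camera name, and label; body fields are documented above.
resp = requests.post(
    "http://frigate.local:5000/api/events/front_door/doorbell_press/create",
    json={
        "sub_label": "delivery",
        "duration": 30,            # or None for an indeterminate-length event
        "include_recording": True,
    },
    timeout=10,
)
event_id = resp.json()["event_id"]
print("created manual event", event_id)
```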
|
||||
|
||||
### `PUT /api/events/<event_id>/end`
|
||||
|
||||
End a specific manual event without a predetermined length.
|
||||
|
||||
### `GET /api/events/<id>/preview.gif`
|
||||
|
||||
Gif covering the first 20 seconds of a specific event.
|
||||
|
||||
## Previews
|
||||
|
||||
Previews are low-resolution, low-fps videos that are quickly scrubbable and can be used for notifications or time-lapses.
|
||||
|
||||
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
Metadata about previews for this time range.
|
||||
|
||||
### `GET /api/preview/<year>-<month>/<day>/<hour>/<camera>/<timezone>`
|
||||
|
||||
Metadata about previews for this hour.
|
||||
|
||||
### `GET /api/preview/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
List of frames in the preview cache for the time range. Previews are only kept in the cache until they are combined into an mp4 at the end of the hour.
|
||||
|
||||
### `GET /api/preview/<file_name>/thumbnail.jpg`
|
||||
|
||||
Specific preview frame from preview cache.
|
||||
|
||||
### `GET /<camera>/start/<start-timestamp>/end/<end-timestamp>/preview`
|
||||
|
||||
Looping image made from preview video / frames during this time range.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------- | ---- | -------------------------------- |
|
||||
| `format` | str | Format of preview [`gif`, `mp4`] |
|
||||
|
||||
## Recordings
|
||||
|
||||
### `GET /vod/<year>-<month>/<day>/<hour>/<camera>/master.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the specified hour and camera. Can be viewed in an application like VLC.
|
||||
|
||||
### `GET /vod/event/<event-id>/index.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the specified event. Can be viewed in an application like VLC.
|
||||
|
||||
### `GET /vod/<camera>/start/<start-timestamp>/end/<end-timestamp>/index.m3u8`
|
||||
|
||||
HTTP Live Streaming Video on Demand URL for the camera with the specified time range. Can be viewed in an application like VLC.
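
For illustration, a short Python sketch that builds this URL for the last 10 minutes; the host and camera name are assumptions, and the resulting URL can be opened directly in a player such as VLC.

```python
import time

# Assumed host and camera name; timestamps are epoch seconds.
end = int(time.time())
start = end - 600  # last 10 minutes
url = f"http://frigate.local:5000/vod/back/start/{start}/end/{end}/index.m3u8"
print("Open in VLC:", url)
```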
|
||||
|
||||
### `POST /api/export/<camera>/start/<start-timestamp>/end/<end-timestamp>`
|
||||
|
||||
Export recordings from `start-timestamp` to `end-timestamp` for `camera` as a single mp4 file. These recordings will be exported to the `/media/frigate/exports` folder.
|
||||
|
||||
It is also possible to export this recording as a time-lapse.
|
||||
|
||||
**Optional Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"playback": "realtime" // playback factor: realtime or timelapse_25x
|
||||
}
|
||||
```
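
A short Python sketch requesting a 25x time-lapse export of the last hour. The host and camera name are assumptions; the `playback` field matches the body above.

```python
import time

import requests

# Assumed host and camera name; timestamps are epoch seconds.
end = int(time.time())
start = end - 3600  # last hour

resp = requests.post(
    f"http://frigate.local:5000/api/export/back/start/{start}/end/{end}",
    json={"playback": "timelapse_25x"},
    timeout=30,
)
print(resp.status_code, resp.text)
```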
|
||||
|
||||
### `DELETE /api/export/<export_name>`
|
||||
|
||||
Delete an export from disk.
|
||||
|
||||
### `PATCH /api/export/<export_name_current>/<export_name_new>`
|
||||
|
||||
Renames an export.
|
||||
|
||||
### `GET /api/<camera_name>/recordings/summary`
|
||||
|
||||
Hourly summary of recordings data for a camera.
|
||||
|
||||
### `GET /api/<camera_name>/recordings`
|
||||
|
||||
Get recording segment details for the given timestamp range.
|
||||
|
||||
| param | Type | Description |
|
||||
| -------- | ---- | ------------------------------------- |
|
||||
| `after` | int | Unix timestamp for beginning of range |
|
||||
| `before` | int | Unix timestamp for end of range |
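
A minimal sketch listing recording segments for the last hour; the host and camera name are assumptions.

```python
import time

import requests

# Assumed host and camera name.
before = int(time.time())
after = before - 3600

segments = requests.get(
    "http://frigate.local:5000/api/back/recordings",
    params={"after": after, "before": before},
    timeout=10,
).json()
print(f"{len(segments)} recording segments in the last hour")
```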
|
||||
|
||||
### `GET /api/<camera_name>/recordings/<frame_time>/snapshot.png`
|
||||
|
||||
Returns the snapshot image from the specific point in that camera's recordings.
|
||||
|
||||
## Reviews
|
||||
|
||||
### `GET /api/review`
|
||||
|
||||
Reviews from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | -------------------------------------------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `zones` | str | , separated list of zones |
|
||||
| `reviewed` | int | Include items that have been reviewed (0 or 1) |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
| `severity` | str | Limit items to severity (alert, detection, significant_motion) |
|
||||
|
||||
### `GET /api/review/<id>`
|
||||
|
||||
Get review with `id` from the database.
|
||||
|
||||
### `GET /api/review/summary`
|
||||
|
||||
Summary of reviews for the last 30 days. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ---------- | ---- | --------------------------- |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
| `labels` | str | , separated list of labels |
|
||||
| `timezone` | str | Timezone name |
|
||||
|
||||
### `POST /api/reviews/viewed`
|
||||
|
||||
Mark item(s) as reviewed.
|
||||
|
||||
**Required Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"ids": ["123", "456"] // , separated list of review IDs
|
||||
}
|
||||
```
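
For example, a small Python sketch posting the body above; the review ids are placeholders and the host is an assumption.

```python
import requests

# Placeholder review ids; assumed host.
resp = requests.post(
    "http://frigate.local:5000/api/reviews/viewed",
    json={"ids": ["123", "456"]},
    timeout=10,
)
print(resp.json())
```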
|
||||
|
||||
### `DELETE /api/review/<id>/viewed`
|
||||
|
||||
Mark an item as not reviewed.
|
||||
|
||||
### `POST /api/reviews/delete`
|
||||
|
||||
Delete review items.
|
||||
|
||||
**Required Body:**
|
||||
|
||||
```json
|
||||
{
|
||||
"ids": ["123", "456"] // , separated list of review IDs
|
||||
}
|
||||
```
|
||||
|
||||
### `GET /review/activity/motion`
|
||||
|
||||
Get the motion activity for camera(s) during a specified time period.
|
||||
|
||||
| param | Type | Description |
|
||||
| --------- | ---- | --------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
|
||||
### `GET /review/activity/audio`
|
||||
|
||||
Get the audio activity for camera(s) during a specified time period.
|
||||
|
||||
| param | Type | Description |
|
||||
| --------- | ---- | --------------------------- |
|
||||
| `before` | int | Epoch time |
|
||||
| `after` | int | Epoch time |
|
||||
| `cameras` | str | , separated list of cameras |
|
||||
|
||||
## Timeline
|
||||
|
||||
### `GET /api/timeline`
|
||||
|
||||
Timeline of key moments of event(s) from the database. Accepts the following query string parameters:
|
||||
|
||||
| param | Type | Description |
|
||||
| ----------- | ---- | ----------------------------------- |
|
||||
| `camera` | str | Name of camera |
|
||||
| `source_id` | str | ID of tracked object |
|
||||
| `limit` | int | Limit the number of events returned |
|
||||
|
||||
@@ -25,7 +25,7 @@ Available via HACS as a default repository. To install:
|
||||
- Use [HACS](https://hacs.xyz/) to install the integration:
|
||||
|
||||
```
|
||||
Home Assistant > HACS > Integrations > "Explore & Add Integrations" > Frigate
|
||||
Home Assistant > HACS > Click in the Search bar and type "Frigate" > Frigate
|
||||
```
|
||||
|
||||
- Restart Home Assistant.
|
||||
@@ -215,7 +215,7 @@ For advanced usecases, this behavior can be changed with the [RTSP URL
|
||||
template](#options) option. When set, this string will override the default stream
|
||||
address that is derived from the default behavior described above. This option supports
|
||||
[jinja2 templates](https://jinja.palletsprojects.com/) and has the `camera` dict
|
||||
variables from [Frigate API](api.md)
|
||||
variables from [Frigate API](../integrations/api)
|
||||
available for the template. Note that no Home Assistant state is available to the
|
||||
template, only the camera dict from Frigate.
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ These are the MQTT messages generated by Frigate. The default topic_prefix is `f
|
||||
|
||||
Designed to be used as an availability topic with Home Assistant. Possible messages are:
|
||||
"online": published when Frigate is running (on startup)
|
||||
"offline": published right before Frigate stops
|
||||
"offline": published after Frigate has stopped
|
||||
|
||||
### `frigate/restart`
|
||||
|
||||
@@ -147,6 +147,10 @@ Message published for each changed review item. The first message is published w
|
||||
|
||||
Same data available at `/api/stats` published at a configurable interval.
|
||||
|
||||
### `frigate/camera_activity`
|
||||
|
||||
Returns data about each camera, its current features, and whether it is detecting motion, objects, etc. Can be triggered by publishing to `frigate/onConnect`.
|
||||
|
||||
### `frigate/notifications/set`
|
||||
|
||||
Topic to turn notifications on and off. Expected values are `ON` and `OFF`.
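
As a sketch, turning notifications off via paho-mqtt; the broker host is an assumption, and `frigate` is the default topic_prefix noted above.

```python
import paho.mqtt.publish as publish

# Assumed broker host; "frigate" is the default topic_prefix.
publish.single("frigate/notifications/set", payload="OFF", hostname="mqtt.local")
```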
|
||||
|
||||
@@ -23,7 +23,7 @@ In Frigate, you can use an environment variable or a docker secret named `PLUS_A
|
||||
|
||||
:::warning
|
||||
|
||||
You cannot use the `environment_vars` section of your configuration file to set this environment variable.
|
||||
You cannot use the `environment_vars` section of your Frigate configuration file to set this environment variable. It must be defined as an environment variable in the docker config or HA addon config.
|
||||
|
||||
:::
|
||||
|
||||
|
||||
@@ -18,3 +18,7 @@ Please use your own knowledge to assess and vet them before you install anything
|
||||
[Double Take](https://github.com/skrashevich/double-take) provides a unified UI and API for processing and training images for facial recognition.
|
||||
It supports automatically setting the sub labels in Frigate for person objects that are detected and recognized.
|
||||
This is a fork (with fixed errors and new features) of the [original Double Take](https://github.com/jakowenko/double-take) project, which, unfortunately, is no longer maintained by its author.
|
||||
|
||||
## [Frigate telegram](https://github.com/OldTyT/frigate-telegram)
|
||||
|
||||
[Frigate telegram](https://github.com/OldTyT/frigate-telegram) makes it possible to send events from Frigate to Telegram. Events are sent as a message with a text description, video, and thumbnail.
|
||||
|
||||
@@ -28,6 +28,18 @@ The USB coral has different IDs when it is uninitialized and initialized.
|
||||
- When running Frigate in a VM, Proxmox LXC, etc., you must ensure both device IDs are mapped.
|
||||
- When running HA OS you may need to run the Full Access version of the Frigate addon with the `Protected Mode` switch disabled so that the coral can be accessed.
|
||||
|
||||
### Synology 716+II running DSM 7.2.1-69057 Update 5
|
||||
|
||||
Some users have reported that this older device runs an older kernel, causing issues with the Coral not being detected. The following steps allowed it to be detected correctly:
|
||||
|
||||
1. Plug in the coral TPU in any of the USB ports on the NAS
|
||||
2. Open the control panel - info screen. The coral TPU would be shown as a generic device.
|
||||
3. Start the docker container with Coral TPU enabled in the config
|
||||
4. The TPU would be detected but a few moments later it would disconnect.
|
||||
5. While leaving the TPU device plugged in, restart the NAS using the reboot command in the UI. Do NOT unplug the NAS/power it off etc.
|
||||
6. Open the control panel - info screen. The Coral TPU will now be recognized as a USB device - Google Inc.
|
||||
7. Start the Frigate container. Everything should work now!
|
||||
|
||||
## USB Coral Detection Appears to be Stuck
|
||||
|
||||
The USB Coral can become stuck and need to be restarted. This can happen for a number of reasons depending on the hardware and software setup. Some common reasons are:
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
const path = require("path");
|
||||
|
||||
module.exports = {
|
||||
title: "Frigate",
|
||||
tagline: "NVR With Realtime Object Detection for IP Cameras",
|
||||
url: "https://docs.frigate.video",
|
||||
baseUrl: "/",
|
||||
onBrokenLinks: "throw",
|
||||
onBrokenMarkdownLinks: "warn",
|
||||
favicon: "img/favicon.ico",
|
||||
organizationName: "blakeblackshear",
|
||||
projectName: "frigate",
|
||||
themes: ["@docusaurus/theme-mermaid"],
|
||||
markdown: {
|
||||
mermaid: true,
|
||||
},
|
||||
themeConfig: {
|
||||
algolia: {
|
||||
appId: "WIURGBNBPY",
|
||||
apiKey: "d02cc0a6a61178b25da550212925226b",
|
||||
indexName: "frigate",
|
||||
},
|
||||
docs: {
|
||||
sidebar: {
|
||||
hideable: true,
|
||||
},
|
||||
},
|
||||
prism: {
|
||||
additionalLanguages: ["bash", "json"],
|
||||
},
|
||||
navbar: {
|
||||
title: "Frigate",
|
||||
logo: {
|
||||
alt: "Frigate",
|
||||
src: "img/logo.svg",
|
||||
srcDark: "img/logo-dark.svg",
|
||||
},
|
||||
items: [
|
||||
{
|
||||
to: "/",
|
||||
activeBasePath: "docs",
|
||||
label: "Docs",
|
||||
position: "left",
|
||||
},
|
||||
{
|
||||
href: "https://frigate.video",
|
||||
label: "Website",
|
||||
position: "right",
|
||||
},
|
||||
{
|
||||
href: "http://demo.frigate.video",
|
||||
label: "Demo",
|
||||
position: "right",
|
||||
},
|
||||
{
|
||||
href: "https://github.com/blakeblackshear/frigate",
|
||||
label: "GitHub",
|
||||
position: "right",
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
style: "dark",
|
||||
links: [
|
||||
{
|
||||
title: "Community",
|
||||
items: [
|
||||
{
|
||||
label: "GitHub",
|
||||
href: "https://github.com/blakeblackshear/frigate",
|
||||
},
|
||||
{
|
||||
label: "Discussions",
|
||||
href: "https://github.com/blakeblackshear/frigate/discussions",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`,
|
||||
},
|
||||
},
|
||||
plugins: [path.resolve(__dirname, "plugins", "raw-loader")],
|
||||
presets: [
|
||||
[
|
||||
"@docusaurus/preset-classic",
|
||||
{
|
||||
docs: {
|
||||
routeBasePath: "/",
|
||||
sidebarPath: require.resolve("./sidebars.js"),
|
||||
// Please change this to your repo.
|
||||
editUrl:
|
||||
"https://github.com/blakeblackshear/frigate/edit/master/docs/",
|
||||
sidebarCollapsible: false,
|
||||
},
|
||||
|
||||
theme: {
|
||||
customCss: require.resolve("./src/css/custom.css"),
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
};
|
||||
158
docs/docusaurus.config.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import type * as Preset from '@docusaurus/preset-classic';
|
||||
import * as path from 'node:path';
|
||||
import type { Config, PluginConfig } from '@docusaurus/types';
|
||||
import type * as OpenApiPlugin from 'docusaurus-plugin-openapi-docs';
|
||||
|
||||
const config: Config = {
|
||||
title: 'Frigate',
|
||||
tagline: 'NVR With Realtime Object Detection for IP Cameras',
|
||||
url: 'https://docs.frigate.video',
|
||||
baseUrl: '/',
|
||||
onBrokenLinks: 'throw',
|
||||
onBrokenMarkdownLinks: 'warn',
|
||||
favicon: 'img/favicon.ico',
|
||||
organizationName: 'blakeblackshear',
|
||||
projectName: 'frigate',
|
||||
themes: ['@docusaurus/theme-mermaid', 'docusaurus-theme-openapi-docs'],
|
||||
markdown: {
|
||||
mermaid: true,
|
||||
},
|
||||
themeConfig: {
|
||||
algolia: {
|
||||
appId: 'WIURGBNBPY',
|
||||
apiKey: 'd02cc0a6a61178b25da550212925226b',
|
||||
indexName: 'frigate',
|
||||
},
|
||||
docs: {
|
||||
sidebar: {
|
||||
hideable: true,
|
||||
},
|
||||
},
|
||||
prism: {
|
||||
additionalLanguages: ['bash', 'json'],
|
||||
},
|
||||
languageTabs: [
|
||||
{
|
||||
highlight: 'python',
|
||||
language: 'python',
|
||||
logoClass: 'python',
|
||||
},
|
||||
{
|
||||
highlight: 'javascript',
|
||||
language: 'nodejs',
|
||||
logoClass: 'nodejs',
|
||||
},
|
||||
{
|
||||
highlight: 'javascript',
|
||||
language: 'javascript',
|
||||
logoClass: 'javascript',
|
||||
},
|
||||
{
|
||||
highlight: 'bash',
|
||||
language: 'curl',
|
||||
logoClass: 'curl',
|
||||
},
|
||||
{
|
||||
highlight: "rust",
|
||||
language: "rust",
|
||||
logoClass: "rust",
|
||||
},
|
||||
],
|
||||
navbar: {
|
||||
title: 'Frigate',
|
||||
logo: {
|
||||
alt: 'Frigate',
|
||||
src: 'img/logo.svg',
|
||||
srcDark: 'img/logo-dark.svg',
|
||||
},
|
||||
items: [
|
||||
{
|
||||
to: '/',
|
||||
activeBasePath: 'docs',
|
||||
label: 'Docs',
|
||||
position: 'left',
|
||||
},
|
||||
{
|
||||
href: 'https://frigate.video',
|
||||
label: 'Website',
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
href: 'http://demo.frigate.video',
|
||||
label: 'Demo',
|
||||
position: 'right',
|
||||
},
|
||||
{
|
||||
href: 'https://github.com/blakeblackshear/frigate',
|
||||
label: 'GitHub',
|
||||
position: 'right',
|
||||
},
|
||||
],
|
||||
},
|
||||
footer: {
|
||||
style: 'dark',
|
||||
links: [
|
||||
{
|
||||
title: 'Community',
|
||||
items: [
|
||||
{
|
||||
label: 'GitHub',
|
||||
href: 'https://github.com/blakeblackshear/frigate',
|
||||
},
|
||||
{
|
||||
label: 'Discussions',
|
||||
href: 'https://github.com/blakeblackshear/frigate/discussions',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
copyright: `Copyright © ${new Date().getFullYear()} Blake Blackshear`,
|
||||
},
|
||||
},
|
||||
plugins: [
|
||||
path.resolve(__dirname, 'plugins', 'raw-loader'),
|
||||
[
|
||||
'docusaurus-plugin-openapi-docs',
|
||||
{
|
||||
id: 'openapi',
|
||||
docsPluginId: 'classic', // configured for preset-classic
|
||||
config: {
|
||||
frigateApi: {
|
||||
specPath: 'static/frigate-api.yaml',
|
||||
outputDir: 'docs/integrations/api',
|
||||
sidebarOptions: {
|
||||
groupPathsBy: 'tag',
|
||||
categoryLinkSource: 'tag',
|
||||
sidebarCollapsible: true,
|
||||
sidebarCollapsed: true,
|
||||
},
|
||||
showSchemas: true,
|
||||
} satisfies OpenApiPlugin.Options,
|
||||
},
|
||||
},
|
||||
]
|
||||
] as PluginConfig[],
|
||||
presets: [
|
||||
[
|
||||
'classic',
|
||||
{
|
||||
docs: {
|
||||
routeBasePath: '/',
|
||||
sidebarPath: './sidebars.ts',
|
||||
// Please change this to your repo.
|
||||
editUrl: 'https://github.com/blakeblackshear/frigate/edit/master/docs/',
|
||||
sidebarCollapsible: false,
|
||||
docItemComponent: '@theme/ApiItem', // Derived from docusaurus-theme-openapi
|
||||
},
|
||||
|
||||
theme: {
|
||||
customCss: './src/css/custom.css',
|
||||
},
|
||||
} satisfies Preset.Options,
|
||||
],
|
||||
],
|
||||
};
|
||||
|
||||
export default async function createConfig() {
|
||||
return config;
|
||||
}
|
||||
4673
docs/package-lock.json
generated
File diff suppressed because it is too large
@@ -4,11 +4,14 @@
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"docusaurus": "docusaurus",
|
||||
"start": "docusaurus start --host 0.0.0.0",
|
||||
"build": "docusaurus build",
|
||||
"start": "npm run regen-docs && docusaurus start --host 0.0.0.0",
|
||||
"build": "npm run regen-docs && docusaurus build",
|
||||
"swizzle": "docusaurus swizzle",
|
||||
"deploy": "docusaurus deploy",
|
||||
"clear": "docusaurus clear",
|
||||
"gen-api-docs": "docusaurus gen-api-docs all",
|
||||
"clear-api-docs": "docusaurus clean-api-docs all",
|
||||
"regen-docs": "npm run clear-api-docs && npm run gen-api-docs",
|
||||
"serve": "docusaurus serve --host 0.0.0.0",
|
||||
"write-translations": "docusaurus write-translations",
|
||||
"write-heading-ids": "docusaurus write-heading-ids"
|
||||
@@ -17,8 +20,11 @@
|
||||
"@docusaurus/core": "^3.5.2",
|
||||
"@docusaurus/preset-classic": "^3.5.2",
|
||||
"@docusaurus/theme-mermaid": "^3.5.2",
|
||||
"@mdx-js/react": "^3.0.0",
|
||||
"clsx": "^2.0.0",
|
||||
"@docusaurus/plugin-content-docs": "^3.5.2",
|
||||
"@mdx-js/react": "^3.0.1",
|
||||
"clsx": "^2.1.1",
|
||||
"docusaurus-plugin-openapi-docs": "^4.1.0",
|
||||
"docusaurus-theme-openapi-docs": "^4.1.0",
|
||||
"prism-react-renderer": "^2.4.0",
|
||||
"raw-loader": "^4.0.2",
|
||||
"react": "^18.3.1",
|
||||
|
||||
@@ -1,87 +0,0 @@
|
||||
module.exports = {
|
||||
docs: {
|
||||
Frigate: [
|
||||
"frigate/index",
|
||||
"frigate/hardware",
|
||||
"frigate/installation",
|
||||
"frigate/camera_setup",
|
||||
"frigate/video_pipeline",
|
||||
"frigate/glossary",
|
||||
],
|
||||
Guides: [
|
||||
"guides/getting_started",
|
||||
"guides/configuring_go2rtc",
|
||||
"guides/ha_notifications",
|
||||
"guides/ha_network_storage",
|
||||
"guides/reverse_proxy",
|
||||
],
|
||||
Configuration: {
|
||||
"Configuration Files": [
|
||||
"configuration/index",
|
||||
"configuration/reference",
|
||||
{
|
||||
type: "link",
|
||||
label: "Go2RTC Configuration Reference",
|
||||
href: "https://github.com/AlexxIT/go2rtc/tree/v1.9.4#configuration",
|
||||
},
|
||||
],
|
||||
Detectors: [
|
||||
"configuration/object_detectors",
|
||||
"configuration/audio_detectors",
|
||||
],
|
||||
"Semantic Search": [
|
||||
"configuration/semantic_search",
|
||||
"configuration/genai",
|
||||
],
|
||||
Cameras: [
|
||||
"configuration/cameras",
|
||||
"configuration/review",
|
||||
"configuration/record",
|
||||
"configuration/snapshots",
|
||||
"configuration/motion_detection",
|
||||
"configuration/birdseye",
|
||||
"configuration/live",
|
||||
"configuration/restream",
|
||||
"configuration/autotracking",
|
||||
"configuration/camera_specific",
|
||||
],
|
||||
Objects: [
|
||||
"configuration/object_filters",
|
||||
"configuration/masks",
|
||||
"configuration/zones",
|
||||
"configuration/objects",
|
||||
"configuration/stationary_objects",
|
||||
],
|
||||
"Extra Configuration": [
|
||||
"configuration/authentication",
|
||||
"configuration/notifications",
|
||||
"configuration/hardware_acceleration",
|
||||
"configuration/ffmpeg_presets",
|
||||
"configuration/tls",
|
||||
"configuration/advanced",
|
||||
],
|
||||
},
|
||||
Integrations: [
|
||||
"integrations/plus",
|
||||
"integrations/home-assistant",
|
||||
"integrations/api",
|
||||
"integrations/mqtt",
|
||||
"integrations/third_party_extensions",
|
||||
],
|
||||
"Frigate+": [
|
||||
"plus/index",
|
||||
"plus/first_model",
|
||||
"plus/improving_model",
|
||||
"plus/faq",
|
||||
],
|
||||
Troubleshooting: [
|
||||
"troubleshooting/faqs",
|
||||
"troubleshooting/recordings",
|
||||
"troubleshooting/edgetpu",
|
||||
],
|
||||
Development: [
|
||||
"development/contributing",
|
||||
"development/contributing-boards",
|
||||
],
|
||||
},
|
||||
};
|
||||
105
docs/sidebars.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { SidebarsConfig, } from '@docusaurus/plugin-content-docs';
|
||||
import { PropSidebarItemLink } from '@docusaurus/plugin-content-docs';
|
||||
import frigateHttpApiSidebar from './docs/integrations/api/sidebar';
|
||||
|
||||
const sidebars: SidebarsConfig = {
|
||||
docs: {
|
||||
Frigate: [
|
||||
'frigate/index',
|
||||
'frigate/hardware',
|
||||
'frigate/installation',
|
||||
'frigate/camera_setup',
|
||||
'frigate/video_pipeline',
|
||||
'frigate/glossary',
|
||||
],
|
||||
Guides: [
|
||||
'guides/getting_started',
|
||||
'guides/configuring_go2rtc',
|
||||
'guides/ha_notifications',
|
||||
'guides/ha_network_storage',
|
||||
'guides/reverse_proxy',
|
||||
],
|
||||
Configuration: {
|
||||
'Configuration Files': [
|
||||
'configuration/index',
|
||||
'configuration/reference',
|
||||
{
|
||||
type: 'link',
|
||||
label: 'Go2RTC Configuration Reference',
|
||||
href: 'https://github.com/AlexxIT/go2rtc/tree/v1.9.4#configuration',
|
||||
} as PropSidebarItemLink,
|
||||
],
|
||||
Detectors: [
|
||||
'configuration/object_detectors',
|
||||
'configuration/audio_detectors',
|
||||
],
|
||||
'Semantic Search': [
|
||||
'configuration/semantic_search',
|
||||
'configuration/genai',
|
||||
],
|
||||
Cameras: [
|
||||
'configuration/cameras',
|
||||
'configuration/review',
|
||||
'configuration/record',
|
||||
'configuration/snapshots',
|
||||
'configuration/motion_detection',
|
||||
'configuration/birdseye',
|
||||
'configuration/live',
|
||||
'configuration/restream',
|
||||
'configuration/autotracking',
|
||||
'configuration/camera_specific',
|
||||
],
|
||||
Objects: [
|
||||
'configuration/object_filters',
|
||||
'configuration/masks',
|
||||
'configuration/zones',
|
||||
'configuration/objects',
|
||||
'configuration/stationary_objects',
|
||||
],
|
||||
'Extra Configuration': [
|
||||
'configuration/authentication',
|
||||
'configuration/notifications',
|
||||
'configuration/hardware_acceleration',
|
||||
'configuration/ffmpeg_presets',
|
||||
"configuration/pwa",
|
||||
'configuration/tls',
|
||||
'configuration/advanced',
|
||||
],
|
||||
},
|
||||
Integrations: [
|
||||
'integrations/plus',
|
||||
'integrations/home-assistant',
|
||||
// This is the HTTP API generated by OpenAPI
|
||||
{
|
||||
type: 'category',
|
||||
label: 'HTTP API',
|
||||
link: {
|
||||
type: 'generated-index',
|
||||
title: 'Frigate HTTP API',
|
||||
description: 'HTTP API',
|
||||
slug: '/integrations/api/frigate-http-api',
|
||||
},
|
||||
items: frigateHttpApiSidebar,
|
||||
},
|
||||
'integrations/mqtt',
|
||||
'integrations/third_party_extensions',
|
||||
],
|
||||
'Frigate+': [
|
||||
'plus/index',
|
||||
'plus/first_model',
|
||||
'plus/improving_model',
|
||||
'plus/faq',
|
||||
],
|
||||
Troubleshooting: [
|
||||
'troubleshooting/faqs',
|
||||
'troubleshooting/recordings',
|
||||
'troubleshooting/edgetpu',
|
||||
],
|
||||
Development: [
|
||||
'development/contributing',
|
||||
'development/contributing-boards',
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
export default sidebars;
|
||||
@@ -23,3 +23,214 @@
|
||||
margin: 0 calc(-1 * var(--ifm-pre-padding));
|
||||
padding: 0 var(--ifm-pre-padding);
|
||||
}
|
||||
|
||||
/**
|
||||
Custom CSS for the OpenAPI specification. Based on the demo at https://github.com/PaloAltoNetworks/docusaurus-openapi-docs/tree/main/demo
|
||||
*/
|
||||
|
||||
/* Sidebar Method labels */
|
||||
.api-method > .menu__link,
|
||||
.schema > .menu__link {
|
||||
align-items: center;
|
||||
justify-content: start;
|
||||
}
|
||||
|
||||
.api-method > .menu__link::before,
|
||||
.schema > .menu__link::before {
|
||||
width: 55px;
|
||||
height: 20px;
|
||||
font-size: 12px;
|
||||
line-height: 20px;
|
||||
text-transform: uppercase;
|
||||
font-weight: 600;
|
||||
border-radius: 0.25rem;
|
||||
border: 1px solid;
|
||||
margin-right: var(--ifm-spacing-horizontal);
|
||||
text-align: center;
|
||||
flex-shrink: 0;
|
||||
border-color: transparent;
|
||||
color: white;
|
||||
}
|
||||
|
||||
.get > .menu__link::before {
|
||||
content: "get";
|
||||
background-color: var(--ifm-color-primary);
|
||||
}
|
||||
|
||||
.post > .menu__link::before {
|
||||
content: "post";
|
||||
background-color: var(--ifm-color-success);
|
||||
}
|
||||
|
||||
.delete > .menu__link::before {
|
||||
content: "del";
|
||||
background-color: var(--openapi-code-red);
|
||||
}
|
||||
|
||||
.put > .menu__link::before {
|
||||
content: "put";
|
||||
background-color: var(--openapi-code-blue);
|
||||
}
|
||||
|
||||
.patch > .menu__link::before {
|
||||
content: "patch";
|
||||
background-color: var(--openapi-code-orange);
|
||||
}
|
||||
|
||||
.head > .menu__link::before {
|
||||
content: "head";
|
||||
background-color: var(--ifm-color-secondary-darkest);
|
||||
}
|
||||
|
||||
.event > .menu__link::before {
|
||||
content: "event";
|
||||
background-color: var(--ifm-color-secondary-darkest);
|
||||
}
|
||||
|
||||
.schema > .menu__link::before {
|
||||
content: "schema";
|
||||
background-color: var(--ifm-color-secondary-darkest);
|
||||
}
|
||||
|
||||
.menu__list-item--deprecated > .menu__link,
|
||||
.menu__list-item--deprecated > .menu__link:hover {
|
||||
text-decoration: line-through;
|
||||
}
|
||||
/* Sidebar Method labels High Contrast */
|
||||
.api-method-contrast > .menu__link,
|
||||
.schema-contrast > .menu__link {
|
||||
align-items: center;
|
||||
justify-content: start;
|
||||
}
|
||||
|
||||
.api-method-contrast > .menu__link::before,
|
||||
.schema-contrast > .menu__link::before {
|
||||
width: 55px;
|
||||
height: 20px;
|
||||
font-size: 12px;
|
||||
line-height: 20px;
|
||||
text-transform: uppercase;
|
||||
font-weight: 600;
|
||||
border-radius: 0.25rem;
|
||||
border: 1px solid;
|
||||
border-inline-start-width: 5px;
|
||||
margin-right: var(--ifm-spacing-horizontal);
|
||||
text-align: center;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.get-contrast > .menu__link::before {
|
||||
content: "get";
|
||||
background-color: var(--ifm-color-info-contrast-background);
|
||||
color: var(--ifm-color-info-contrast-foreground);
|
||||
border-color: var(--ifm-color-info-dark);
|
||||
}
|
||||
|
||||
.post-contrast > .menu__link::before {
|
||||
content: "post";
|
||||
background-color: var(--ifm-color-success-contrast-background);
|
||||
color: var(--ifm-color-success-contrast-foreground);
|
||||
border-color: var(--ifm-color-success-dark);
|
||||
}
|
||||
|
||||
.delete-contrast > .menu__link::before {
|
||||
content: "del";
|
||||
background-color: var(--ifm-color-danger-contrast-background);
|
||||
color: var(--ifm-color-danger-contrast-foreground);
|
||||
border-color: var(--ifm-color-danger-dark);
|
||||
}
|
||||
|
||||
.put-contrast > .menu__link::before {
|
||||
content: "put";
|
||||
background-color: var(--ifm-color-warning-contrast-background);
|
||||
color: var(--ifm-color-warning-contrast-foreground);
|
||||
border-color: var(--ifm-color-warning-dark);
|
||||
}
|
||||
|
||||
.patch-contrast > .menu__link::before {
|
||||
content: "patch";
|
||||
background-color: var(--ifm-color-success-contrast-background);
|
||||
color: var(--ifm-color-success-contrast-foreground);
|
||||
border-color: var(--ifm-color-success-dark);
|
||||
}
|
||||
|
||||
.head-contrast > .menu__link::before {
|
||||
content: "head";
|
||||
background-color: var(--ifm-color-secondary-contrast-background);
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
border-color: var(--ifm-color-secondary-dark);
|
||||
}
|
||||
|
||||
.event-contrast > .menu__link::before {
|
||||
content: "event";
|
||||
background-color: var(--ifm-color-secondary-contrast-background);
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
border-color: var(--ifm-color-secondary-dark);
|
||||
}
|
||||
|
||||
.schema-contrast > .menu__link::before {
|
||||
content: "schema";
|
||||
background-color: var(--ifm-color-secondary-contrast-background);
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
border-color: var(--ifm-color-secondary-dark);
|
||||
}
|
||||
|
||||
/* Simple */
|
||||
.api-method-simple > .menu__link {
|
||||
align-items: center;
|
||||
justify-content: start;
|
||||
}
|
||||
.api-method-simple > .menu__link::before {
|
||||
width: 55px;
|
||||
height: 20px;
|
||||
font-size: 12px;
|
||||
line-height: 20px;
|
||||
text-transform: uppercase;
|
||||
font-weight: 600;
|
||||
border-radius: 0.25rem;
|
||||
align-content: start;
|
||||
margin-right: var(--ifm-spacing-horizontal);
|
||||
text-align: right;
|
||||
flex-shrink: 0;
|
||||
border-color: transparent;
|
||||
}
|
||||
|
||||
.get-simple > .menu__link::before {
|
||||
content: "get";
|
||||
color: var(--ifm-color-info);
|
||||
}
|
||||
|
||||
.post-simple > .menu__link::before {
|
||||
content: "post";
|
||||
color: var(--ifm-color-success);
|
||||
}
|
||||
|
||||
.delete-simple > .menu__link::before {
|
||||
content: "del";
|
||||
color: var(--ifm-color-danger);
|
||||
}
|
||||
|
||||
.put-simple > .menu__link::before {
|
||||
content: "put";
|
||||
color: var(--ifm-color-warning);
|
||||
}
|
||||
|
||||
.patch-simple > .menu__link::before {
|
||||
content: "patch";
|
||||
color: var(--ifm-color-warning);
|
||||
}
|
||||
|
||||
.head-simple > .menu__link::before {
|
||||
content: "head";
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
}
|
||||
|
||||
.event-simple > .menu__link::before {
|
||||
content: "event";
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
}
|
||||
|
||||
.schema-simple > .menu__link::before {
|
||||
content: "schema";
|
||||
color: var(--ifm-color-secondary-contrast-foreground);
|
||||
}
|
||||
|
||||
3450
docs/static/frigate-api.yaml
vendored
Normal file
File diff suppressed because it is too large
BIN
docs/static/img/plus/send-to-plus.jpg
vendored
Binary file not shown.
Before Size: 57 KiB | After Size: 62 KiB
BIN
docs/static/img/plus/submit-to-plus.jpg
vendored
Binary file not shown.
Before Size: 63 KiB | After Size: 49 KiB
@@ -1,27 +1,63 @@
|
||||
import argparse
|
||||
import faulthandler
|
||||
import logging
|
||||
import signal
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from flask import cli
|
||||
from pydantic import ValidationError
|
||||
|
||||
from frigate.app import FrigateApp
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.log import setup_logging
|
||||
|
||||
|
||||
def main() -> None:
|
||||
faulthandler.enable()
|
||||
|
||||
# Clear all existing handlers.
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
handlers=[],
|
||||
force=True,
|
||||
)
|
||||
# Setup the logging thread
|
||||
setup_logging()
|
||||
|
||||
threading.current_thread().name = "frigate"
|
||||
cli.show_server_banner = lambda *x: None
|
||||
|
||||
# Make sure we exit cleanly on SIGTERM.
|
||||
signal.signal(signal.SIGTERM, lambda sig, frame: sys.exit())
|
||||
|
||||
# Parse the cli arguments.
|
||||
parser = argparse.ArgumentParser(
|
||||
prog="Frigate",
|
||||
description="An NVR with realtime local object detection for IP cameras.",
|
||||
)
|
||||
parser.add_argument("--validate-config", action="store_true")
|
||||
args = parser.parse_args()
|
||||
|
||||
# Load the configuration.
|
||||
try:
|
||||
config = FrigateConfig.load(install=True)
|
||||
except ValidationError as e:
|
||||
print("*************************************************************")
|
||||
print("*************************************************************")
|
||||
print("*** Your config file is not valid! ***")
|
||||
print("*** Please check the docs at ***")
|
||||
print("*** https://docs.frigate.video/configuration/ ***")
|
||||
print("*************************************************************")
|
||||
print("*************************************************************")
|
||||
print("*** Config Validation Errors ***")
|
||||
print("*************************************************************")
|
||||
for error in e.errors():
|
||||
location = ".".join(str(item) for item in error["loc"])
|
||||
print(f"{location}: {error['msg']}")
|
||||
print("*************************************************************")
|
||||
print("*** End Config Validation Errors ***")
|
||||
print("*************************************************************")
|
||||
sys.exit(1)
|
||||
if args.validate_config:
|
||||
print("*************************************************************")
|
||||
print("*** Your config file is valid. ***")
|
||||
print("*************************************************************")
|
||||
sys.exit(0)
|
||||
|
||||
# Run the main application.
|
||||
FrigateApp().start()
|
||||
FrigateApp(config).start()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -7,172 +7,107 @@ import os
|
||||
import traceback
|
||||
from datetime import datetime, timedelta
|
||||
from functools import reduce
|
||||
from typing import Optional
|
||||
from typing import Any, Optional
|
||||
|
||||
import requests
|
||||
from flask import Blueprint, Flask, current_app, jsonify, make_response, request
|
||||
from fastapi import APIRouter, Body, Path, Request, Response
|
||||
from fastapi.encoders import jsonable_encoder
|
||||
from fastapi.params import Depends
|
||||
from fastapi.responses import JSONResponse, PlainTextResponse
|
||||
from markupsafe import escape
|
||||
from peewee import operator
|
||||
from playhouse.sqliteq import SqliteQueueDatabase
|
||||
from werkzeug.middleware.proxy_fix import ProxyFix
|
||||
|
||||
from frigate.api.auth import AuthBp, get_jwt_secret, limiter
|
||||
from frigate.api.event import EventBp
|
||||
from frigate.api.export import ExportBp
|
||||
from frigate.api.media import MediaBp
|
||||
from frigate.api.notification import NotificationBp
|
||||
from frigate.api.preview import PreviewBp
|
||||
from frigate.api.review import ReviewBp
|
||||
from frigate.api.defs.app_body import AppConfigSetBody
|
||||
from frigate.api.defs.app_query_parameters import AppTimelineHourlyQueryParameters
|
||||
from frigate.api.defs.tags import Tags
|
||||
from frigate.config import FrigateConfig
|
||||
from frigate.const import CONFIG_DIR
|
||||
from frigate.embeddings import EmbeddingsContext
|
||||
from frigate.events.external import ExternalEventProcessor
|
||||
from frigate.models import Event, Timeline
|
||||
from frigate.plus import PlusApi
|
||||
from frigate.ptz.onvif import OnvifController
|
||||
from frigate.stats.emitter import StatsEmitter
|
||||
from frigate.storage import StorageMaintainer
|
||||
from frigate.util.builtin import (
|
||||
clean_camera_user_pass,
|
||||
get_tz_modifiers,
|
||||
update_yaml_from_url,
|
||||
)
|
||||
from frigate.util.services import ffprobe_stream, restart_frigate, vainfo_hwaccel
|
||||
from frigate.util.services import (
|
||||
ffprobe_stream,
|
||||
get_nvidia_driver_info,
|
||||
restart_frigate,
|
||||
vainfo_hwaccel,
|
||||
)
|
||||
from frigate.version import VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
bp = Blueprint("frigate", __name__)
|
||||
bp.register_blueprint(EventBp)
|
||||
bp.register_blueprint(ExportBp)
|
||||
bp.register_blueprint(MediaBp)
|
||||
bp.register_blueprint(PreviewBp)
|
||||
bp.register_blueprint(ReviewBp)
|
||||
bp.register_blueprint(AuthBp)
|
||||
bp.register_blueprint(NotificationBp)
|
||||
router = APIRouter(tags=[Tags.app])
|
||||
|
||||
|
||||
def create_app(
|
||||
frigate_config,
|
||||
database: SqliteQueueDatabase,
|
||||
embeddings: Optional[EmbeddingsContext],
|
||||
detected_frames_processor,
|
||||
storage_maintainer: StorageMaintainer,
|
||||
onvif: OnvifController,
|
||||
external_processor: ExternalEventProcessor,
|
||||
plus_api: PlusApi,
|
||||
stats_emitter: StatsEmitter,
|
||||
):
|
||||
app = Flask(__name__)
|
||||
|
||||
@app.before_request
|
||||
def check_csrf():
|
||||
if request.method in ["GET", "HEAD", "OPTIONS", "TRACE"]:
|
||||
pass
|
||||
if "origin" in request.headers and "x-csrf-token" not in request.headers:
|
||||
return jsonify({"success": False, "message": "Missing CSRF header"}), 401
|
||||
|
||||
@app.before_request
|
||||
def _db_connect():
|
||||
if database.is_closed():
|
||||
database.connect()
|
||||
|
||||
@app.teardown_request
|
||||
def _db_close(exc):
|
||||
if not database.is_closed():
|
||||
database.close()
|
||||
|
||||
app.frigate_config = frigate_config
|
||||
app.embeddings = embeddings
|
||||
app.detected_frames_processor = detected_frames_processor
|
||||
app.storage_maintainer = storage_maintainer
|
||||
app.onvif = onvif
|
||||
app.external_processor = external_processor
|
||||
app.plus_api = plus_api
|
||||
app.camera_error_image = None
|
||||
app.stats_emitter = stats_emitter
|
||||
app.jwt_token = get_jwt_secret() if frigate_config.auth.enabled else None
|
||||
# update the request_address with the x-forwarded-for header from nginx
|
||||
app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1)
|
||||
# initialize the rate limiter for the login endpoint
|
||||
limiter.init_app(app)
|
||||
if frigate_config.auth.failed_login_rate_limit is None:
|
||||
limiter.enabled = False
|
||||
|
||||
app.register_blueprint(bp)
|
||||
|
||||
return app
|
||||
|
||||
|
||||
@bp.route("/")
|
||||
@router.get("/", response_class=PlainTextResponse)
|
||||
def is_healthy():
|
||||
return "Frigate is running. Alive and healthy!"
|
||||
|
||||
|
||||
@bp.route("/config/schema.json")
|
||||
def config_schema():
|
||||
return current_app.response_class(
|
||||
current_app.frigate_config.schema_json(), mimetype="application/json"
|
||||
@router.get("/config/schema.json")
|
||||
def config_schema(request: Request):
|
||||
return Response(
|
||||
content=request.app.frigate_config.schema_json(), media_type="application/json"
|
||||
)
|
||||
|
||||
|
||||
@bp.route("/go2rtc/streams")
|
||||
@router.get("/go2rtc/streams")
|
||||
def go2rtc_streams():
|
||||
r = requests.get("http://127.0.0.1:1984/api/streams")
|
||||
if not r.ok:
|
||||
logger.error("Failed to fetch streams from go2rtc")
|
||||
return make_response(
|
||||
jsonify({"success": False, "message": "Error fetching stream data"}),
|
||||
500,
|
||||
return JSONResponse(
|
||||
content=({"success": False, "message": "Error fetching stream data"}),
|
||||
status_code=500,
|
||||
)
|
||||
stream_data = r.json()
|
||||
for data in stream_data.values():
|
||||
for producer in data.get("producers", []):
|
||||
producer["url"] = clean_camera_user_pass(producer.get("url", ""))
|
||||
return jsonify(stream_data)
|
||||
return JSONResponse(content=stream_data)
|
||||
|
||||
|
||||
@bp.route("/go2rtc/streams/<camera_name>")
|
||||
@router.get("/go2rtc/streams/{camera_name}")
|
||||
def go2rtc_camera_stream(camera_name: str):
|
||||
r = requests.get(
|
||||
f"http://127.0.0.1:1984/api/streams?src={camera_name}&video=all&audio=allµphone"
|
||||
)
|
||||
if not r.ok:
|
||||
logger.error("Failed to fetch streams from go2rtc")
|
||||
return make_response(
|
||||
jsonify({"success": False, "message": "Error fetching stream data"}),
|
||||
500,
|
||||
return JSONResponse(
|
||||
content=({"success": False, "message": "Error fetching stream data"}),
|
||||
status_code=500,
|
||||
)
|
||||
stream_data = r.json()
|
||||
for producer in stream_data.get("producers", []):
|
||||
producer["url"] = clean_camera_user_pass(producer.get("url", ""))
|
||||
return jsonify(stream_data)
|
||||
return JSONResponse(content=stream_data)
|
||||
|
||||
|
||||
@bp.route("/version")
|
||||
@router.get("/version", response_class=PlainTextResponse)
|
||||
def version():
|
||||
return VERSION
|
||||
|
||||
|
||||
@bp.route("/stats")
|
||||
def stats():
|
||||
return jsonify(current_app.stats_emitter.get_latest_stats())
|
||||
@router.get("/stats")
|
||||
def stats(request: Request):
|
||||
return JSONResponse(content=request.app.stats_emitter.get_latest_stats())
|
||||
|
||||
|
||||
@bp.route("/stats/history")
|
||||
def stats_history():
|
||||
keys = request.args.get("keys", default=None)
|
||||
|
||||
@router.get("/stats/history")
|
||||
def stats_history(request: Request, keys: str = None):
|
||||
if keys:
|
||||
keys = keys.split(",")
|
||||
|
||||
return jsonify(current_app.stats_emitter.get_stats_history(keys))
|
||||
return JSONResponse(content=request.app.stats_emitter.get_stats_history(keys))
|
||||
|
||||
|
||||
@bp.route("/config")
|
||||
def config():
|
||||
config_obj: FrigateConfig = current_app.frigate_config
|
||||
@router.get("/config")
|
||||
def config(request: Request):
|
||||
config_obj: FrigateConfig = request.app.frigate_config
|
||||
config: dict[str, dict[str, any]] = config_obj.model_dump(
|
||||
mode="json", warnings="none", exclude_none=True
|
||||
)
|
||||
@@ -183,7 +118,7 @@ def config():
|
||||
# remove the proxy secret
|
||||
config["proxy"].pop("auth_secret", None)
|
||||
|
||||
for camera_name, camera in current_app.frigate_config.cameras.items():
|
||||
for camera_name, camera in request.app.frigate_config.cameras.items():
|
||||
camera_dict = config["cameras"][camera_name]
|
||||
|
||||
# clean paths
|
||||
@@ -199,18 +134,18 @@ def config():
|
||||
for zone_name, zone in config_obj.cameras[camera_name].zones.items():
|
||||
camera_dict["zones"][zone_name]["color"] = zone.color
|
||||
|
||||
config["plus"] = {"enabled": current_app.plus_api.is_active()}
|
||||
config["plus"] = {"enabled": request.app.frigate_config.plus_api.is_active()}
|
||||
config["model"]["colormap"] = config_obj.model.colormap
|
||||
|
||||
for detector_config in config["detectors"].values():
|
||||
detector_config["model"]["labelmap"] = (
|
||||
current_app.frigate_config.model.merged_labelmap
|
||||
request.app.frigate_config.model.merged_labelmap
|
||||
)
|
||||
|
||||
return jsonify(config)
|
||||
return JSONResponse(content=config)


@bp.route("/config/raw")
@router.get("/config/raw")
def config_raw():
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

@@ -221,43 +156,44 @@ def config_raw():
config_file = config_file_yaml

if not os.path.isfile(config_file):
return make_response(
jsonify({"success": False, "message": "Could not find file"}), 404
return JSONResponse(
content=({"success": False, "message": "Could not find file"}),
status_code=404,
)

with open(config_file, "r") as f:
raw_config = f.read()
f.close()

return raw_config, 200
return JSONResponse(
content=raw_config, media_type="text/plain", status_code=200
)


@bp.route("/config/save", methods=["POST"])
def config_save():
save_option = request.args.get("save_option")

new_config = request.get_data().decode()
@router.post("/config/save")
def config_save(save_option: str, body: Any = Body(media_type="text/plain")):
new_config = body.decode()

if not new_config:
return make_response(
jsonify(
return JSONResponse(
content=(
{"success": False, "message": "Config with body param is required"}
),
400,
status_code=400,
)

# Validate the config schema
try:
FrigateConfig.parse_raw(new_config)
FrigateConfig.parse_yaml(new_config)
except Exception:
return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": False,
"message": f"\nConfig Error:\n\n{escape(str(traceback.format_exc()))}",
}
),
400,
status_code=400,
)

# Save the config to file
@@ -274,14 +210,14 @@ def config_save():
f.write(new_config)
f.close()
except Exception:
return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": False,
"message": "Could not write config file, be sure that Frigate has write permission on the config file.",
}
),
400,
status_code=400,
)

if save_option == "restart":
@@ -289,34 +225,34 @@ def config_save():
restart_frigate()
except Exception as e:
logging.error(f"Error restarting Frigate: {e}")
return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": True,
"message": "Config successfully saved, unable to restart Frigate",
}
),
200,
status_code=200,
)

return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": True,
"message": "Config successfully saved, restarting (this can take up to one minute)...",
}
),
200,
status_code=200,
)
else:
return make_response(
jsonify({"success": True, "message": "Config successfully saved."}),
200,
return JSONResponse(
content=({"success": True, "message": "Config successfully saved."}),
status_code=200,
)


@bp.route("/config/set", methods=["PUT"])
def config_set():
@router.put("/config/set")
def config_set(request: Request, body: AppConfigSetBody):
config_file = os.environ.get("CONFIG_FILE", f"{CONFIG_DIR}/config.yml")

# Check if we can use .yaml instead of .yml
@@ -330,80 +266,77 @@ def config_set():
f.close()

try:
update_yaml_from_url(config_file, request.url)
update_yaml_from_url(config_file, str(request.url))
with open(config_file, "r") as f:
new_raw_config = f.read()
f.close()
# Validate the config schema
try:
config_obj = FrigateConfig.parse_raw(new_raw_config)
config = FrigateConfig.parse(new_raw_config)
except Exception:
with open(config_file, "w") as f:
f.write(old_raw_config)
f.close()
logger.error(f"\nConfig Error:\n\n{str(traceback.format_exc())}")
return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": False,
"message": "Error parsing config. Check logs for error message.",
}
),
400,
status_code=400,
)
except Exception as e:
logging.error(f"Error updating config: {e}")
return make_response(
jsonify({"success": False, "message": "Error updating config"}),
500,
return JSONResponse(
content=({"success": False, "message": "Error updating config"}),
status_code=500,
)

json = request.get_json(silent=True) or {}

if json.get("requires_restart", 1) == 0:
current_app.frigate_config = FrigateConfig.runtime_config(
config_obj, current_app.plus_api
)

return make_response(
jsonify(
if body.requires_restart == 0:
request.app.frigate_config = config
return JSONResponse(
content=(
{
"success": True,
"message": "Config successfully updated, restart to apply",
}
),
200,
status_code=200,
)


@bp.route("/ffprobe", methods=["GET"])
def ffprobe():
path_param = request.args.get("paths", "")

@router.get("/ffprobe")
def ffprobe(request: Request, paths: str = ""):
path_param = paths

if not path_param:
return make_response(
jsonify({"success": False, "message": "Path needs to be provided."}), 404
return JSONResponse(
content=({"success": False, "message": "Path needs to be provided."}),
status_code=404,
)

if path_param.startswith("camera"):
camera = path_param[7:]

if camera not in current_app.frigate_config.cameras.keys():
return make_response(
jsonify(
if camera not in request.app.frigate_config.cameras.keys():
return JSONResponse(
content=(
{"success": False, "message": f"{camera} is not a valid camera."}
),
404,
status_code=404,
)

if not current_app.frigate_config.cameras[camera].enabled:
return make_response(
jsonify({"success": False, "message": f"{camera} is not enabled."}), 404
if not request.app.frigate_config.cameras[camera].enabled:
return JSONResponse(
content=({"success": False, "message": f"{camera} is not enabled."}),
status_code=404,
)

paths = map(
lambda input: input.path,
current_app.frigate_config.cameras[camera].ffmpeg.inputs,
request.app.frigate_config.cameras[camera].ffmpeg.inputs,
)
elif "," in clean_camera_user_pass(path_param):
paths = path_param.split(",")
@@ -414,7 +347,7 @@ def ffprobe():
output = []

for path in paths:
ffprobe = ffprobe_stream(current_app.frigate_config.ffmpeg, path.strip())
ffprobe = ffprobe_stream(request.app.frigate_config.ffmpeg, path.strip())
output.append(
{
"return_code": ffprobe.returncode,
@@ -431,14 +364,14 @@ def ffprobe():
}
)

return jsonify(output)
return JSONResponse(content=output)


@bp.route("/vainfo", methods=["GET"])
@router.get("/vainfo")
def vainfo():
vainfo = vainfo_hwaccel()
return jsonify(
{
return JSONResponse(
content={
"return_code": vainfo.returncode,
"stderr": (
vainfo.stderr.decode("unicode_escape").strip()
@@ -454,41 +387,49 @@ def vainfo():
)


@bp.route("/logs/<service>", methods=["GET"])
def logs(service: str):
@router.get("/nvinfo")
def nvinfo():
return JSONResponse(content=get_nvidia_driver_info())


@router.get("/logs/{service}", tags=[Tags.logs])
def logs(
service: str = Path(enum=["frigate", "nginx", "go2rtc"]),
download: Optional[str] = None,
start: Optional[int] = 0,
end: Optional[int] = None,
):
"""Get logs for the requested service (frigate/nginx/go2rtc)"""

def download_logs(service_location: str):
try:
file = open(service_location, "r")
contents = file.read()
file.close()
return jsonify(contents)
return JSONResponse(jsonable_encoder(contents))
except FileNotFoundError as e:
logger.error(e)
return make_response(
jsonify({"success": False, "message": "Could not find log file"}),
500,
return JSONResponse(
content={"success": False, "message": "Could not find log file"},
status_code=500,
)

log_locations = {
"frigate": "/dev/shm/logs/frigate/current",
"go2rtc": "/dev/shm/logs/go2rtc/current",
"nginx": "/dev/shm/logs/nginx/current",
"chroma": "/dev/shm/logs/chroma/current",
}
service_location = log_locations.get(service)

if not service_location:
return make_response(
jsonify({"success": False, "message": "Not a valid service"}),
404,
return JSONResponse(
content={"success": False, "message": "Not a valid service"},
status_code=404,
)

if request.args.get("download", type=bool, default=False):
if download:
return download_logs(service_location)

start = request.args.get("start", type=int, default=0)
end = request.args.get("end", type=int)

try:
file = open(service_location, "r")
contents = file.read()
@@ -529,49 +470,47 @@ def logs(service: str):

logLines.append(currentLine)

return make_response(
jsonify({"totalLines": len(logLines), "lines": logLines[start:end]}),
200,
return JSONResponse(
content={"totalLines": len(logLines), "lines": logLines[start:end]},
status_code=200,
)
except FileNotFoundError as e:
logger.error(e)
return make_response(
jsonify({"success": False, "message": "Could not find log file"}),
500,
return JSONResponse(
content={"success": False, "message": "Could not find log file"},
status_code=500,
)


@bp.route("/restart", methods=["POST"])
@router.post("/restart")
def restart():
try:
restart_frigate()
except Exception as e:
logging.error(f"Error restarting Frigate: {e}")
return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": False,
"message": "Unable to restart Frigate.",
}
),
500,
status_code=500,
)

return make_response(
jsonify(
return JSONResponse(
content=(
{
"success": True,
"message": "Restarting (this can take up to one minute)...",
}
),
200,
status_code=200,
)


@bp.route("/labels")
def get_labels():
camera = request.args.get("camera", type=str, default="")

@router.get("/labels")
def get_labels(camera: str = ""):
try:
if camera:
events = Event.select(Event.label).where(Event.camera == camera).distinct()
@@ -579,24 +518,23 @@ def get_labels():
events = Event.select(Event.label).distinct()
except Exception as e:
logger.error(e)
return make_response(
jsonify({"success": False, "message": "Failed to get labels"}), 404
return JSONResponse(
content=({"success": False, "message": "Failed to get labels"}),
status_code=404,
)

labels = sorted([e.label for e in events])
return jsonify(labels)
return JSONResponse(content=labels)


@bp.route("/sub_labels")
def get_sub_labels():
split_joined = request.args.get("split_joined", type=int)

@router.get("/sub_labels")
def get_sub_labels(split_joined: Optional[int] = None):
try:
events = Event.select(Event.sub_label).distinct()
except Exception:
return make_response(
jsonify({"success": False, "message": "Failed to get sub_labels"}),
404,
return JSONResponse(
content=({"success": False, "message": "Failed to get sub_labels"}),
status_code=404,
)

sub_labels = [e.sub_label for e in events]
@@ -617,15 +555,11 @@ def get_sub_labels():
sub_labels.append(part.strip())

sub_labels.sort()
return jsonify(sub_labels)
return JSONResponse(content=sub_labels)


@bp.route("/timeline")
def timeline():
camera = request.args.get("camera", "all")
source_id = request.args.get("source_id", type=str)
limit = request.args.get("limit", 100)

@router.get("/timeline")
def timeline(camera: str = "all", limit: int = 100, source_id: Optional[str] = None):
clauses = []

selected_columns = [
@@ -654,18 +588,18 @@ def timeline():
.dicts()
)

return jsonify([t for t in timeline])
return JSONResponse(content=[t for t in timeline])


@bp.route("/timeline/hourly")
def hourly_timeline():
@router.get("/timeline/hourly")
def hourly_timeline(params: AppTimelineHourlyQueryParameters = Depends()):
"""Get hourly summary for timeline."""
cameras = request.args.get("cameras", "all")
labels = request.args.get("labels", "all")
before = request.args.get("before", type=float)
after = request.args.get("after", type=float)
limit = request.args.get("limit", 200)
tz_name = request.args.get("timezone", default="utc", type=str)
cameras = params.cameras
labels = params.labels
before = params.before
after = params.after
limit = params.limit
tz_name = params.timezone

_, minute_modifier, _ = get_tz_modifiers(tz_name)
minute_offset = int(minute_modifier.split(" ")[0])
@@ -731,8 +665,8 @@ def hourly_timeline():
else:
hours[hour].insert(0, t)

return jsonify(
{
return JSONResponse(
content={
"start": start,
"end": end,
"count": count,

@@ -12,29 +12,49 @@ import time
from datetime import datetime
from pathlib import Path

from flask import Blueprint, current_app, jsonify, make_response, redirect, request
from flask_limiter import Limiter
from fastapi import APIRouter, Request, Response
from fastapi.responses import JSONResponse, RedirectResponse
from joserfc import jwt
from peewee import DoesNotExist
from slowapi import Limiter

from frigate.api.defs.app_body import (
AppPostLoginBody,
AppPostUsersBody,
AppPutPasswordBody,
)
from frigate.api.defs.tags import Tags
from frigate.config import AuthConfig, ProxyConfig
from frigate.const import CONFIG_DIR, JWT_SECRET_ENV_VAR, PASSWORD_HASH_ALGORITHM
from frigate.models import User

logger = logging.getLogger(__name__)

AuthBp = Blueprint("auth", __name__)
router = APIRouter(tags=[Tags.auth])


def get_remote_addr():
class RateLimiter:
_limit = ""

def set_limit(self, limit: str):
self._limit = limit

def get_limit(self) -> str:
return self._limit


rateLimiter = RateLimiter()


def get_remote_addr(request: Request):
route = list(reversed(request.headers.get("x-forwarded-for").split(",")))
logger.debug(f"IP Route: {[r for r in route]}")
trusted_proxies = []
for proxy in current_app.frigate_config.auth.trusted_proxies:
for proxy in request.app.frigate_config.auth.trusted_proxies:
try:
network = ipaddress.ip_network(proxy)
except ValueError:
logger.warn(f"Unable to parse trusted network: {proxy}")
logger.warning(f"Unable to parse trusted network: {proxy}")
trusted_proxies.append(network)

# return the first remote address that is not trusted
@@ -68,16 +88,6 @@ def get_remote_addr():
return request.remote_addr or "127.0.0.1"


limiter = Limiter(
get_remote_addr,
storage_uri="memory://",
)


def get_rate_limit():
return current_app.frigate_config.auth.failed_login_rate_limit


def get_jwt_secret() -> str:
jwt_secret = None
# check env var
@@ -112,7 +122,7 @@ def get_jwt_secret() -> str:
with open(jwt_secret_file, "w") as f:
f.write(str(jwt_secret))
except Exception:
logger.warn(
logger.warning(
"Unable to write jwt token file to config directory. A new jwt token will be created at each startup."
)
else:
@@ -121,18 +131,18 @@ def get_jwt_secret() -> str:
try:
jwt_secret = f.readline()
except Exception:
logger.warn(
logger.warning(
"Unable to read jwt token from .jwt_secret file in config directory. A new jwt token will be created at each startup."
)
jwt_secret = secrets.token_hex(64)

if len(jwt_secret) < 64:
logger.warn("JWT Secret is recommended to be 64 characters or more")
logger.warning("JWT Secret is recommended to be 64 characters or more")

return jwt_secret


def hash_password(password, salt=None, iterations=600000):
def hash_password(password: str, salt=None, iterations=600000):
if salt is None:
salt = secrets.token_hex(16)
assert salt and isinstance(salt, str) and "$" not in salt
@@ -158,33 +168,36 @@ def create_encoded_jwt(user, expiration, secret):
return jwt.encode({"alg": "HS256"}, {"sub": user, "exp": expiration}, secret)


def set_jwt_cookie(response, cookie_name, encoded_jwt, expiration, secure):
def set_jwt_cookie(response: Response, cookie_name, encoded_jwt, expiration, secure):
# TODO: ideally this would set secure as well, but that requires TLS
response.set_cookie(
cookie_name, encoded_jwt, httponly=True, expires=expiration, secure=secure
key=cookie_name,
value=encoded_jwt,
httponly=True,
expires=expiration,
secure=secure,
)


# Endpoint for use with nginx auth_request
@AuthBp.route("/auth")
def auth():
auth_config: AuthConfig = current_app.frigate_config.auth
proxy_config: ProxyConfig = current_app.frigate_config.proxy
@router.get("/auth")
def auth(request: Request):
auth_config: AuthConfig = request.app.frigate_config.auth
proxy_config: ProxyConfig = request.app.frigate_config.proxy

success_response = make_response({}, 202)
success_response = Response("", status_code=202)

# dont require auth if the request is on the internal port
# this header is set by Frigate's nginx proxy, so it cant be spoofed
if request.headers.get("x-server-port", 0, type=int) == 5000:
if int(request.headers.get("x-server-port", default=0)) == 5000:
return success_response

fail_response = make_response({}, 401)
fail_response = Response("", status_code=401)

# ensure the proxy secret matches if configured
if (
proxy_config.auth_secret is not None
and request.headers.get("x-proxy-secret", "", type=str)
!= proxy_config.auth_secret
and request.headers.get("x-proxy-secret", "") != proxy_config.auth_secret
):
logger.debug("X-Proxy-Secret header does not match configured secret value")
return fail_response
@@ -196,7 +209,6 @@ def auth():
if proxy_config.header_map.user is not None:
upstream_user_header_value = request.headers.get(
proxy_config.header_map.user,
type=str,
default="anonymous",
)
success_response.headers["remote-user"] = upstream_user_header_value
@@ -207,10 +219,10 @@ def auth():
# now apply authentication
fail_response.headers["location"] = "/login"

JWT_COOKIE_NAME = current_app.frigate_config.auth.cookie_name
JWT_COOKIE_SECURE = current_app.frigate_config.auth.cookie_secure
JWT_REFRESH = current_app.frigate_config.auth.refresh_time
JWT_SESSION_LENGTH = current_app.frigate_config.auth.session_length
JWT_COOKIE_NAME = request.app.frigate_config.auth.cookie_name
JWT_COOKIE_SECURE = request.app.frigate_config.auth.cookie_secure
JWT_REFRESH = request.app.frigate_config.auth.refresh_time
JWT_SESSION_LENGTH = request.app.frigate_config.auth.session_length

jwt_source = None
encoded_token = None
@@ -230,7 +242,7 @@ def auth():
return fail_response

try:
token = jwt.decode(encoded_token, current_app.jwt_token)
token = jwt.decode(encoded_token, request.app.jwt_token)
if "sub" not in token.claims:
logger.debug("user not set in jwt token")
return fail_response
@@ -266,7 +278,7 @@ def auth():
return fail_response
new_expiration = current_time + JWT_SESSION_LENGTH
new_encoded_jwt = create_encoded_jwt(
user, new_expiration, current_app.jwt_token
user, new_expiration, request.app.jwt_token
)
set_jwt_cookie(
success_response,
@@ -283,86 +295,85 @@ def auth():
return fail_response


@AuthBp.route("/profile")
def profile():
username = request.headers.get("remote-user", type=str)
return jsonify({"username": username})
@router.get("/profile")
def profile(request: Request):
username = request.headers.get("remote-user")
return JSONResponse(content={"username": username})


@AuthBp.route("/logout")
def logout():
auth_config: AuthConfig = current_app.frigate_config.auth
response = make_response(redirect("/login", code=303))
@router.get("/logout")
def logout(request: Request):
auth_config: AuthConfig = request.app.frigate_config.auth
response = RedirectResponse("/login", status_code=303)
response.delete_cookie(auth_config.cookie_name)
return response


@AuthBp.route("/login", methods=["POST"])
@limiter.limit(get_rate_limit, deduct_when=lambda response: response.status_code == 400)
def login():
JWT_COOKIE_NAME = current_app.frigate_config.auth.cookie_name
JWT_COOKIE_SECURE = current_app.frigate_config.auth.cookie_secure
JWT_SESSION_LENGTH = current_app.frigate_config.auth.session_length
content = request.get_json()
user = content["user"]
password = content["password"]
limiter = Limiter(key_func=get_remote_addr)


@router.post("/login")
@limiter.limit(limit_value=rateLimiter.get_limit)
def login(request: Request, body: AppPostLoginBody):
JWT_COOKIE_NAME = request.app.frigate_config.auth.cookie_name
JWT_COOKIE_SECURE = request.app.frigate_config.auth.cookie_secure
JWT_SESSION_LENGTH = request.app.frigate_config.auth.session_length
user = body.user
password = body.password

try:
db_user: User = User.get_by_id(user)
except DoesNotExist:
return make_response({"message": "Login failed"}, 400)
return JSONResponse(content={"message": "Login failed"}, status_code=400)

password_hash = db_user.password_hash
if verify_password(password, password_hash):
expiration = int(time.time()) + JWT_SESSION_LENGTH
encoded_jwt = create_encoded_jwt(user, expiration, current_app.jwt_token)
response = make_response({}, 200)
encoded_jwt = create_encoded_jwt(user, expiration, request.app.jwt_token)
response = Response("", 200)
set_jwt_cookie(
response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
)
return response
return make_response({"message": "Login failed"}, 400)
return JSONResponse(content={"message": "Login failed"}, status_code=400)
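This hunk swaps flask_limiter for slowapi: the limit string is now supplied by the RateLimiter holder above and requests are keyed by get_remote_addr. What the diff does not show is how slowapi is attached to the application itself; the usual wiring from slowapi's documentation is sketched below as an assumption (a standalone example, not Frigate's startup code).

    from fastapi import FastAPI, Request
    from slowapi import Limiter, _rate_limit_exceeded_handler
    from slowapi.errors import RateLimitExceeded
    from slowapi.util import get_remote_address

    limiter = Limiter(key_func=get_remote_address)

    app = FastAPI()
    app.state.limiter = limiter  # slowapi looks the limiter up on app.state
    app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)

    @app.post("/login")
    @limiter.limit("5/minute")  # a callable returning the limit string also works, as in the diff
    def login(request: Request):
        # the decorated endpoint must accept request: Request so slowapi can key the client
        return {"message": "ok"}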


@AuthBp.route("/users")
@router.get("/users")
def get_users():
exports = User.select(User.username).order_by(User.username).dicts().iterator()
return jsonify([e for e in exports])
return JSONResponse([e for e in exports])


@AuthBp.route("/users", methods=["POST"])
def create_user():
HASH_ITERATIONS = current_app.frigate_config.auth.hash_iterations
@router.post("/users")
def create_user(request: Request, body: AppPostUsersBody):
HASH_ITERATIONS = request.app.frigate_config.auth.hash_iterations

request_data = request.get_json()
if not re.match("^[A-Za-z0-9._]+$", body.username):
JSONResponse(content={"message": "Invalid username"}, status_code=400)

if not re.match("^[A-Za-z0-9._]+$", request_data.get("username", "")):
make_response({"message": "Invalid username"}, 400)

password_hash = hash_password(request_data["password"], iterations=HASH_ITERATIONS)
password_hash = hash_password(body.password, iterations=HASH_ITERATIONS)

User.insert(
{
User.username: request_data["username"],
User.username: body.username,
User.password_hash: password_hash,
User.notification_tokens: [],
}
).execute()
return jsonify({"username": request_data["username"]})
return JSONResponse(content={"username": body.username})


@AuthBp.route("/users/<username>", methods=["DELETE"])
@router.delete("/users/{username}")
def delete_user(username: str):
User.delete_by_id(username)
return jsonify({"success": True})
return JSONResponse(content={"success": True})


@AuthBp.route("/users/<username>/password", methods=["PUT"])
def update_password(username: str):
HASH_ITERATIONS = current_app.frigate_config.auth.hash_iterations
@router.put("/users/{username}/password")
def update_password(request: Request, username: str, body: AppPutPasswordBody):
HASH_ITERATIONS = request.app.frigate_config.auth.hash_iterations

request_data = request.get_json()

password_hash = hash_password(request_data["password"], iterations=HASH_ITERATIONS)
password_hash = hash_password(body.password, iterations=HASH_ITERATIONS)

User.set_by_id(
username,
@@ -370,4 +381,4 @@ def update_password(username: str):
User.password_hash: password_hash,
},
)
return jsonify({"success": True})
return JSONResponse(content={"success": True})

frigate/api/defs/app_body.py (Normal file, 19 lines added)
@@ -0,0 +1,19 @@
from pydantic import BaseModel


class AppConfigSetBody(BaseModel):
    requires_restart: int = 1


class AppPutPasswordBody(BaseModel):
    password: str


class AppPostUsersBody(BaseModel):
    username: str
    password: str


class AppPostLoginBody(BaseModel):
    user: str
    password: str
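These models replace the hand-rolled request.get_json() parsing in the Flask handlers above. A minimal, self-contained sketch of how FastAPI binds one of them (the app and route here are illustrative, not Frigate's wiring):

    from fastapi import FastAPI
    from pydantic import BaseModel

    class AppPostLoginBody(BaseModel):
        user: str
        password: str

    app = FastAPI()

    @app.post("/login")
    def login(body: AppPostLoginBody):
        # FastAPI parses and validates the JSON payload against the model before
        # the handler runs; a missing or mistyped field yields a 422 automatically
        return {"user": body.user}

A request posting {"user": "admin", "password": "secret"} binds cleanly; sending {"user": "admin"} alone is rejected before the handler is called.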

frigate/api/defs/app_query_parameters.py (Normal file, 12 lines added)
@@ -0,0 +1,12 @@
from typing import Optional

from pydantic import BaseModel


class AppTimelineHourlyQueryParameters(BaseModel):
    cameras: Optional[str] = "all"
    labels: Optional[str] = "all"
    after: Optional[float] = None
    before: Optional[float] = None
    limit: Optional[int] = 200
    timezone: Optional[str] = "utc"
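This model backs the hourly_timeline(params: AppTimelineHourlyQueryParameters = Depends()) signature earlier in the diff. Used with Depends(), a pydantic model's fields are read from the query string rather than the request body; a standalone sketch of the pattern (illustrative app and model name, not Frigate's):

    from typing import Optional

    from fastapi import Depends, FastAPI
    from pydantic import BaseModel

    class TimelineHourlyParams(BaseModel):  # hypothetical name for the sketch
        cameras: Optional[str] = "all"
        limit: Optional[int] = 200
        timezone: Optional[str] = "utc"

    app = FastAPI()

    @app.get("/timeline/hourly")
    def hourly_timeline(params: TimelineHourlyParams = Depends()):
        # GET /timeline/hourly?cameras=front&limit=50 populates the model
        # from the query string, with defaults for anything omitted
        return {"cameras": params.cameras, "limit": params.limit, "timezone": params.timezone}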

frigate/api/defs/events_body.py (Normal file, 31 lines added)
@@ -0,0 +1,31 @@
from typing import Optional, Union

from pydantic import BaseModel, Field


class EventsSubLabelBody(BaseModel):
    subLabel: str = Field(title="Sub label", max_length=100)
    subLabelScore: Optional[float] = Field(
        title="Score for sub label", default=None, gt=0.0, le=1.0
    )


class EventsDescriptionBody(BaseModel):
    description: Union[str, None] = Field(title="The description of the event")


class EventsCreateBody(BaseModel):
    source_type: Optional[str] = "api"
    sub_label: Optional[str] = None
    score: Optional[int] = 0
    duration: Optional[int] = 30
    include_recording: Optional[bool] = True
    draw: Optional[dict] = {}


class EventsEndBody(BaseModel):
    end_time: Optional[int] = None


class SubmitPlusBody(BaseModel):
    include_annotation: int = Field(default=1)

frigate/api/defs/events_query_parameters.py (Normal file, 58 lines added)
@@ -0,0 +1,58 @@
from typing import Optional

from pydantic import BaseModel

DEFAULT_TIME_RANGE = "00:00,24:00"


class EventsQueryParams(BaseModel):
    camera: Optional[str] = "all"
    cameras: Optional[str] = "all"
    label: Optional[str] = "all"
    labels: Optional[str] = "all"
    sub_label: Optional[str] = "all"
    sub_labels: Optional[str] = "all"
    zone: Optional[str] = "all"
    zones: Optional[str] = "all"
    limit: Optional[int] = 100
    after: Optional[float] = None
    before: Optional[float] = None
    time_range: Optional[str] = DEFAULT_TIME_RANGE
    has_clip: Optional[int] = None
    has_snapshot: Optional[int] = None
    in_progress: Optional[int] = None
    include_thumbnails: Optional[int] = 1
    favorites: Optional[int] = None
    min_score: Optional[float] = None
    max_score: Optional[float] = None
    is_submitted: Optional[int] = None
    min_length: Optional[float] = None
    max_length: Optional[float] = None
    sort: Optional[str] = None
    timezone: Optional[str] = "utc"


class EventsSearchQueryParams(BaseModel):
    query: Optional[str] = None
    event_id: Optional[str] = None
    search_type: Optional[str] = "thumbnail"
    include_thumbnails: Optional[int] = 1
    limit: Optional[int] = 50
    cameras: Optional[str] = "all"
    labels: Optional[str] = "all"
    zones: Optional[str] = "all"
    after: Optional[float] = None
    before: Optional[float] = None
    time_range: Optional[str] = DEFAULT_TIME_RANGE
    has_clip: Optional[bool] = None
    has_snapshot: Optional[bool] = None
    timezone: Optional[str] = "utc"
    min_score: Optional[float] = None
    max_score: Optional[float] = None
    sort: Optional[str] = None


class EventsSummaryQueryParams(BaseModel):
    timezone: Optional[str] = "utc"
    has_clip: Optional[int] = None
    has_snapshot: Optional[int] = None

frigate/api/defs/generic_response.py (Normal file, 6 lines added)
@@ -0,0 +1,6 @@
from pydantic import BaseModel


class GenericResponse(BaseModel):
    success: bool
    message: str

frigate/api/defs/media_query_parameters.py (Normal file, 42 lines added)
@@ -0,0 +1,42 @@
from enum import Enum
from typing import Optional

from pydantic import BaseModel


class Extension(str, Enum):
    webp = "webp"
    png = "png"
    jpg = "jpg"
    jpeg = "jpeg"


class MediaLatestFrameQueryParams(BaseModel):
    bbox: Optional[int] = None
    timestamp: Optional[int] = None
    zones: Optional[int] = None
    mask: Optional[int] = None
    motion: Optional[int] = None
    regions: Optional[int] = None
    quality: Optional[int] = 70
    height: Optional[int] = None


class MediaEventsSnapshotQueryParams(BaseModel):
    download: Optional[bool] = False
    timestamp: Optional[int] = None
    bbox: Optional[int] = None
    crop: Optional[int] = None
    height: Optional[int] = None
    quality: Optional[int] = 70


class MediaMjpegFeedQueryParams(BaseModel):
    fps: int = 3
    height: int = 360
    bbox: Optional[int] = None
    timestamp: Optional[int] = None
    zones: Optional[int] = None
    mask: Optional[int] = None
    motion: Optional[int] = None
    regions: Optional[int] = None

frigate/api/defs/regenerate_query_parameters.py (Normal file, 9 lines added)
@@ -0,0 +1,9 @@
from typing import Optional

from pydantic import BaseModel

from frigate.events.types import RegenerateDescriptionEnum


class RegenerateQueryParameters(BaseModel):
    source: Optional[RegenerateDescriptionEnum] = RegenerateDescriptionEnum.thumbnails

frigate/api/defs/review_body.py (Normal file, 6 lines added)
@@ -0,0 +1,6 @@
from pydantic import BaseModel, conlist, constr


class ReviewModifyMultipleBody(BaseModel):
    # List of string with at least one element and each element with at least one char
    ids: conlist(constr(min_length=1), min_length=1)
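The constrained types here reject both an empty list and empty-string ids at validation time. A quick pydantic-only illustration of that behaviour (no FastAPI needed; the prints exist only for the sketch):

    from pydantic import BaseModel, ValidationError, conlist, constr

    class ReviewModifyMultipleBody(BaseModel):
        ids: conlist(constr(min_length=1), min_length=1)

    print(ReviewModifyMultipleBody(ids=["abc123"]))  # accepted

    try:
        ReviewModifyMultipleBody(ids=[])  # empty list is rejected
    except ValidationError as e:
        print(e.errors()[0]["type"])

    try:
        ReviewModifyMultipleBody(ids=[""])  # empty-string element is rejected
    except ValidationError as e:
        print(e.errors()[0]["type"])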

frigate/api/defs/review_query_parameters.py (Normal file, 31 lines added)
@@ -0,0 +1,31 @@
from typing import Union

from pydantic import BaseModel
from pydantic.json_schema import SkipJsonSchema

from frigate.review.maintainer import SeverityEnum


class ReviewQueryParams(BaseModel):
    cameras: str = "all"
    labels: str = "all"
    zones: str = "all"
    reviewed: int = 0
    limit: Union[int, SkipJsonSchema[None]] = None
    severity: Union[SeverityEnum, SkipJsonSchema[None]] = None
    before: Union[float, SkipJsonSchema[None]] = None
    after: Union[float, SkipJsonSchema[None]] = None


class ReviewSummaryQueryParams(BaseModel):
    cameras: str = "all"
    labels: str = "all"
    zones: str = "all"
    timezone: str = "utc"


class ReviewActivityMotionQueryParams(BaseModel):
    cameras: str = "all"
    before: Union[float, SkipJsonSchema[None]] = None
    after: Union[float, SkipJsonSchema[None]] = None
    scale: int = 30

frigate/api/defs/review_responses.py (Normal file, 43 lines added)
@@ -0,0 +1,43 @@
from datetime import datetime
from typing import Dict

from pydantic import BaseModel, Json

from frigate.review.maintainer import SeverityEnum


class ReviewSegmentResponse(BaseModel):
    id: str
    camera: str
    start_time: datetime
    end_time: datetime
    has_been_reviewed: bool
    severity: SeverityEnum
    thumb_path: str
    data: Json


class Last24HoursReview(BaseModel):
    reviewed_alert: int
    reviewed_detection: int
    total_alert: int
    total_detection: int


class DayReview(BaseModel):
    day: datetime
    reviewed_alert: int
    reviewed_detection: int
    total_alert: int
    total_detection: int


class ReviewSummaryResponse(BaseModel):
    last24Hours: Last24HoursReview
    root: Dict[str, DayReview]


class ReviewActivityMotionResponse(BaseModel):
    start_time: int
    motion: float
    camera: str

frigate/api/defs/tags.py (Normal file, 13 lines added)
@@ -0,0 +1,13 @@
from enum import Enum


class Tags(Enum):
    app = "App"
    preview = "Preview"
    logs = "Logs"
    media = "Media"
    notifications = "Notifications"
    review = "Review"
    export = "Export"
    events = "Events"
    auth = "Auth"
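The Tags enum is what groups routes in the generated OpenAPI docs, as in the APIRouter(tags=[Tags.auth]) line earlier in this diff. A small illustrative sketch of a tagged router being mounted; the include_router call is an assumption about the app wiring and is not part of this diff:

    from enum import Enum

    from fastapi import APIRouter, FastAPI

    class Tags(Enum):
        auth = "Auth"

    router = APIRouter(tags=[Tags.auth])

    @router.get("/profile")
    def profile():
        # every route on this router is grouped under the "Auth" tag in /docs
        return {"username": "anonymous"}

    app = FastAPI()
    app.include_router(router)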
File diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.