forked from blakeblackshear/frigate
Compare commits
125 Commits
v0.14.0-be ... dependabot
| SHA1 |
|---|
| 6a77055930 |
| cf7718132a |
| 939a055d46 |
| 01fa1777ac |
| a77436eec3 |
| c268a126dc |
| 29e86d4eeb |
| 9d18061d0f |
| 943114c052 |
| 2cb81ef116 |
| c16450adc8 |
| 347d54f388 |
| 3428baa3fa |
| 4f8066a35a |
| 04fd05bc7d |
| 3abf89596a |
| 690ee3dc15 |
| 331c882af2 |
| b4eb83d892 |
| 4a35573210 |
| e7fabce4e0 |
| feb2c9fc62 |
| dd7fd16b69 |
| d93d6262ce |
| 9d7e499adb |
| 0d7a148897 |
| 9e825811f2 |
| 36cbffcc5e |
| f4f3cfa911 |
| ca0f6e4c0a |
| a7ccabd8f1 |
| 453a8d794e |
| ce79898cae |
| bf90daae2b |
| fdb5d53960 |
| 2dc5a7f767 |
| 65ca3c8fa3 |
| ff34af2c1f |
| e01b6ee76b |
| 1c7ee5f4e4 |
| d96f76c27f |
| 1da934e63c |
| 38a8d34ba5 |
| 8e31244fb3 |
| 3a124dbb84 |
| 8c23ede683 |
| 4133e454c4 |
| 4dce8ff60a |
| 2e724291db |
| f6b61c26ae |
| 1b876bf8d3 |
| b0d42ea116 |
| 05bc3839cc |
| 281482927a |
| 132a712341 |
| 13d121f443 |
| 67ba3dbd8b |
| 4afa7bf4e1 |
| 77bf710299 |
| 9b96211faf |
| 99e03576bf |
| 78d67484e1 |
| e9e86cc5af |
| 70618e93b7 |
| c84511de16 |
| 6d9590b4ec |
| 33e04fe61f |
| 9f43d10ba7 |
| 57503cc318 |
| e563692fa2 |
| 9c2974438d |
| 54e1bd9eeb |
| 8212b66ee0 |
| 43d2986208 |
| f8f7b74792 |
| 5069072a84 |
| 93b81756c6 |
| 4a867ddd56 |
| a347cb5a42 |
| 80e8930e73 |
| 2637541c6c |
| da913d8d31 |
| 88d4b694f8 |
| b28cc45510 |
| c0b23ca938 |
| 8e7b83d2f1 |
| 599dd7eecb |
| 84348350fe |
| 7d03d99852 |
| 7c39b176ac |
| 4c2e6f75a2 |
| 81139e8f47 |
| cea0596cf5 |
| 51a1526146 |
| b4db07d7a5 |
| 5c15659a34 |
| 1bd3285679 |
| 6de426c697 |
| d28ad0f0c8 |
| 47aecff567 |
| 524f03a650 |
| 68e6ffdfef |
| 29345c429a |
| f2c46408c4 |
| e5dc476c1e |
| eb2363b93d |
| 7bfebd5b61 |
| 6addf4d88b |
| c56e7e7c6c |
| 78c15f3020 |
| 30f0f73a4e |
| e9da453190 |
| 91f62cf8ce |
| 58dbbd5d29 |
| 5c90f7dce7 |
| b7cf5f4105 |
| c850604931 |
| 82d2910039 |
| 5066fa369d |
| 3afd77cbe0 |
| 093201a1cc |
| 6102e9e5ea |
| 91215a1406 |
| 94b1350c9d |
| 1129a2aba4 |
83  .github/DISCUSSION_TEMPLATE/bug-report.yml (vendored, deleted)
@@ -1,83 +0,0 @@
-title: "[Bug]: "
-labels: ["bug", "triage"]
-body:
-  - type: textarea
-    id: description
-    attributes:
-      label: Describe the problem you are having
-    validations:
-      required: true
-  - type: textarea
-    id: steps
-    attributes:
-      label: Steps to reproduce
-    validations:
-      required: true
-  - type: input
-    id: version
-    attributes:
-      label: Version
-      description: Visible on the System page in the Web UI
-    validations:
-      required: true
-  - type: textarea
-    id: config
-    attributes:
-      label: Frigate config file
-      description: This will be automatically formatted into code, so no need for backticks.
-      render: yaml
-    validations:
-      required: true
-  - type: textarea
-    id: logs
-    attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
-      render: shell
-    validations:
-      required: true
-  - type: dropdown
-    id: os
-    attributes:
-      label: Operating system
-      options:
-        - HassOS
-        - Debian
-        - Other Linux
-        - Proxmox
-        - UNRAID
-        - Windows
-        - Other
-    validations:
-      required: true
-  - type: dropdown
-    id: install-method
-    attributes:
-      label: Install method
-      options:
-        - HassOS Addon
-        - Docker Compose
-        - Docker CLI
-    validations:
-      required: true
-  - type: dropdown
-    id: network
-    attributes:
-      label: Network connection
-      options:
-        - Wired
-        - Wireless
-        - Mixed
-    validations:
-      required: true
-  - type: input
-    id: camera
-    attributes:
-      label: Camera make and model
-      description: Dahua, hikvision, amcrest, reolink, etc and model number
-    validations:
-      required: true
-  - type: textarea
-    id: other
-    attributes:
-      label: Any other information that may be helpful
43  .github/DISCUSSION_TEMPLATE/camera-support.yml (vendored)
@@ -1,6 +1,16 @@
 title: "[Camera Support]: "
 labels: ["support", "triage"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form for support or questions for an issue with your cameras.
+
+        Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
@@ -11,9 +21,15 @@ body:
     id: version
     attributes:
       label: Version
-      description: Visible on the System page in the Web UI
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
     validations:
       required: true
+  - type: input
+    attributes:
+      label: What browser(s) are you using?
+      placeholder: Google Chrome 88.0.4324.150
+      description: >
+        Provide the full name and don't forget to add the version!
   - type: textarea
     id: config
     attributes:
@@ -23,10 +39,18 @@ body:
     validations:
       required: true
   - type: textarea
-    id: logs
+    id: frigatelogs
    attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
       render: shell
     validations:
       required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
@@ -34,7 +58,7 @@ body:
     id: ffprobe
     attributes:
       label: FFprobe output from your camera
-      description: Run `ffprobe <camera_url>` and provide output below
+      description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
       render: shell
     validations:
       required: true
@@ -78,7 +102,7 @@ body:
        - TensorRT
        - RKNN
        - Other
-        - CPU (no coral)
+        - CPU (no Coral)
     validations:
       required: true
   - type: dropdown
@@ -98,6 +122,13 @@ body:
       description: Dahua, hikvision, amcrest, reolink, etc and model number
     validations:
       required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
+    validations:
+      required: true
   - type: textarea
     id: other
     attributes:
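The updated ffprobe guidance above matters because a probe from the host can succeed while the container cannot reach the camera. A minimal sketch of probing from inside the running container; the container name `frigate` and the RTSP URL are illustrative placeholders:

```bash
# Probe a camera stream from inside the Frigate container so the test
# uses the same network path and ffmpeg build that Frigate itself uses.
docker exec -it frigate ffprobe "rtsp://user:pass@192.168.1.10:554/main"
```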
31  .github/DISCUSSION_TEMPLATE/config-support.yml (vendored)
@@ -1,6 +1,16 @@
 title: "[Config Support]: "
 labels: ["support", "triage"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form for support or questions related to Frigate's configuration and config file.
+
+        Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
@@ -11,7 +21,7 @@ body:
     id: version
     attributes:
       label: Version
-      description: Visible on the System page in the Web UI
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
     validations:
       required: true
   - type: textarea
@@ -23,10 +33,18 @@ body:
     validations:
       required: true
   - type: textarea
-    id: logs
+    id: frigatelogs
     attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
       render: shell
     validations:
       required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
@@ -73,6 +91,11 @@ body:
         - CPU (no coral)
     validations:
       required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop or simple cut/paste is possible in this field
   - type: textarea
     id: other
     attributes:
33  .github/DISCUSSION_TEMPLATE/detector-support.yml (vendored)
@@ -1,6 +1,16 @@
 title: "[Detector Support]: "
 labels: ["support", "triage"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form for support or questions related to Frigate's object detectors.
+
+        Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
@@ -11,7 +21,7 @@ body:
     id: version
     attributes:
       label: Version
-      description: Visible on the System page in the Web UI
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
     validations:
       required: true
   - type: textarea
@@ -31,10 +41,18 @@ body:
     validations:
       required: true
   - type: textarea
-    id: logs
+    id: frigatelogs
     attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
       render: shell
     validations:
       required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
@@ -75,6 +93,13 @@ body:
         - CPU (no coral)
     validations:
       required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
+    validations:
+      required: true
   - type: textarea
     id: other
     attributes:
39  .github/DISCUSSION_TEMPLATE/general-support.yml (vendored)
@@ -1,6 +1,16 @@
 title: "[Support]: "
 labels: ["support", "triage"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form for support for issues that don't fall into any specific category.
+
+        Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
@@ -11,9 +21,15 @@ body:
     id: version
     attributes:
       label: Version
-      description: Visible on the System page in the Web UI
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
     validations:
       required: true
+  - type: input
+    attributes:
+      label: What browser(s) are you using?
+      placeholder: Google Chrome 88.0.4324.150
+      description: >
+        Provide the full name and don't forget to add the version!
   - type: textarea
     id: config
     attributes:
@@ -23,10 +39,18 @@ body:
     validations:
       required: true
   - type: textarea
-    id: logs
+    id: frigatelogs
     attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
       render: shell
     validations:
       required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
@@ -34,7 +58,7 @@ body:
     id: ffprobe
     attributes:
       label: FFprobe output from your camera
-      description: Run `ffprobe <camera_url>` and provide output below
+      description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
       render: shell
     validations:
       required: true
@@ -98,6 +122,11 @@ body:
       description: Dahua, hikvision, amcrest, reolink, etc and model number
     validations:
       required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop for images is possible in this field
   - type: textarea
     id: other
     attributes:
.github/DISCUSSION_TEMPLATE/hwaccel-support.yml (vendored)
@@ -1,6 +1,16 @@
 title: "[HW Accel Support]: "
 labels: ["support", "triage"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form to submit a support request for hardware acceleration issues.
+
+        Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
@@ -11,9 +21,15 @@ body:
     id: version
     attributes:
       label: Version
-      description: Visible on the System page in the Web UI
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
     validations:
       required: true
+  - type: input
+    attributes:
+      label: In which browser(s) are you experiencing the issue with?
+      placeholder: Google Chrome 88.0.4324.150
+      description: >
+        Provide the full name and don't forget to add the version!
   - type: textarea
     id: config
     attributes:
@@ -31,10 +47,18 @@ body:
     validations:
       required: true
   - type: textarea
-    id: logs
+    id: frigatelogs
     attributes:
-      label: Relevant log output
-      description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
       render: shell
     validations:
       required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
@@ -42,7 +66,7 @@ body:
     id: ffprobe
     attributes:
       label: FFprobe output from your camera
-      description: Run `ffprobe <camera_url>` and provide output below
+      description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
       render: shell
     validations:
       required: true
@@ -87,6 +111,13 @@ body:
       description: Dahua, hikvision, amcrest, reolink, etc and model number
     validations:
       required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
+    validations:
+      required: true
   - type: textarea
     id: other
     attributes:
14  .github/DISCUSSION_TEMPLATE/question.yml (vendored)
@@ -1,9 +1,21 @@
 title: "[Question]: "
 labels: ["question"]
 body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form for questions you have about Frigate.
+
+        Before submitting your question, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
+
+        **If you are looking for support, start a new discussion and use a support category.**
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
   - type: textarea
     id: description
     attributes:
-      label: "What is your question:"
+      label: "What is your question?"
     validations:
       required: true
146  .github/DISCUSSION_TEMPLATE/report-a-bug.yml (vendored, new file)
@@ -0,0 +1,146 @@
+title: "[Bug]: "
+labels: ["bug", "triage"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        Use this form to submit a reproducible bug in Frigate or Frigate's UI.
+
+        Before submitting your bug report, please [search the discussions][discussions], look at recent open and closed [pull requests][prs], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your bug has already been fixed by the developers or reported by the community.
+
+        **If you are unsure if your issue is actually a bug or not, please submit a support request first.**
+
+        [discussions]: https://www.github.com/blakeblackshear/frigate/discussions
+        [prs]: https://www.github.com/blakeblackshear/frigate/pulls
+        [docs]: https://docs.frigate.video
+        [faq]: https://github.com/blakeblackshear/frigate/discussions/12724
+  - type: checkboxes
+    attributes:
+      label: Checklist
+      description: Please verify that you've followed these steps
+      options:
+        - label: I have updated to the latest available Frigate version.
+          required: true
+        - label: I have cleared the cache of my browser.
+          required: true
+        - label: I have tried a different browser to see if it is related to my browser.
+          required: true
+        - label: I have tried reproducing the issue in [incognito mode](https://www.computerworld.com/article/1719851/how-to-go-incognito-in-chrome-firefox-safari-and-edge.html) to rule out problems with any third party extensions or plugins I have installed.
+  - type: textarea
+    id: description
+    attributes:
+      label: Describe the problem you are having
+      description: Provide a clear and concise description of what the bug is.
+    validations:
+      required: true
+  - type: textarea
+    id: steps
+    attributes:
+      label: Steps to reproduce
+      description: |
+        Please tell us exactly how to reproduce your issue.
+        Provide clear and concise step by step instructions and add code snippets if needed.
+      value: |
+        1.
+        2.
+        3.
+        ...
+    validations:
+      required: true
+  - type: input
+    id: version
+    attributes:
+      label: Version
+      description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
+    validations:
+      required: true
+  - type: input
+    attributes:
+      label: In which browser(s) are you experiencing the issue with?
+      placeholder: Google Chrome 88.0.4324.150
+      description: >
+        Provide the full name and don't forget to add the version!
+  - type: textarea
+    id: config
+    attributes:
+      label: Frigate config file
+      description: This will be automatically formatted into code, so no need for backticks.
+      render: yaml
+    validations:
+      required: true
+  - type: textarea
+    id: docker
+    attributes:
+      label: docker-compose file or Docker CLI command
+      description: This will be automatically formatted into code, so no need for backticks.
+      render: yaml
+    validations:
+      required: true
+  - type: textarea
+    id: frigatelogs
+    attributes:
+      label: Relevant Frigate log output
+      description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
+  - type: textarea
+    id: go2rtclogs
+    attributes:
+      label: Relevant go2rtc log output
+      description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
+      render: shell
+    validations:
+      required: true
+  - type: dropdown
+    id: os
+    attributes:
+      label: Operating system
+      options:
+        - HassOS
+        - Debian
+        - Other Linux
+        - Proxmox
+        - UNRAID
+        - Windows
+        - Other
+    validations:
+      required: true
+  - type: dropdown
+    id: install-method
+    attributes:
+      label: Install method
+      options:
+        - HassOS Addon
+        - Docker Compose
+        - Docker CLI
+    validations:
+      required: true
+  - type: dropdown
+    id: network
+    attributes:
+      label: Network connection
+      options:
+        - Wired
+        - Wireless
+        - Mixed
+    validations:
+      required: true
+  - type: input
+    id: camera
+    attributes:
+      label: Camera make and model
+      description: Dahua, hikvision, amcrest, reolink, etc and model number
+    validations:
+      required: true
+  - type: textarea
+    id: screenshots
+    attributes:
+      label: Screenshots of the Frigate UI's System metrics pages
+      description: Drag and drop for images is possible in this field. Please post screenshots of all tabs.
+    validations:
+      required: true
+  - type: textarea
+    id: other
+    attributes:
+      label: Any other information that may be helpful
2  .github/actions/setup/action.yml (vendored)
@@ -5,7 +5,7 @@ inputs:
     required: true
 outputs:
   image-name:
-    value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ steps.create-short-sha.outputs.SHORT_SHA }}
+    value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ steps.create-short-sha.outputs.SHORT_SHA }}
   cache-name:
     value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:cache
 runs:
30  .github/workflows/ci.yml (vendored)
@@ -155,6 +155,30 @@ jobs:
            tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
            *.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
            *.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
+  combined_extra_builds:
+    runs-on: ubuntu-latest
+    name: Combined Extra Builds
+    needs:
+      - amd64_build
+      - arm64_build
+    steps:
+      - name: Check out code
+        uses: actions/checkout@v4
+      - name: Set up QEMU and Buildx
+        id: setup
+        uses: ./.github/actions/setup
+        with:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push Hailo-8l build
+        uses: docker/bake-action@v4
+        with:
+          push: true
+          targets: h8l
+          files: docker/hailo8l/h8l.hcl
+          set: |
+            h8l.tags=${{ steps.setup.outputs.image-name }}-h8l
+            *.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l
+            *.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l,mode=max
       #- name: AMD/ROCm general build
       #  env:
       #    AMDGPU: gfx
@@ -229,7 +253,7 @@ jobs:
         run: echo "SHORT_SHA=${GITHUB_SHA::7}" >> $GITHUB_ENV
       - uses: int128/docker-manifest-create-action@v2
         with:
-          tags: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}
+          tags: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}
           sources: |
-            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}-amd64
-            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}-rpi
+            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-amd64
+            ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-rpi
8  .github/workflows/release.yml (vendored)
@@ -23,10 +23,10 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}
       - name: Create tag variables
         run: |
-          BRANCH=$([[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "master" || echo "dev")
-          echo "BRANCH=${BRANCH}" >> $GITHUB_ENV
+          BUILD_TYPE=$([[ "${{ github.ref_name }}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
+          echo "BUILD_TYPE=${BUILD_TYPE}" >> $GITHUB_ENV
           echo "BASE=ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}" >> $GITHUB_ENV
-          echo "BUILD_TAG=${BRANCH}-${GITHUB_SHA::7}" >> $GITHUB_ENV
+          echo "BUILD_TAG=${GITHUB_SHA::7}" >> $GITHUB_ENV
           echo "CLEAN_VERSION=$(echo ${GITHUB_REF##*/} | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')" >> $GITHUB_ENV
       - name: Tag and push the main image
         run: |
@@ -39,7 +39,7 @@
           done

           # stable tag
-          if [[ "${BRANCH}" == "master" ]]; then
+          if [[ "${BUILD_TYPE}" == "stable" ]]; then
             docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${STABLE_TAG}
             for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk; do
               docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${STABLE_TAG}-${variant}
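The `BUILD_TYPE` expression above treats a ref as stable only when it is a plain `vMAJOR.MINOR.PATCH` tag, so pre-release tags and branches fall through to beta. A standalone sketch of that classification; the ref values are examples:

```bash
# Classify refs the same way the workflow does: bare semver tags are
# "stable", everything else (betas, branch names) is "beta".
for ref in v0.14.0 v0.14.0-beta4 dev; do
  BUILD_TYPE=$([[ "${ref}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
  echo "${ref} -> ${BUILD_TYPE}"
done
# v0.14.0 -> stable; v0.14.0-beta4 -> beta; dev -> beta
```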
26  .github/workflows/stale.yml (vendored)
@@ -25,17 +25,17 @@ jobs:
       - name: Print outputs
         run: echo ${{ join(steps.stale.outputs.*, ',') }}

-  clean_ghcr:
-    name: Delete outdated dev container images
-    runs-on: ubuntu-latest
-    steps:
-      - name: Delete old images
-        uses: snok/container-retention-policy@v2
-        with:
-          image-names: dev-*
-          cut-off: 60 days ago UTC
-          keep-at-least: 5
-          account-type: personal
-          token: ${{ secrets.GITHUB_TOKEN }}
-          token-type: github-token
+  # clean_ghcr:
+  #   name: Delete outdated dev container images
+  #   runs-on: ubuntu-latest
+  #   steps:
+  #     - name: Delete old images
+  #       uses: snok/container-retention-policy@v2
+  #       with:
+  #         image-names: dev-*
+  #         cut-off: 60 days ago UTC
+  #         keep-at-least: 5
+  #         account-type: personal
+  #         token: ${{ secrets.GITHUB_TOKEN }}
+  #         token-type: github-token
CODEOWNERS
@@ -4,3 +4,4 @@
 /docker/tensorrt/*jetson* @madsciencetist
 /docker/rockchip/ @MarcA711
 /docker/rocm/ @harakas
+/docker/hailo8l/ @spanner3003
2  Makefile
@@ -1,7 +1,7 @@
 default_target: local

 COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
-VERSION = 0.14.0
+VERSION = 0.15.0
 IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
 GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
 CURRENT_UID := $(shell id -u)
104  docker/hailo8l/Dockerfile (new file)
@@ -0,0 +1,104 @@
+# syntax=docker/dockerfile:1.6
+
+ARG DEBIAN_FRONTEND=noninteractive
+
+# Build Python wheels
+FROM wheels AS h8l-wheels
+
+COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
+COPY docker/hailo8l/requirements-wheels-h8l.txt /requirements-wheels-h8l.txt
+
+RUN sed -i "/https:\/\//d" /requirements-wheels.txt
+
+# Create a directory to store the built wheels
+RUN mkdir /h8l-wheels
+
+# Build the wheels
+RUN pip3 wheel --wheel-dir=/h8l-wheels -c /requirements-wheels.txt -r /requirements-wheels-h8l.txt
+
+# Build HailoRT and create wheel
+FROM wheels AS build-hailort
+ARG TARGETARCH
+
+SHELL ["/bin/bash", "-c"]
+
+# Install necessary APT packages
+RUN apt-get -qq update \
+    && apt-get -qq install -y \
+    apt-transport-https \
+    gnupg \
+    wget \
+    # the key fingerprint can be obtained from https://ftp-master.debian.org/keys.html
+    && wget -qO- "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0xA4285295FC7B1A81600062A9605C66F00D6C9793" | \
+    gpg --dearmor > /usr/share/keyrings/debian-archive-bullseye-stable.gpg \
+    && echo "deb [signed-by=/usr/share/keyrings/debian-archive-bullseye-stable.gpg] http://deb.debian.org/debian bullseye main contrib non-free" | \
+    tee /etc/apt/sources.list.d/debian-bullseye-nonfree.list \
+    && apt-get -qq update \
+    && apt-get -qq install -y \
+    python3.9 \
+    python3.9-dev \
+    build-essential cmake git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Extract Python version and set environment variables
+RUN PYTHON_VERSION=$(python3 --version 2>&1 | awk '{print $2}' | cut -d. -f1,2) && \
+    PYTHON_VERSION_NO_DOT=$(echo $PYTHON_VERSION | sed 's/\.//') && \
+    echo "PYTHON_VERSION=$PYTHON_VERSION" > /etc/environment && \
+    echo "PYTHON_VERSION_NO_DOT=$PYTHON_VERSION_NO_DOT" >> /etc/environment
+
+# Clone and build HailoRT
+RUN . /etc/environment && \
+    git clone https://github.com/hailo-ai/hailort.git /opt/hailort && \
+    cd /opt/hailort && \
+    git checkout v4.17.0 && \
+    cmake -H. -Bbuild -DCMAKE_BUILD_TYPE=Release -DHAILO_BUILD_PYBIND=1 -DPYBIND11_PYTHON_VERSION=${PYTHON_VERSION} && \
+    cmake --build build --config release --target libhailort && \
+    cmake --build build --config release --target _pyhailort && \
+    cp build/hailort/libhailort/bindings/python/src/_pyhailort.cpython-${PYTHON_VERSION_NO_DOT}-$(if [ $TARGETARCH == "amd64" ]; then echo 'x86_64'; else echo 'aarch64'; fi )-linux-gnu.so hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/ && \
+    cp build/hailort/libhailort/src/libhailort.so hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/
+
+RUN ls -ahl /opt/hailort/build/hailort/libhailort/src/
+RUN ls -ahl /opt/hailort/hailort/libhailort/bindings/python/platform/hailo_platform/pyhailort/
+
+# Remove the existing setup.py if it exists in the target directory
+RUN rm -f /opt/hailort/hailort/libhailort/bindings/python/platform/setup.py
+
+# Copy generate_wheel_conf.py and setup.py
+COPY docker/hailo8l/pyhailort_build_scripts/generate_wheel_conf.py /opt/hailort/hailort/libhailort/bindings/python/platform/generate_wheel_conf.py
+COPY docker/hailo8l/pyhailort_build_scripts/setup.py /opt/hailort/hailort/libhailort/bindings/python/platform/setup.py
+
+# Run the generate_wheel_conf.py script
+RUN python3 /opt/hailort/hailort/libhailort/bindings/python/platform/generate_wheel_conf.py
+
+# Create a wheel file using pip3 wheel
+RUN cd /opt/hailort/hailort/libhailort/bindings/python/platform && \
+    python3 setup.py bdist_wheel --dist-dir /hailo-wheels
+
+# Use deps as the base image
+FROM deps AS h8l-frigate
+
+# Copy the wheels from the wheels stage
+COPY --from=h8l-wheels /h8l-wheels /deps/h8l-wheels
+COPY --from=build-hailort /hailo-wheels /deps/hailo-wheels
+COPY --from=build-hailort /etc/environment /etc/environment
+RUN CC=$(python3 -c "import sysconfig; import shlex; cc = sysconfig.get_config_var('CC'); cc_cmd = shlex.split(cc)[0]; print(cc_cmd[:-4] if cc_cmd.endswith('-gcc') else cc_cmd)") && \
+    echo "CC=$CC" >> /etc/environment
+
+# Install the wheels
+RUN pip3 install -U /deps/h8l-wheels/*.whl
+RUN pip3 install -U /deps/hailo-wheels/*.whl
+
+RUN . /etc/environment && \
+    mv /usr/local/lib/python${PYTHON_VERSION}/dist-packages/hailo_platform/pyhailort/libhailort.so /usr/lib/${CC} && \
+    cd /usr/lib/${CC}/ && \
+    ln -s libhailort.so libhailort.so.4.17.0
+
+# Copy base files from the rootfs stage
+COPY --from=rootfs / /
+
+# Set environment variables for Hailo SDK
+ENV PATH="/opt/hailort/bin:${PATH}"
+ENV LD_LIBRARY_PATH="/usr/lib/$(if [ $TARGETARCH == "amd64" ]; then echo 'x86_64'; else echo 'aarch64'; fi )-linux-gnu:${LD_LIBRARY_PATH}"
+
+# Set workdir
+WORKDIR /opt/frigate/
27  docker/hailo8l/h8l.hcl (new file)
@@ -0,0 +1,27 @@
+target wheels {
+  dockerfile = "docker/main/Dockerfile"
+  platforms = ["linux/arm64","linux/amd64"]
+  target = "wheels"
+}
+
+target deps {
+  dockerfile = "docker/main/Dockerfile"
+  platforms = ["linux/arm64","linux/amd64"]
+  target = "deps"
+}
+
+target rootfs {
+  dockerfile = "docker/main/Dockerfile"
+  platforms = ["linux/arm64","linux/amd64"]
+  target = "rootfs"
+}
+
+target h8l {
+  dockerfile = "docker/hailo8l/Dockerfile"
+  contexts = {
+    wheels = "target:wheels"
+    deps = "target:deps"
+    rootfs = "target:rootfs"
+  }
+  platforms = ["linux/arm64","linux/amd64"]
+}
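Because the `h8l` target consumes the `wheels`, `deps`, and `rootfs` stages of the main Dockerfile as named build contexts, the whole graph can be inspected before anything is built. A sketch using bake's dry-run output:

```bash
# Resolve the bake file and print the final build graph as JSON
# without building anything.
docker buildx bake --file=docker/hailo8l/h8l.hcl --print h8l
```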
10  docker/hailo8l/h8l.mk (new file)
@@ -0,0 +1,10 @@
+BOARDS += h8l
+
+local-h8l: version
+	docker buildx bake --load --file=docker/hailo8l/h8l.hcl --set h8l.tags=frigate:latest-h8l h8l
+
+build-h8l: version
+	docker buildx bake --file=docker/hailo8l/h8l.hcl --set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l h8l
+
+push-h8l: build-h8l
+	docker buildx bake --push --file=docker/hailo8l/h8l.hcl --set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l h8l
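With `BOARDS += h8l` wired into the main Makefile's board machinery, the variant can be built and smoke-tested locally without CI. A sketch of that flow; the `/dev/hailo0` device node and the config path are assumptions about the host setup:

```bash
# Build the Hailo-8L variant locally; produces the frigate:latest-h8l image.
make local-h8l

# Smoke-test it, passing the Hailo device through to the container.
docker run --rm -it \
  --device /dev/hailo0 \
  -v /path/to/config:/config \
  frigate:latest-h8l
```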
67  docker/hailo8l/pyhailort_build_scripts/generate_wheel_conf.py (new file)
@@ -0,0 +1,67 @@
+import json
+import os
+import platform
+import sys
+import sysconfig
+
+
+def extract_toolchain_info(compiler):
+    # Remove the "-gcc" or "-g++" suffix if present
+    if compiler.endswith("-gcc") or compiler.endswith("-g++"):
+        compiler = compiler.rsplit("-", 1)[0]
+
+    # Extract the toolchain and ABI part (e.g., "gnu")
+    toolchain_parts = compiler.split("-")
+    abi_conventions = next(
+        (part for part in toolchain_parts if part in ["gnu", "musl", "eabi", "uclibc"]),
+        "",
+    )
+
+    return abi_conventions
+
+
+def generate_wheel_conf():
+    conf_file_path = os.path.join(
+        os.path.abspath(os.path.dirname(__file__)), "wheel_conf.json"
+    )
+
+    # Extract current system and Python version information
+    py_version = f"cp{sys.version_info.major}{sys.version_info.minor}"
+    arch = platform.machine()
+    system = platform.system().lower()
+    libc_version = platform.libc_ver()[1]
+
+    # Get the compiler information
+    compiler = sysconfig.get_config_var("CC")
+    abi_conventions = extract_toolchain_info(compiler)
+
+    # Create the new configuration data
+    new_conf_data = {
+        "py_version": py_version,
+        "arch": arch,
+        "system": system,
+        "libc_version": libc_version,
+        "abi": abi_conventions,
+        "extension": {
+            "posix": "so",
+            "nt": "pyd",  # Windows
+        }[os.name],
+    }
+
+    # If the file exists, load the existing data
+    if os.path.isfile(conf_file_path):
+        with open(conf_file_path, "r") as conf_file:
+            conf_data = json.load(conf_file)
+        # Update the existing data with the new data
+        conf_data.update(new_conf_data)
+    else:
+        # If the file does not exist, use the new data
+        conf_data = new_conf_data
+
+    # Write the updated data to the file
+    with open(conf_file_path, "w") as conf_file:
+        json.dump(conf_data, conf_file, indent=4)
+
+
+if __name__ == "__main__":
+    generate_wheel_conf()
111  docker/hailo8l/pyhailort_build_scripts/setup.py (new file)
@@ -0,0 +1,111 @@
+import json
+import os
+
+from setuptools import find_packages, setup
+from wheel.bdist_wheel import bdist_wheel as orig_bdist_wheel
+
+
+class NonPurePythonBDistWheel(orig_bdist_wheel):
+    """Makes the wheel platform-dependent so it can be based on the _pyhailort architecture"""
+
+    def finalize_options(self):
+        orig_bdist_wheel.finalize_options(self)
+        self.root_is_pure = False
+
+
+def _get_hailort_lib_path():
+    lib_filename = "libhailort.so"
+    lib_path = os.path.join(
+        os.path.abspath(os.path.dirname(__file__)),
+        f"hailo_platform/pyhailort/{lib_filename}",
+    )
+    if os.path.exists(lib_path):
+        print(f"Found libhailort shared library at: {lib_path}")
+    else:
+        print(f"Error: libhailort shared library not found at: {lib_path}")
+        raise FileNotFoundError(f"libhailort shared library not found at: {lib_path}")
+    return lib_path
+
+
+def _get_pyhailort_lib_path():
+    conf_file_path = os.path.join(
+        os.path.abspath(os.path.dirname(__file__)), "wheel_conf.json"
+    )
+    if not os.path.isfile(conf_file_path):
+        raise FileNotFoundError(f"Configuration file not found: {conf_file_path}")
+
+    with open(conf_file_path, "r") as conf_file:
+        content = json.load(conf_file)
+        py_version = content["py_version"]
+        arch = content["arch"]
+        system = content["system"]
+        extension = content["extension"]
+        abi = content["abi"]
+
+        # Construct the filename directly
+        lib_filename = f"_pyhailort.cpython-{py_version.split('cp')[1]}-{arch}-{system}-{abi}.{extension}"
+        lib_path = os.path.join(
+            os.path.abspath(os.path.dirname(__file__)),
+            f"hailo_platform/pyhailort/{lib_filename}",
+        )
+
+        if os.path.exists(lib_path):
+            print(f"Found _pyhailort shared library at: {lib_path}")
+        else:
+            print(f"Error: _pyhailort shared library not found at: {lib_path}")
+            raise FileNotFoundError(
+                f"_pyhailort shared library not found at: {lib_path}"
+            )
+
+    return lib_path
+
+
+def _get_package_paths():
+    packages = []
+    pyhailort_lib = _get_pyhailort_lib_path()
+    hailort_lib = _get_hailort_lib_path()
+    if pyhailort_lib:
+        packages.append(pyhailort_lib)
+    if hailort_lib:
+        packages.append(hailort_lib)
+    packages.append(os.path.abspath("hailo_tutorials/notebooks/*"))
+    packages.append(os.path.abspath("hailo_tutorials/hefs/*"))
+    return packages
+
+
+if __name__ == "__main__":
+    setup(
+        author="Hailo team",
+        author_email="contact@hailo.ai",
+        cmdclass={
+            "bdist_wheel": NonPurePythonBDistWheel,
+        },
+        description="HailoRT",
+        entry_points={
+            "console_scripts": [
+                "hailo=hailo_platform.tools.hailocli.main:main",
+            ]
+        },
+        install_requires=[
+            "argcomplete",
+            "contextlib2",
+            "future",
+            "netaddr",
+            "netifaces",
+            "verboselogs",
+            "numpy==1.23.3",
+        ],
+        name="hailort",
+        package_data={
+            "hailo_platform": _get_package_paths(),
+        },
+        packages=find_packages(),
+        platforms=[
+            "linux_x86_64",
+            "linux_aarch64",
+            "win_amd64",
+        ],
+        url="https://hailo.ai/",
+        version="4.17.0",
+        zip_safe=False,
+    )
12  docker/hailo8l/requirements-wheels-h8l.txt (new file)
@@ -0,0 +1,12 @@
+appdirs==1.4.4
+argcomplete==2.0.0
+contextlib2==0.6.0.post1
+distlib==0.3.6
+filelock==3.8.0
+future==0.18.3
+importlib-metadata==5.1.0
+importlib-resources==5.1.2
+netaddr==0.8.0
+netifaces==0.10.9
+verboselogs==1.7
+virtualenv==20.17.0
35  docker/hailo8l/user_installation.sh (new file)
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+# Update package list and install dependencies
+sudo apt-get update
+sudo apt-get install -y build-essential cmake git wget linux-modules-extra-$(uname -r)
+
+arch=$(uname -m)
+
+if [[ $arch == "x86_64" ]]; then
+  sudo apt install -y linux-headers-$(uname -r);
+else
+  sudo apt install -y linux-modules-extra-$(uname -r);
+fi
+
+# Clone the HailoRT driver repository
+git clone --depth 1 --branch v4.17.0 https://github.com/hailo-ai/hailort-drivers.git
+
+# Build and install the HailoRT driver
+cd hailort-drivers/linux/pcie
+sudo make all
+sudo make install
+
+# Load the Hailo PCI driver
+sudo modprobe hailo_pci
+
+# Download and install the firmware
+cd ../../
+./download_firmware.sh
+sudo mv hailo8_fw.4.17.0.bin /lib/firmware/hailo/hailo8_fw.bin
+
+# Install udev rules
+sudo cp ./linux/pcie/51-hailo-udev.rules /etc/udev/rules.d/
+sudo udevadm control --reload-rules && sudo udevadm trigger
+
+echo "HailoRT driver installation complete."
docker/main/Dockerfile
@@ -66,6 +66,40 @@ RUN --mount=type=bind,source=docker/main/build_ov_model.py,target=/build_ov_model.py \
     && tar -xvf ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz \
     && python3 /build_ov_model.py

+####
+#
+# Coral Compatibility
+#
+# Builds libusb without udev. Needed for synology and other devices with USB coral
+####
+# libUSB - No Udev
+FROM wget as libusb-build
+ARG TARGETARCH
+ARG DEBIAN_FRONTEND
+ENV CCACHE_DIR /root/.ccache
+ENV CCACHE_MAXSIZE 2G
+
+# Build libUSB without udev. Needed for Openvino NCS2 support
+WORKDIR /opt
+RUN apt-get update && apt-get install -y unzip build-essential automake libtool ccache pkg-config
+RUN --mount=type=cache,target=/root/.ccache wget -q https://github.com/libusb/libusb/archive/v1.0.26.zip -O v1.0.26.zip && \
+    unzip v1.0.26.zip && cd libusb-1.0.26 && \
+    ./bootstrap.sh && \
+    ./configure CC='ccache gcc' CCX='ccache g++' --disable-udev --enable-shared && \
+    make -j $(nproc --all)
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends libusb-1.0-0-dev && \
+    rm -rf /var/lib/apt/lists/*
+WORKDIR /opt/libusb-1.0.26/libusb
+RUN /bin/mkdir -p '/usr/local/lib' && \
+    /bin/bash ../libtool --mode=install /usr/bin/install -c libusb-1.0.la '/usr/local/lib' && \
+    /bin/mkdir -p '/usr/local/include/libusb-1.0' && \
+    /usr/bin/install -c -m 644 libusb.h '/usr/local/include/libusb-1.0' && \
+    /bin/mkdir -p '/usr/local/lib/pkgconfig' && \
+    cd /opt/libusb-1.0.26/ && \
+    /usr/bin/install -c -m 644 libusb-1.0.pc '/usr/local/lib/pkgconfig' && \
+    ldconfig
+
 FROM wget AS models

 # Get model and labels
@@ -78,7 +112,7 @@ COPY --from=ov-converter /models/ssdlite_mobilenet_v2.bin openvino-model/
 RUN wget -q https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt -O openvino-model/coco_91cl_bkgr.txt && \
     sed -i 's/truck/car/g' openvino-model/coco_91cl_bkgr.txt
 # Get Audio Model and labels
-RUN wget -qO - https://www.kaggle.com/api/v1/models/google/yamnet/tfLite/classification-tflite/1/download | tar xvz && mv 1.tflite cpu_audio_model.tflite
+RUN wget -qO - https://www.kaggle.com/api/v1/models/google/yamnet/tfLite/classification-tflite/1/download | tar xvz && mv 1.tflite cpu_audio_model.tflite
 COPY audio-labelmap.txt .

@@ -114,6 +148,8 @@ RUN apt-get -qq update \
     gfortran openexr libatlas-base-dev libssl-dev\
     libtbb2 libtbb-dev libdc1394-22-dev libopenexr-dev \
     libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev \
+    # sqlite3 dependencies
+    tclsh \
     # scipy dependencies
     gcc gfortran libopenblas-dev liblapack-dev && \
     rm -rf /var/lib/apt/lists/*
@@ -127,6 +163,10 @@ RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
 COPY docker/main/requirements.txt /requirements.txt
 RUN pip3 install -r /requirements.txt

+# Build pysqlite3 from source to support ChromaDB
+COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh
+RUN /build_pysqlite3.sh
+
 COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
 RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt

@@ -135,6 +175,7 @@ RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt
 FROM scratch AS deps-rootfs
 COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
 COPY --from=go2rtc /rootfs/ /
+COPY --from=libusb-build /usr/local/lib /usr/local/lib
 COPY --from=tempio /rootfs/ /
 COPY --from=s6-overlay /rootfs/ /
 COPY --from=models /rootfs/ /
@@ -153,6 +194,13 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
 ENV NVIDIA_VISIBLE_DEVICES=all
 ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"

+# Turn off Chroma Telemetry: https://docs.trychroma.com/telemetry#opting-out
+ENV ANONYMIZED_TELEMETRY=False
+# Allow resetting the chroma database
+ENV ALLOW_RESET=True
+# Disable tokenizer parallelism warning
+ENV TOKENIZERS_PARALLELISM=true
+
 ENV PATH="/usr/lib/btbn-ffmpeg/bin:/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"

 # Install dependencies
@@ -165,6 +213,8 @@ RUN --mount=type=bind,from=wheels,source=/wheels,target=/deps/wheels \

 COPY --from=deps-rootfs / /

+RUN ldconfig
+
 EXPOSE 5000
 EXPOSE 8554
 EXPOSE 8555/tcp 8555/udp
35  docker/main/build_pysqlite3.sh (new executable file)
@@ -0,0 +1,35 @@
+#!/bin/bash
+
+set -euxo pipefail
+
+SQLITE3_VERSION="96c92aba00c8375bc32fafcdf12429c58bd8aabfcadab6683e35bbb9cdebf19e" # 3.46.0
+PYSQLITE3_VERSION="0.5.3"
+
+# Fetch the source code for the latest release of Sqlite.
+if [[ ! -d "sqlite" ]]; then
+  wget https://www.sqlite.org/src/tarball/sqlite.tar.gz?r=${SQLITE3_VERSION} -O sqlite.tar.gz
+  tar xzf sqlite.tar.gz
+  cd sqlite/
+  LIBS="-lm" ./configure --disable-tcl --enable-tempstore=always
+  make sqlite3.c
+  cd ../
+  rm sqlite.tar.gz
+fi
+
+# Grab the pysqlite3 source code.
+if [[ ! -d "./pysqlite3" ]]; then
+  git clone https://github.com/coleifer/pysqlite3.git
+fi
+
+cd pysqlite3/
+git checkout ${PYSQLITE3_VERSION}
+
+# Copy the sqlite3 source amalgamation into the pysqlite3 directory so we can
+# create a self-contained extension module.
+cp "../sqlite/sqlite3.c" ./
+cp "../sqlite/sqlite3.h" ./
+
+# Create the wheel and put it in the /wheels dir.
+sed -i "s|name='pysqlite3-binary'|name=PACKAGE_NAME|g" setup.py
+python3 setup.py build_static
+pip3 wheel . -w /wheels
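Because the SQLite amalgamation is compiled statically into the extension (`build_static`), the resulting wheel should not depend on the system libsqlite3. A hedged check of the built artifact; the wheel filename glob is illustrative:

```bash
# Install the freshly built wheel and confirm it reports the bundled
# SQLite version (3.46.0) rather than the system library's.
pip3 install /wheels/pysqlite3-*.whl
python3 -c "import pysqlite3; print(pysqlite3.sqlite_version)"
```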
docker/main/requirements-wheels.txt
@@ -1,8 +1,8 @@
 click == 8.1.*
 Flask == 3.0.*
-Flask_Limiter == 3.7.*
+Flask_Limiter == 3.8.*
 imutils == 0.5.*
-joserfc == 0.11.*
+joserfc == 1.0.*
 markupsafe == 2.1.*
 mypy == 1.6.1
 numpy == 1.26.*
@@ -11,13 +11,13 @@ opencv-python-headless == 4.9.0.*
 paho-mqtt == 2.1.*
 pandas == 2.2.*
 peewee == 3.17.*
-peewee_migrate == 1.12.*
+peewee_migrate == 1.13.*
 psutil == 5.9.*
-pydantic == 2.7.*
+pydantic == 2.8.*
 git+https://github.com/fbcotter/py3nvml#egg=py3nvml
 PyYAML == 6.0.*
 pytz == 2024.1
-pyzmq == 26.0.*
+pyzmq == 26.2.*
 ruamel.yaml == 0.18.*
 tzlocal == 5.2
 types-PyYAML == 6.0.*
@@ -30,3 +30,13 @@ ws4py == 0.5.*
 unidecode == 1.3.*
 onnxruntime == 1.18.*
 openvino == 2024.1.*
+# Embeddings
+onnx_clip == 4.0.*
+chromadb == 0.5.0
+# Generative AI
+google-generativeai == 0.6.*
+ollama == 0.2.*
+openai == 1.30.*
+# push notifications
+py-vapid == 1.9.*
+pywebpush == 2.0.*
@@ -0,0 +1 @@
+chroma
@@ -0,0 +1 @@
+chroma-pipeline
4  docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma-log/run (new executable file)
@@ -0,0 +1,4 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+
+exec logutil-service /dev/shm/logs/chroma
@@ -0,0 +1 @@
+longrun
28  docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/finish (new file)
@@ -0,0 +1,28 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+# Take down the S6 supervision tree when the service exits
+
+set -o errexit -o nounset -o pipefail
+
+# Logs should be sent to stdout so that s6 can collect them
+
+declare exit_code_container
+exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
+readonly exit_code_container
+readonly exit_code_service="${1}"
+readonly exit_code_signal="${2}"
+readonly service="ChromaDB"
+
+echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"
+
+if [[ "${exit_code_service}" -eq 256 ]]; then
+  if [[ "${exit_code_container}" -eq 0 ]]; then
+    echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
+  fi
+elif [[ "${exit_code_service}" -ne 0 ]]; then
+  if [[ "${exit_code_container}" -eq 0 ]]; then
+    echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
+  fi
+fi
+
+exec /run/s6/basedir/bin/halt
@@ -0,0 +1 @@
+chroma-log
27  docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/run (new file)
@@ -0,0 +1,27 @@
+#!/command/with-contenv bash
+# shellcheck shell=bash
+# Start the ChromaDB service
+
+set -o errexit -o nounset -o pipefail
+
+# Logs should be sent to stdout so that s6 can collect them
+
+# Tell S6-Overlay not to restart this service
+s6-svc -O .
+
+search_enabled=`python3 /usr/local/semantic_search/get_search_settings.py | jq -r .enabled`
+
+# Replace the bash process with the ChromaDB process, redirecting stderr to stdout
+exec 2>&1
+
+if [[ "$search_enabled" == 'true' ]]; then
+  echo "[INFO] Starting ChromaDB..."
+  exec /usr/local/chroma run --path /config/chroma --host 127.0.0.1
+else
+  while true
+  do
+    sleep 9999
+    continue
+  done
+  exit 0
+fi
@@ -0,0 +1 @@
120000
docker/main/rootfs/etc/s6-overlay/s6-rc.d/chroma/type (Normal file, 1 line)
@@ -0,0 +1 @@
longrun
@@ -16,8 +16,8 @@ function migrate_db_path() {
     if [[ -f "${config_file_yaml}" ]]; then
         config_file="${config_file_yaml}"
     elif [[ ! -f "${config_file}" ]]; then
-        echo "[ERROR] Frigate config file not found"
-        return 1
+        # Frigate will create the config file on startup
+        return 0
     fi
     unset config_file_yaml

@@ -4,7 +4,7 @@

 set -o errexit -o nounset -o pipefail

-dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync)
+dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync /dev/shm/logs/chroma)

 mkdir -p "${dirs[@]}"
 chown nobody:nogroup "${dirs[@]}"
@@ -38,7 +38,7 @@ function get_cpus() {
     fi

     local cpus
-    if [ -n "${quota}" ] && [ -n "${period}" ]; then
+    if [ "${period}" != "0" ] && [ -n "${quota}" ] && [ -n "${period}" ]; then
         cpus=$((quota / period))
         if [ "$cpus" -eq 0 ]; then
             cpus=1
docker/main/rootfs/usr/local/chroma (Executable file, 14 lines)
@@ -0,0 +1,14 @@
#!/usr/bin/python3
# -*- coding: utf-8 -*-
__import__("pysqlite3")

import re
import sys

sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")

from chromadb.cli.cli import app

if __name__ == "__main__":
    sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
    sys.exit(app())
@@ -0,0 +1,28 @@
"""Prints the semantic_search config as json to stdout."""

import json
import os

import yaml

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

# Check if we can use .yaml instead of .yml
config_file_yaml = config_file.replace(".yml", ".yaml")
if os.path.isfile(config_file_yaml):
    config_file = config_file_yaml

try:
    with open(config_file) as f:
        raw_config = f.read()

    if config_file.endswith((".yaml", ".yml")):
        config: dict[str, any] = yaml.safe_load(raw_config)
    elif config_file.endswith(".json"):
        config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
    config: dict[str, any] = {}

search_config: dict[str, any] = config.get("semantic_search", {"enabled": False})

print(json.dumps(search_config))
@@ -80,6 +80,14 @@ model:
    input_pixel_format: "bgr"
```

#### `labelmap`

:::warning

If the labelmap is customized, then the labels used for alerts will need to be adjusted as well. See [alert labels](../configuration/review.md#restricting-alerts-to-specific-labels) for more info.

:::

The labelmap can be customized to your needs. A common reason to do this is to combine object types that are easily confused when you don't need that granularity, such as car and truck. By default, truck is renamed to car because they are often confused. You cannot add new object types, but you can change the names of existing objects in the model.

```yaml
@@ -111,6 +111,6 @@ camera_groups:
    cameras:
      - driveway_cam
      - garage_cam
-    icon: car
+    icon: LuCar
    order: 0
```
docs/docs/configuration/genai.md (Normal file, 135 lines)
@@ -0,0 +1,135 @@
---
id: genai
title: Generative AI
---

Generative AI can be used to automatically generate descriptions based on the thumbnails of your events. This helps with [semantic search](/configuration/semantic_search) in Frigate by providing detailed text descriptions as a basis of the search query.

## Configuration

Generative AI can be enabled for all cameras or only for specific cameras. There are currently 3 providers available to integrate with Frigate.

If the provider you choose requires an API key, you may either directly paste it in your configuration, or store it in an environment variable prefixed with `FRIGATE_`.

```yaml
genai:
  enabled: True
  provider: gemini
  api_key: "{FRIGATE_GEMINI_API_KEY}"
  model: gemini-1.5-flash

cameras:
  front_camera: ...
  indoor_camera:
    genai: # <- disable GenAI for your indoor camera
      enabled: False
```

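For example, with Docker Compose the environment variable could be passed to the container like this (a minimal sketch; the variable name and value are illustrative):

```yaml
services:
  frigate:
    environment:
      # Frigate substitutes {FRIGATE_GEMINI_API_KEY} in the config with this value
      - FRIGATE_GEMINI_API_KEY=your-api-key-here
```
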
## Ollama

[Ollama](https://ollama.com/) allows you to self-host large language models and keep everything running locally. It provides a nice API over [llama.cpp](https://github.com/ggerganov/llama.cpp). It is highly recommended to host this server on a machine with an Nvidia graphics card or an Apple silicon Mac for best performance. Most of the 7b parameter 4-bit vision models will fit inside 8GB of VRAM. There is also a [docker container](https://hub.docker.com/r/ollama/ollama) available.

### Supported Models

You must use a vision capable model with Frigate. Current model variants can be found [in their model library](https://ollama.com/library). At the time of writing, this includes `llava`, `llava-llama3`, `llava-phi3`, and `moondream`.

:::note

You should have at least 8 GB of RAM available (or VRAM if running on GPU) to run the 7B models, 16 GB to run the 13B models, and 32 GB to run the 33B models.

:::

### Configuration

```yaml
genai:
  enabled: True
  provider: ollama
  base_url: http://localhost:11434
  model: llava
```

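With a stock Ollama install, the model referenced in the config above needs to be pulled once before Frigate can use it, for example:

```bash
# pull the llava vision model referenced in the config above
ollama pull llava
```
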
## Google Gemini

Google Gemini has a free tier allowing [15 queries per minute](https://ai.google.dev/pricing) to the API, which is more than sufficient for standard Frigate usage.

### Supported Models

You must use a vision capable model with Frigate. Current model variants can be found [in their documentation](https://ai.google.dev/gemini-api/docs/models/gemini). At the time of writing, this includes `gemini-1.5-pro` and `gemini-1.5-flash`.

### Get API Key

To start using Gemini, you must first get an API key from [Google AI Studio](https://aistudio.google.com).

1. Accept the Terms of Service
2. Click "Get API Key" from the right hand navigation
3. Click "Create API key in new project"
4. Copy the API key for use in your config

### Configuration

```yaml
genai:
  enabled: True
  provider: gemini
  api_key: "{FRIGATE_GEMINI_API_KEY}"
  model: gemini-1.5-flash
```

## OpenAI

OpenAI does not have a free tier for their API. With the release of gpt-4o, pricing has been reduced and each generation should cost fractions of a cent if you choose to go this route.

### Supported Models

You must use a vision capable model with Frigate. Current model variants can be found [in their documentation](https://platform.openai.com/docs/models). At the time of writing, this includes `gpt-4o` and `gpt-4-turbo`.

### Get API Key

To start using OpenAI, you must first [create an API key](https://platform.openai.com/api-keys) and [configure billing](https://platform.openai.com/settings/organization/billing/overview).

### Configuration

```yaml
genai:
  enabled: True
  provider: openai
  api_key: "{FRIGATE_OPENAI_API_KEY}"
  model: gpt-4o
```

## Custom Prompts

Frigate sends multiple frames from the detection along with a prompt to your Generative AI provider, asking it to generate a description. The default prompt is as follows:

```
Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background.
```

:::tip

Prompts can use variable replacements like `{label}`, `{sub_label}`, and `{camera}` to substitute information from the detection as part of the prompt.

:::

You are also able to define custom prompts in your configuration.

```yaml
genai:
  enabled: True
  provider: ollama
  base_url: http://localhost:11434
  model: llava
  prompt: "Describe the {label} in these images from the {camera} security camera."
  object_prompts:
    person: "Describe the main person in these images (gender, age, clothing, activity, etc). Do not include where the activity is occurring (sidewalk, concrete, driveway, etc). If delivering a package, include the company the package is from."
    car: "Label the primary vehicle in these images with just the name of the company if it is a delivery vehicle, or the color, make, and model."
```

### Experiment with prompts

Each provider also has a public-facing chat interface for its models. Download a couple of different thumbnails or snapshots from Frigate and experiment in the playground to get descriptions to your liking before updating the prompt in Frigate.

- OpenAI - [ChatGPT](https://chatgpt.com)
- Gemini - [Google AI Studio](https://aistudio.google.com)
- Ollama - [Open WebUI](https://docs.openwebui.com/)
@@ -56,6 +56,11 @@ go2rtc:
      password: "{FRIGATE_GO2RTC_RTSP_PASSWORD}"
```

```yaml
genai:
  api_key: "{FRIGATE_GENAI_API_KEY}"
```

## Common configuration examples

Here are some common starter configuration examples. Refer to the [reference config](./reference.md) for detailed information about all the config values.
@@ -78,7 +78,7 @@ It is, but the definition of "unnecessary" varies. I want to ignore areas of mot

> For me, giving my masks ANY padding results in a lot of people detection I'm not interested in. I live in the city and catch a lot of the sidewalk on my camera. People walk by my front door all the time and the margin between the sidewalk and actually walking onto my stoop is very thin, so I basically have everything but the exact contours of my stoop masked out. This results in very tidy detections but this info keeps throwing me off. Am I just overthinking it?

-This is what `required_zones` are for. You should define a zone (remember this is evaluated based on the bottom center of the bounding box) and make it required to save snapshots and clips (now events in 0.9.0). You can also use this in your conditions for a notification.
+This is what `required_zones` are for. You should define a zone (remember this is evaluated based on the bottom center of the bounding box) and make it required to save snapshots and clips (previously events in 0.9.0 to 0.13.0 and review items in 0.14.0 and later). You can also use this in your conditions for a notification.

> Maybe my specific situation just warrants this. I've just been having a hard time understanding the relevance of this information - it seems to be that it's exactly what would be expected when "masking out" an area of ANY image.
docs/docs/configuration/notifications.md (Normal file, 42 lines)
@@ -0,0 +1,42 @@
---
id: notifications
title: Notifications
---

# Notifications

Frigate offers native notifications using the [WebPush Protocol](https://web.dev/articles/push-notifications-web-push-protocol), which uses the [VAPID spec](https://tools.ietf.org/html/draft-thomson-webpush-vapid) to deliver encrypted notifications to web apps.

## Setting up Notifications

In order to use notifications, the following requirements must be met:

- Frigate must be accessed via a secure HTTPS connection
- A supported browser must be used. Currently Chrome, Firefox, and Safari are known to be supported.
- In order for notifications to be usable externally, Frigate must be accessible externally

### Configuration

To configure notifications, go to the Frigate WebUI -> Settings -> Notifications, enable them, then fill out the fields and save.

### Registration

Once notifications are enabled, press the `Register for Notifications` button on every device that you would like to receive notifications on. This registers the background worker. After this, Frigate must be restarted before notifications begin to be sent.

## Supported Notifications

Currently, notifications are only supported for review alerts. More notification types will be supported in the future.

:::note

Currently, only Chrome supports images in notifications. Safari and Firefox will only show a title and message in the notification.

:::

## Reduce Notification Latency

Different platforms handle notifications differently, so some settings changes may be required for optimal notification delivery.

### Android

Most Android phones have battery optimization settings. To get reliable notification delivery, the browser (Chrome, Firefox) should have battery optimizations disabled. If Frigate is running as a PWA, then the Frigate app should have battery optimizations disabled as well.
@@ -5,7 +5,7 @@ title: Object Detectors

# Officially Supported Detectors

-Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `openvino`, `tensorrt`, and `rknn`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras.
+Frigate provides the following builtin detector types: `cpu`, `edgetpu`, `openvino`, `tensorrt`, `rknn`, and `hailo8l`. By default, Frigate will use a single CPU detector. Other detectors may require additional configuration as described below. When using multiple detectors they will run in dedicated processes, but pull from a common queue of detection requests from across all cameras.

## CPU Detector (not recommended)
@@ -81,6 +81,15 @@ detectors:
    device: ""
```

### Single PCIE/M.2 Coral

```yaml
detectors:
  coral:
    type: edgetpu
    device: pci
```

### Multiple PCIE/M.2 Corals

```yaml
@@ -136,23 +145,7 @@ model:

#### YOLOX

-This detector also supports YOLOX. Frigate does not come with any YOLOX models preloaded, so you will need to supply your own models. This detector has been verified to work with the [yolox_tiny](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny) model from Intel's Open Model Zoo. You can follow [these instructions](https://github.com/openvinotoolkit/open_model_zoo/tree/master/models/public/yolox-tiny#download-a-model-and-convert-it-into-openvino-ir-format) to retrieve the OpenVINO-compatible `yolox_tiny` model. Make sure that the model input dimensions match the `width` and `height` parameters, and `model_type` is set accordingly. See [Full Configuration Reference](/configuration/reference.md) for a list of possible `model_type` options. Below is an example of how `yolox_tiny` can be used in Frigate:
-
-```yaml
-detectors:
-  ov:
-    type: openvino
-    device: GPU
-
-model:
-  width: 416
-  height: 416
-  input_tensor: nchw
-  input_pixel_format: bgr
-  model_type: yolox
-  path: /path/to/yolox_tiny.xml
-  labelmap_path: /path/to/coco_80cl.txt
-```
+This detector also supports YOLOX. Frigate does not come with any YOLOX models preloaded, so you will need to supply your own models.

#### YOLO-NAS
@@ -393,3 +386,25 @@ $ cat /sys/kernel/debug/rknpu/load

- All models are automatically downloaded and stored in the folder `config/model_cache/rknn_cache`. After upgrading Frigate, you should remove older models to free up space.
- You can also provide your own `.rknn` model. You should not save your own models in the `rknn_cache` folder; store them directly in the `model_cache` folder or another subfolder. To convert a model to `.rknn` format, see the `rknn-toolkit2` (requires an x86 machine). Note that there is only post-processing for the supported models.

## Hailo-8l

This detector is available if you are using the Raspberry Pi 5 with the Hailo-8L AI Kit. This has not been tested using the Hailo-8L with other hardware.

### Configuration

```yaml
detectors:
  hailo8l:
    type: hailo8l
    device: PCIe
    model:
      path: /config/model_cache/h8l_cache/ssd_mobilenet_v1.hef

model:
  width: 300
  height: 300
  input_tensor: nhwc
  input_pixel_format: bgr
  model_type: ssd
```
@@ -202,7 +202,7 @@ birdseye:
  inactivity_threshold: 30
  # Optional: Configure the birdseye layout
  layout:
-    # Optional: Scaling factor for the layout calculator (default: shown below)
+    # Optional: Scaling factor for the layout calculator, range 1.0-5.0 (default: shown below)
    scaling_factor: 2.0
    # Optional: Maximum number of cameras to show at one time, showing the most recent (default: show all cameras)
    max_cameras: 1
@@ -372,6 +372,14 @@ motion:
  # Optional: Delay when updating camera motion through MQTT from ON -> OFF (default: shown below).
  mqtt_off_delay: 30

# Optional: Notification Configuration
notifications:
  # Optional: Enable notification service (default: shown below)
  enabled: False
  # Optional: Email for push service to reach out to
  # NOTE: This is required to use notifications
  email: "admin@example.com"

# Optional: Record configuration
# NOTE: Can be overridden at the camera level
record:
@@ -465,8 +473,37 @@ snapshots:
  # Optional: quality of the encoded jpeg, 0-100 (default: shown below)
  quality: 70

# Optional: Configuration for semantic search capability
semantic_search:
  # Optional: Enable semantic search (default: shown below)
  enabled: False
  # Optional: Re-index embeddings database from historical events (default: shown below)
  reindex: False

# Optional: Configuration for AI generated event descriptions
# NOTE: Semantic Search must be enabled for this to do anything.
# WARNING: Depending on the provider, this will send thumbnails over the internet
# to Google or OpenAI's LLMs to generate descriptions. It can be overridden at
# the camera level (enabled: False) to enhance privacy for indoor cameras.
genai:
  # Optional: Enable AI description generation (default: shown below)
  enabled: False
  # Required if enabled: Provider must be one of ollama, gemini, or openai
  provider: ollama
  # Required if provider is ollama. May also be used for an OpenAI API compatible backend with the openai provider.
  base_url: http://localhost:11434
  # Required if gemini or openai
  api_key: "{FRIGATE_GENAI_API_KEY}"
  # Optional: The default prompt for generating descriptions. Can use replacement
  # variables like "label", "sub_label", "camera" to make more dynamic. (default: shown below)
  prompt: "Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background."
  # Optional: Object specific prompts to customize description results
  # Format: {label}: {prompt}
  object_prompts:
    person: "My special person prompt."

# Optional: Restream configuration
-# Uses https://github.com/AlexxIT/go2rtc (v1.8.3)
+# Uses https://github.com/AlexxIT/go2rtc (v1.9.2)
go2rtc:

# Optional: jsmpeg stream configuration for WebUI
@@ -613,8 +650,8 @@ cameras:
      user: admin
      # Optional: password for login.
      password: admin
-      # Optional: Ignores time synchronization mismatches between the camera and the server during authentication.
-      # Using NTP on both ends is recommended and this should only be set to True in a "safe" environment due to the security risk it represents.
+      # Optional: Ignores time synchronization mismatches between the camera and the server during authentication.
+      # Using NTP on both ends is recommended and this should only be set to True in a "safe" environment due to the security risk it represents.
      ignore_time_mismatch: False
      # Optional: PTZ camera object autotracking. Keeps a moving object in
      # the center of the frame by automatically moving the PTZ camera.
@@ -7,6 +7,16 @@ The Review page of the Frigate UI is for quickly reviewing historical footage of

Review items are filterable by date, object type, and camera.

### Review items vs. events

In Frigate 0.13 and earlier, the UI presented "events"; an event was synonymous with a tracked or detected object. In Frigate 0.14 and later, a review item is a time period where any number of tracked objects were active.

For example, consider a situation where two people walked past your house, one of them walking a dog, while at the same time a car drove by on the street behind them.

In this scenario, Frigate 0.13 and earlier would show 4 events in the UI: one for each person, another for the dog, and yet another for the car. You would have had 4 separate videos to watch even though they all overlapped.

In 0.14 and later, all of that is bundled into a single review item which starts and ends so as to capture all of that activity. Review items for a single camera cannot overlap. Once you have watched that time period on that camera, it is marked as reviewed.

## Alerts and Detections

Not every segment of video captured by Frigate may be of the same level of interest to you. Video of people who enter your property may be a different priority than those walking by on the sidewalk. For this reason, Frigate 0.14 categorizes review items as _alerts_ and _detections_. By default, all person and car objects are considered alerts. You can refine categorization of your review items by configuring required zones for them.
docs/docs/configuration/semantic_search.md (Normal file, 38 lines)
@@ -0,0 +1,38 @@
---
id: semantic_search
title: Using Semantic Search
---

Semantic search works by embedding images and/or text into a numeric vector representation. Frigate has support for two such models, which both run locally: [OpenAI CLIP](https://openai.com/research/clip) and [all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2). Embeddings are then saved to a local instance of [ChromaDB](https://trychroma.com).

## Configuration

Semantic Search is a global configuration setting.

```yaml
semantic_search:
  enabled: True
  reindex: False
```

:::tip

The embeddings database can be re-indexed from the existing detections in your database by adding `reindex: True` to your `semantic_search` configuration. Depending on the number of detections you have, it can take up to 30 minutes to complete and may max out your CPU while indexing. Make sure to set the config back to `False` before restarting Frigate again.

:::

### OpenAI CLIP

This model is able to embed both images and text into the same vector space, which allows `image -> image` and `text -> image` similarity searches. Frigate uses this model on detections to encode the thumbnail image and store it in Chroma. When searching detections via text in the search box, Frigate will perform a `text -> image` similarity search against this embedding. When clicking "FIND SIMILAR" next to a detection, Frigate will perform an `image -> image` similarity search to retrieve the closest matching thumbnails.

### all-MiniLM-L6-v2

This is a sentence embedding model that has been fine tuned on over 1 billion sentence pairs. This model is used to embed detection descriptions and perform searches against them. Descriptions can be created and/or modified on the search page by clicking on the info icon next to a detection. See [the Generative AI docs](/configuration/genai.md) for more information on how to automatically generate event descriptions.

## Usage Tips

1. Semantic search is used in conjunction with the other filters available on the search page. Use a combination of traditional filtering and semantic search for the best results.
2. The comparison between text and image embedding distances generally means that results matching the `description` will appear first, even if a `thumbnail` embedding may be a better match. Play with the "Search Type" filter to help find what you are looking for.
3. Make your search language and tone closely match your descriptions. If you are using thumbnail search, phrase your query as an image caption.
4. Semantic search on thumbnails tends to return better results when matching large subjects that take up most of the frame. Small things like "cat" tend to not work well.
5. Experiment! Find a detection you want to test and start typing keywords to see what works for you.
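The search page is backed by the events search API added alongside this feature; a quick way to experiment outside the UI is a direct request (a sketch; host, port, and query are illustrative):

```bash
# text search against thumbnail and description embeddings,
# returns events sorted by search_distance
curl -s 'http://frigate.local:5000/api/events/search?query=person%20walking%20a%20dog&limit=10' | jq .
```
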
@@ -48,6 +48,10 @@ When pixels in the current camera frame are different than previous frames. When

A portion of the camera frame that is sent to object detection; regions can be sent due to motion, active objects, or occasionally for stationary objects. These are represented by green boxes in the debug live view.

## Review Item

A review item is a time period where any number of events/tracked objects were active. [See the review docs for more info](/configuration/review)

## Snapshot Score

The score shown in a snapshot is the score of that object at that specific moment in time.
@@ -107,6 +107,12 @@ Frigate supports hardware video processing on all Rockchip boards. However, hard

The inference time of a rk3588 with all 3 cores enabled is typically 25-30 ms for yolo-nas s.

#### Hailo-8l PCIe

Frigate supports the Hailo-8L M.2 card on any hardware, but it is currently only tested on the Raspberry Pi 5 PCIe hat from the AI Kit.

The inference time for the Hailo-8L chip at the time of writing is around 17-21 ms for the SSD MobileNet version 1 model.

## What does Frigate use the CPU for and what does it use a detector for? (ELI5 Version)

This is taken from a [user question on reddit](https://www.reddit.com/r/homeassistant/comments/q8mgau/comment/hgqbxh5/?utm_source=share&utm_medium=web2x&context=3). Modified slightly for clarity.
@@ -5,6 +5,12 @@ title: Installation

Frigate is a Docker container that can be run on any Docker host including as a [HassOS Addon](https://www.home-assistant.io/addons/). Note that a Home Assistant Addon is **not** the same thing as the integration. The [integration](/integrations/home-assistant) is required to integrate Frigate into Home Assistant.

:::tip

If you already have Frigate installed as a Home Assistant addon, check out the [getting started guide](../guides/getting_started#configuring-frigate) to configure Frigate.

:::

## Dependencies

**MQTT broker (optional)** - An MQTT broker is optional with Frigate, but is required for the Home Assistant integration. If using Home Assistant, Frigate and Home Assistant must be connected to the same MQTT broker.
@@ -94,6 +100,38 @@ By default, the Raspberry Pi limits the amount of memory available to the GPU. I

Additionally, the USB Coral draws a considerable amount of power. If using any other USB devices such as an SSD, you will experience instability due to the Pi not providing enough power to USB devices. You will need to purchase an external USB hub with its own power supply. Some have reported success with <a href="https://amzn.to/3a2mH0P" target="_blank" rel="nofollow noopener sponsored">this</a> (affiliate link).

### Hailo-8L

The Hailo-8L is an M.2 card typically connected to a carrier board for PCIe, which then connects to the Raspberry Pi 5 as part of the AI Kit. However, it can also be used on other boards equipped with an M.2 M key edge connector.

#### Installation

For Raspberry Pi 5 users with the AI Kit, installation is straightforward. Simply follow this [guide](https://www.raspberrypi.com/documentation/accessories/ai-kit.html#ai-kit-installation) to install the driver and software.

For other installations, follow these steps:

1. Install the driver from the [Hailo GitHub repository](https://github.com/hailo-ai/hailort-drivers). A convenient script for Linux is available to clone the repository, build the driver, and install it.
2. Copy or download [this script](https://github.com/blakeblackshear/frigate/blob/41c9b13d2fffce508b32dfc971fa529b49295fbd/docker/hailo8l/user_installation.sh).
3. Ensure it has execution permissions with `sudo chmod +x install_hailo8l_driver.sh`
4. Run the script with `./install_hailo8l_driver.sh`

#### Setup

To set up Frigate, follow the default installation instructions, but use a Docker image with the `-h8l` suffix, for example: `ghcr.io/blakeblackshear/frigate:stable-h8l`

Next, grant Docker permissions to access your hardware by adding the following lines to your `docker-compose.yml` file:

```yaml
devices:
  - /dev/hailo0
```

If you are using `docker run`, add this option to your command: `--device /dev/hailo0`.
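A minimal `docker run` sketch combining the device mapping with the `-h8l` image (volume paths are illustrative):

```bash
docker run -d \
  --name frigate \
  --device /dev/hailo0 \
  -v /path/to/your/config:/config \
  -v /path/to/your/storage:/media/frigate \
  ghcr.io/blakeblackshear/frigate:stable-h8l
```
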
#### Configuration

Finally, configure [hardware object detection](/configuration/object_detectors#hailo-8l) to complete the setup.

### Rockchip platform

Make sure that you use a Linux distribution that comes with the Rockchip BSP kernel 5.10 or 6.1 and the necessary drivers (especially rkvdec2 and rknpu). To check, enter the following commands:
@@ -216,6 +254,7 @@ The community supported docker image tags for the current stable version are:
- `stable-rocm-gfx900` - AMD gfx900 driver only
- `stable-rocm-gfx1030` - AMD gfx1030 driver only
- `stable-rocm-gfx1100` - AMD gfx1100 driver only
- `stable-h8l` - Frigate build for the Hailo-8L M.2 PCIe Raspberry Pi 5 hat

## Home Assistant Addon
@@ -5,9 +5,17 @@ title: Getting started

# Getting Started

:::tip

If you already have an environment with Linux and Docker installed, you can continue to [Installing Frigate](#installing-frigate) below.

If you already have Frigate installed in Docker or as a Home Assistant addon, you can continue to [Configuring Frigate](#configuring-frigate) below.

:::

## Setting up hardware

-This section guides you through setting up a server with Debian Bookworm and Docker. If you already have an environment with Linux and Docker installed, you can continue to [Installing Frigate](#installing-frigate) below.
+This section guides you through setting up a server with Debian Bookworm and Docker.

### Install Debian 12 (Bookworm)
@@ -77,20 +85,19 @@ This section shows how to create a minimal directory structure for a Docker inst

### Setup directories

-Frigate requires a valid config file to start. The following directory structure is the bare minimum to get started. Once Frigate is running, you can use the built-in config editor which supports config validation.
+Frigate will create a config file if one does not exist on the initial startup. The following directory structure is the bare minimum to get started. Once Frigate is running, you can use the built-in config editor which supports config validation.

```
.
├── docker-compose.yml
├── config/
-│   └── config.yml
└── storage/
```

This will create the above structure:

```bash
-mkdir storage config && touch docker-compose.yml config/config.yml
+mkdir storage config && touch docker-compose.yml
```

If you are setting up Frigate on a Linux device via SSH, you can use [nano](https://itsfoss.com/nano-editor-guide/) to edit the following files. If you prefer to edit remote files with a full editor instead of a terminal, I recommend using [Visual Studio Code](https://code.visualstudio.com/) with the [Remote SSH extension](https://code.visualstudio.com/docs/remote/ssh-tutorial).
@@ -121,22 +128,6 @@ services:
      - "8554:8554" # RTSP feeds
```

-`config.yml`
-
-```yaml
-mqtt:
-  enabled: False
-
-cameras:
-  dummy_camera: # <--- this will be changed to your actual camera later
-    enabled: False
-    ffmpeg:
-      inputs:
-        - path: rtsp://127.0.0.1:554/rtsp
-          roles:
-            - detect
-```

Now you should be able to start Frigate by running `docker compose up -d` from within the folder containing `docker-compose.yml`. On startup, an admin user and password will be created and output in the logs. You can see this by running `docker logs frigate`. Frigate should now be accessible at `https://server_ip:8971` where you can log in with the `admin` user and finish the configuration using the built-in configuration editor.

## Configuring Frigate
@@ -274,13 +265,11 @@ cameras:
            - 0,461,3,0,1919,0,1919,843,1699,492,1344,458,1346,336,973,317,869,375,866,432
```

-### Step 6: Enable recording and/or snapshots
+### Step 6: Enable recordings

-In order to see Events in the Frigate UI, either snapshots or record will need to be enabled.
+In order to review activity in the Frigate UI, recordings need to be enabled.

#### Record

-To enable recording video, add the `record` role to a stream and enable it in the config. If record is disabled in the config, turning it on via the UI will not have any effect.
+To enable recording video, add the `record` role to a stream and enable it in the config. If record is disabled in the config, it won't be possible to enable it in the UI.

```yaml
mqtt: ...
@@ -307,26 +296,6 @@ If you don't have separate streams for detect and record, you would just add the

By default, Frigate will retain video of all events for 10 days. The full set of options for recording can be found [here](../configuration/reference.md).

-#### Snapshots
-
-To enable snapshots of your events, just enable it in the config. Snapshots are taken from the detect stream because it is the only stream decoded.
-
-```yaml
-mqtt: ...
-
-detectors: ...
-
-cameras:
-  name_of_your_camera: ...
-    detect: ...
-    record: ...
-    snapshots: # <----- Enable snapshots
-      enabled: True
-    motion: ...
-```
-
-By default, Frigate will retain snapshots of all events for 10 days. The full set of options for snapshots can be found [here](../configuration/reference.md).

### Step 7: Complete config

At this point you have a complete config with basic functionality. You can see the [full config reference](../configuration/reference.md) for a complete list of configuration options.
@@ -336,6 +305,8 @@ At this point you have a complete config with basic functionality. You can see t

Now that you have a working install, you can use the following documentation for additional features:

1. [Configuring go2rtc](configuring_go2rtc.md) - Additional live view options and RTSP relay
-2. [Home Assistant Integration](../integrations/home-assistant.md) - Integrate with Home Assistant
-3. [Masks](../configuration/masks.md)
-4. [Zones](../configuration/zones.md)
+2. [Zones](../configuration/zones.md)
+3. [Review](../configuration/review.md)
+4. [Masks](../configuration/masks.md)
+5. [Home Assistant Integration](../integrations/home-assistant.md) - Integrate with Home Assistant
@@ -5,7 +5,7 @@ title: Home Assistant notifications

The best way to get started with notifications for Frigate is to use the [Blueprint](https://community.home-assistant.io/t/frigate-mobile-app-notifications-2-0/559732). You can use the yaml generated from the Blueprint as a starting point and customize from there.

-It is generally recommended to trigger notifications based on the `frigate/events` mqtt topic. This provides the event_id needed to fetch [thumbnails/snapshots/clips](../integrations/home-assistant.md#notification-api) and other useful information to customize when and where you want to receive alerts. The data is published in the form of a change feed, which means you can reference the "previous state" of the object in the `before` section and the "current state" of the object in the `after` section. You can see an example [here](../integrations/mqtt.md#frigateevents).
+It is generally recommended to trigger notifications based on the `frigate/reviews` mqtt topic. This provides the event_id(s) needed to fetch [thumbnails/snapshots/clips](../integrations/home-assistant.md#notification-api) and other useful information to customize when and where you want to receive alerts. The data is published in the form of a change feed, which means you can reference the "previous state" of the object in the `before` section and the "current state" of the object in the `after` section. You can see an example [here](../integrations/mqtt.md#frigateevents).

Here is a simple example of a notification automation of events which will update the existing notification for each change. This means the image you see in the notification will update as Frigate finds a "better" image.
@@ -17,7 +17,7 @@ automation:
      topic: frigate/events
  action:
    - service: notify.mobile_app_pixel_3
-      data_template:
+      data:
        message: 'A {{trigger.payload_json["after"]["label"]}} was detected.'
        data:
          image: 'https://your.public.hass.address.com/api/frigate/notifications/{{trigger.payload_json["after"]["id"]}}/thumbnail.jpg?format=android'
@@ -33,48 +33,18 @@ automation:
  description: ""
  trigger:
    - platform: mqtt
-      topic: frigate/events
-      payload: new
-      value_template: "{{ value_json.type }}"
+      topic: frigate/reviews
+      payload: alert
+      value_template: "{{ value_json['after']['severity'] }}"
  action:
    - service: notify.mobile_app_iphone
      data:
-        message: 'A {{trigger.payload_json["after"]["label"]}} was detected.'
+        message: 'A {{trigger.payload_json["after"]["data"]["objects"] | sort | join(", ") | title}} was detected.'
        data:
          image: >-
-            https://your.public.hass.address.com/api/frigate/notifications/{{trigger.payload_json["after"]["id"]}}/thumbnail.jpg
+            https://your.public.hass.address.com/api/frigate/notifications/{{trigger.payload_json["after"]["data"]["detections"][0]}}/thumbnail.jpg
          tag: '{{trigger.payload_json["after"]["id"]}}'
          when: '{{trigger.payload_json["after"]["start_time"]|int}}'
          entity_id: camera.{{trigger.payload_json["after"]["camera"] | replace("-","_") | lower}}
  mode: single
```

-## Conditions
-
-Conditions with the `before` and `after` values allow a high degree of customization for automations.
-
-When a person enters a zone named yard
-
-```yaml
-condition:
-  - "{{ trigger.payload_json['after']['label'] == 'person' }}"
-  - "{{ 'yard' in trigger.payload_json['after']['entered_zones'] }}"
-```
-
-When a person leaves a zone named yard
-
-```yaml
-condition:
-  - "{{ trigger.payload_json['after']['label'] == 'person' }}"
-  - "{{ 'yard' in trigger.payload_json['before']['current_zones'] }}"
-  - "{{ not 'yard' in trigger.payload_json['after']['current_zones'] }}"
-```
-
-Notify for dogs in the front with a high top score
-
-```yaml
-condition:
-  - "{{ trigger.payload_json['after']['label'] == 'dog' }}"
-  - "{{ trigger.payload_json['after']['camera'] == 'front' }}"
-  - "{{ trigger.payload_json['after']['top_score'] > 0.98 }}"
-```
@@ -381,9 +381,13 @@ List of frames in the preview cache for the time range. Previews are only kept i

Specific preview frame from preview cache.

-### `GET /<camera>/start/<start-timestamp>/end/<end-timestamp>/preview.gif`
+### `GET /<camera>/start/<start-timestamp>/end/<end-timestamp>/preview`

-Gif made from preview video / frames during this time range
+Looping image made from preview video / frames during this time range.

| param    | Type | Description                      |
| -------- | ---- | -------------------------------- |
| `format` | str  | Format of preview [`gif`, `mp4`] |

## Recordings
@@ -455,6 +459,10 @@ Reviews from the database. Accepts the following query string parameters:
| `limit`    | int | Limit the number of events returned |
| `severity` | str | Limit items to severity (alert, detection, significant_motion) |

### `GET /api/review/<id>`

Get review with `id` from the database.
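For example (a sketch; host, port, and the review id are illustrative):

```bash
curl -s http://frigate.local:5000/api/review/1718200000.123456-abc123 | jq .
```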

### `GET /api/review/summary`

Summary of reviews for the last 30 days. Accepts the following query string parameters:
@@ -138,13 +138,14 @@ Message published for each changed review item. The first message is published w
        "person",
        "car"
      ],
-      "sub_labels": [],
+      "sub_labels": ["Bob"],
      "zones": [
        "front_yard"
      ],
      "audio": []
    }
  }
}
```

### `frigate/stats`
@@ -3,7 +3,7 @@ id: index
title: Models
---

-<a href="https://plus.frigate.video" target="_blank" rel="nofollow">Frigate+</a> offers models trained on images submitted by Frigate+ users from their security cameras and is specifically designed for the way Frigate NVR analyzes video footage. These models offer higher accuracy with less resources. The images you upload are used to fine tune a baseline model trained from images uploaded by all Frigate+ users. This fine tuning process results in a model that is optimized for accuracy in your specific conditions.
+<a href="https://frigate.video/plus" target="_blank" rel="nofollow">Frigate+</a> offers models trained on images submitted by Frigate+ users from their security cameras and is specifically designed for the way Frigate NVR analyzes video footage. These models offer higher accuracy with less resources. The images you upload are used to fine tune a baseline model trained from images uploaded by all Frigate+ users. This fine tuning process results in a model that is optimized for accuracy in your specific conditions.

:::info
@@ -28,6 +28,7 @@ You can open `chrome://media-internals/` in another tab and then try to playback

Frigate generally [recommends cameras with configurable sub streams](/frigate/hardware.md). However, if your camera does not have a sub stream with a suitable resolution, the main stream can be resized.

To do this efficiently, the following setup is required (see the sketch after this list):

1. A GPU or iGPU must be available to do the scaling.
2. [ffmpeg presets for hwaccel](/configuration/hardware_acceleration.md) must be used
3. Set the desired detection resolution for `detect -> width` and `detect -> height`.
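A rough sketch of what those three steps can look like together in the config (the preset and resolution depend on your hardware and camera; names are illustrative):

```yaml
cameras:
  front:
    ffmpeg:
      # 2. hardware acceleration preset (VAAPI assumed here)
      hwaccel_args: preset-vaapi
      inputs:
        - path: rtsp://camera-ip:554/main
          roles:
            - detect
    detect:
      # 3. desired detection resolution; the main stream is scaled down to this
      width: 1280
      height: 720
```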
@@ -56,10 +57,44 @@ SQLite does not work well on a network share, if the `/media` folder is mapped t

If MQTT isn't working in docker, try using the IP of the device hosting the MQTT server instead of `localhost`, `127.0.0.1`, or `mosquitto.ix-mosquitto.svc.cluster.local`.

-This is because, by default, Frigate does not run in host mode so localhost points to the Frigate container and not the host device's network.
+This is because Frigate does not run in host mode, so localhost points to the Frigate container and not the host device's network.

### How do I know if my camera is offline

A camera being offline can be detected via MQTT or /api/stats; the camera_fps for any offline camera will be 0.

-Also, Home Assistant will mark any offline camera as being unavailable when the camera is offline
+Also, Home Assistant will mark any offline camera as being unavailable when the camera is offline.
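A quick way to check from the command line (a sketch; host and port assumed):

```bash
# inspect camera_fps for each camera in the stats output; 0 means offline
curl -s http://frigate.local:5000/api/stats | jq .
```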

### How can I view the Frigate log files without using the Web UI?

Frigate manages logs internally as well as outputs directly to Docker via standard output. To view these logs using the CLI, follow these steps:

- Open a terminal or command prompt on the host running your Frigate container.
- Type the following command and press Enter:

```
docker logs -f frigate
```

This command tells Docker to show you the logs from the Frigate container.

Note: If you've given your Frigate container a different name, replace "frigate" in the command with your container's actual name. The "-f" option means the logs will continue to update in real-time as new entries are added. To stop viewing the logs, press `Ctrl+C`. If you'd like to learn more about using Docker logs, including additional options and features, you can explore Docker's [official documentation](https://docs.docker.com/engine/reference/commandline/logs/).

Alternatively, when you create the Frigate Docker container, you can bind a directory on the host to the mountpoint `/dev/shm/logs` to not only persist the logs to disk, but also to be able to query them directly from the host using your favorite log parsing/query utility.

```
docker run -d \
  --name frigate \
  --restart=unless-stopped \
  --mount type=tmpfs,target=/tmp/cache,tmpfs-size=1000000000 \
  --device /dev/bus/usb:/dev/bus/usb \
  --device /dev/dri/renderD128 \
  --shm-size=64m \
  -v /path/to/your/storage:/media/frigate \
  -v /path/to/your/config:/config \
  -v /etc/localtime:/etc/localtime:ro \
  -v /path/to/local/log/dir:/dev/shm/logs \
  -e FRIGATE_RTSP_PASSWORD='password' \
  -p 5000:5000 \
  -p 8554:8554 \
  -p 8555:8555/tcp \
  -p 8555:8555/udp \
  ghcr.io/blakeblackshear/frigate:stable
```
@@ -29,6 +29,10 @@ module.exports = {
     "configuration/object_detectors",
     "configuration/audio_detectors",
   ],
+  "Semantic Search": [
+    "configuration/semantic_search",
+    "configuration/genai",
+  ],
   Cameras: [
     "configuration/cameras",
     "configuration/review",
@@ -50,6 +54,7 @@ module.exports = {
   ],
   "Extra Configuration": [
     "configuration/authentication",
+    "configuration/notifications",
     "configuration/hardware_acceleration",
     "configuration/ffmpeg_presets",
     "configuration/tls",
@@ -7,6 +7,7 @@ import os
 import traceback
 from datetime import datetime, timedelta
 from functools import reduce
+from typing import Optional

 import requests
 from flask import Blueprint, Flask, current_app, jsonify, make_response, request
@@ -19,10 +20,12 @@ from frigate.api.auth import AuthBp, get_jwt_secret, limiter
 from frigate.api.event import EventBp
 from frigate.api.export import ExportBp
 from frigate.api.media import MediaBp
+from frigate.api.notification import NotificationBp
 from frigate.api.preview import PreviewBp
 from frigate.api.review import ReviewBp
 from frigate.config import FrigateConfig
 from frigate.const import CONFIG_DIR
+from frigate.embeddings import EmbeddingsContext
 from frigate.events.external import ExternalEventProcessor
 from frigate.models import Event, Timeline
 from frigate.plus import PlusApi
@@ -47,11 +50,13 @@ bp.register_blueprint(MediaBp)
 bp.register_blueprint(PreviewBp)
 bp.register_blueprint(ReviewBp)
 bp.register_blueprint(AuthBp)
+bp.register_blueprint(NotificationBp)


 def create_app(
     frigate_config,
     database: SqliteQueueDatabase,
+    embeddings: Optional[EmbeddingsContext],
     detected_frames_processor,
     storage_maintainer: StorageMaintainer,
     onvif: OnvifController,
@@ -79,6 +84,7 @@ def create_app(
         database.close()

     app.frigate_config = frigate_config
+    app.embeddings = embeddings
     app.detected_frames_processor = detected_frames_processor
     app.storage_maintainer = storage_maintainer
     app.onvif = onvif
@@ -450,10 +456,24 @@ def vainfo():


 @bp.route("/logs/<service>", methods=["GET"])
 def logs(service: str):
+    def download_logs(service_location: str):
+        try:
+            file = open(service_location, "r")
+            contents = file.read()
+            file.close()
+            return jsonify(contents)
+        except FileNotFoundError as e:
+            logger.error(e)
+            return make_response(
+                jsonify({"success": False, "message": "Could not find log file"}),
+                500,
+            )
+
     log_locations = {
         "frigate": "/dev/shm/logs/frigate/current",
         "go2rtc": "/dev/shm/logs/go2rtc/current",
         "nginx": "/dev/shm/logs/nginx/current",
+        "chroma": "/dev/shm/logs/chroma/current",
     }
     service_location = log_locations.get(service)

@@ -463,6 +483,9 @@ def logs(service: str):
             404,
         )

+    if request.args.get("download", type=bool, default=False):
+        return download_logs(service_location)
+
     start = request.args.get("start", type=int, default=0)
     end = request.args.get("end", type=int)
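Based on the code above, the new `download` flag can be exercised directly, e.g. (host and port assumed):

```bash
# returns the full log file contents instead of paginated lines
curl -s 'http://frigate.local:5000/api/logs/chroma?download=true'
```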
@@ -1,5 +1,7 @@
 """Event apis."""

+import base64
+import io
 import logging
 import os
 from datetime import datetime
@@ -8,6 +10,7 @@ from pathlib import Path
 from urllib.parse import unquote

 import cv2
+import numpy as np
 from flask import (
     Blueprint,
     current_app,
@@ -15,13 +18,16 @@ from flask import (
     make_response,
     request,
 )
-from peewee import DoesNotExist, fn, operator
+from peewee import JOIN, DoesNotExist, fn, operator
 from PIL import Image
 from playhouse.shortcuts import model_to_dict

 from frigate.const import (
     CLIPS_DIR,
 )
-from frigate.models import Event, Timeline
+from frigate.embeddings import EmbeddingsContext
+from frigate.embeddings.embeddings import get_metadata
+from frigate.models import Event, ReviewSegment, Timeline
 from frigate.object_processing import TrackedObject
 from frigate.util.builtin import get_tz_modifiers
@@ -245,6 +251,209 @@ def events():
|
||||
return jsonify(list(events))
|
||||
|
||||
|
||||
@EventBp.route("/event_ids")
|
||||
def event_ids():
|
||||
idString = request.args.get("ids")
|
||||
ids = idString.split(",")
|
||||
|
||||
if not ids:
|
||||
return make_response(
|
||||
jsonify({"success": False, "message": "Valid list of ids must be sent"}),
|
||||
400,
|
||||
)
|
||||
|
||||
try:
|
||||
events = Event.select().where(Event.id << ids).dicts().iterator()
|
||||
return jsonify(list(events))
|
||||
except Exception:
|
||||
return make_response(
|
||||
jsonify({"success": False, "message": "Events not found"}), 400
|
||||
)
|
||||
|
||||
|
||||
@EventBp.route("/events/search")
|
||||
def events_search():
|
||||
query = request.args.get("query", type=str)
|
||||
search_type = request.args.get("search_type", "text", type=str)
|
||||
include_thumbnails = request.args.get("include_thumbnails", default=1, type=int)
|
||||
limit = request.args.get("limit", 50, type=int)
|
||||
|
||||
# Filters
|
||||
cameras = request.args.get("cameras", "all", type=str)
|
||||
labels = request.args.get("labels", "all", type=str)
|
||||
zones = request.args.get("zones", "all", type=str)
|
||||
after = request.args.get("after", type=float)
|
||||
before = request.args.get("before", type=float)
|
||||
|
||||
if not query:
|
||||
return make_response(
|
||||
jsonify(
|
||||
{
|
||||
"success": False,
|
||||
"message": "A search query must be supplied",
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
if not current_app.frigate_config.semantic_search.enabled:
|
||||
return make_response(
|
||||
jsonify(
|
||||
{
|
||||
"success": False,
|
||||
"message": "Semantic search is not enabled",
|
||||
}
|
||||
),
|
||||
400,
|
||||
)
|
||||
|
||||
context: EmbeddingsContext = current_app.embeddings
|
||||
|
||||
selected_columns = [
|
||||
Event.id,
|
||||
Event.camera,
|
||||
Event.label,
|
||||
Event.sub_label,
|
||||
Event.zones,
|
||||
Event.start_time,
|
||||
Event.end_time,
|
||||
Event.data,
|
||||
ReviewSegment.thumb_path,
|
||||
]
|
||||
|
||||
if include_thumbnails:
|
||||
selected_columns.append(Event.thumbnail)
|
||||
|
||||
# Build the where clause for the embeddings query
|
||||
embeddings_filters = []
|
||||
|
||||
if cameras != "all":
|
||||
camera_list = cameras.split(",")
|
||||
embeddings_filters.append({"camera": {"$in": camera_list}})
|
||||
|
||||
if labels != "all":
|
||||
label_list = labels.split(",")
|
||||
embeddings_filters.append({"label": {"$in": label_list}})
|
||||
|
||||
if zones != "all":
|
||||
filtered_zones = zones.split(",")
|
||||
zone_filters = [{f"zones_{zone}": {"$eq": True}} for zone in filtered_zones]
|
||||
if len(zone_filters) > 1:
|
||||
embeddings_filters.append({"$or": zone_filters})
|
||||
else:
|
||||
embeddings_filters.append(zone_filters[0])
|
||||
|
||||
if after:
|
||||
embeddings_filters.append({"start_time": {"$gt": after}})
|
||||
|
||||
if before:
|
||||
embeddings_filters.append({"start_time": {"$lt": before}})
|
||||
|
||||
where = None
|
||||
if len(embeddings_filters) > 1:
|
||||
where = {"$and": embeddings_filters}
|
||||
elif len(embeddings_filters) == 1:
|
||||
where = embeddings_filters[0]
|
||||
|
||||
thumb_ids = {}
|
||||
desc_ids = {}
|
||||
|
||||
if search_type == "thumbnail":
|
||||
# Grab the ids of events that match the thumbnail image embeddings
|
||||
try:
|
||||
search_event: Event = Event.get(Event.id == query)
|
||||
except DoesNotExist:
|
||||
return make_response(
|
||||
jsonify(
|
||||
{
|
||||
"success": False,
|
||||
"message": "Event not found",
|
||||
}
|
||||
),
|
||||
404,
|
||||
)
|
||||
thumbnail = base64.b64decode(search_event.thumbnail)
|
||||
img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
|
||||
thumb_result = context.embeddings.thumbnail.query(
|
||||
query_images=[img],
|
||||
n_results=limit,
|
||||
where=where,
|
||||
)
|
||||
thumb_ids = dict(zip(thumb_result["ids"][0], thumb_result["distances"][0]))
|
||||
else:
|
||||
thumb_result = context.embeddings.thumbnail.query(
|
||||
query_texts=[query],
|
||||
n_results=limit,
|
||||
where=where,
|
||||
)
|
||||
# Do a rudimentary normalization of the difference in distances returned by CLIP and MiniLM.
|
||||
thumb_ids = dict(
|
||||
zip(
|
||||
thumb_result["ids"][0],
|
||||
context.thumb_stats.normalize(thumb_result["distances"][0]),
|
||||
)
|
||||
)
|
||||
desc_result = context.embeddings.description.query(
|
||||
query_texts=[query],
|
||||
n_results=limit,
|
||||
where=where,
|
||||
)
|
||||
desc_ids = dict(
|
||||
zip(
|
||||
desc_result["ids"][0],
|
||||
context.desc_stats.normalize(desc_result["distances"][0]),
|
||||
)
|
||||
)
|
||||
|
||||
results = {}
|
||||
for event_id in thumb_ids.keys() | desc_ids:
|
||||
min_distance = min(
|
||||
i
|
||||
for i in (thumb_ids.get(event_id), desc_ids.get(event_id))
|
||||
if i is not None
|
||||
)
|
||||
results[event_id] = {
|
||||
"distance": min_distance,
|
||||
"source": "thumbnail"
|
||||
if min_distance == thumb_ids.get(event_id)
|
||||
else "description",
|
||||
}
|
||||
|
||||
if not results:
|
||||
return jsonify([])
|
||||
|
||||
# Get the event data
|
||||
events = (
|
||||
Event.select(*selected_columns)
|
||||
.join(
|
||||
ReviewSegment,
|
||||
JOIN.LEFT_OUTER,
|
||||
on=(fn.json_extract(ReviewSegment.data, "$.detections").contains(Event.id)),
|
||||
)
|
||||
.where(Event.id << list(results.keys()))
|
||||
.dicts()
|
||||
.iterator()
|
||||
)
|
||||
events = list(events)
|
||||
|
||||
events = [
|
||||
{k: v for k, v in event.items() if k != "data"}
|
||||
| {
|
||||
k: v
|
||||
for k, v in event["data"].items()
|
||||
if k in ["type", "score", "top_score", "description"]
|
||||
}
|
||||
| {
|
||||
"search_distance": results[event["id"]]["distance"],
|
||||
"search_source": results[event["id"]]["source"],
|
||||
}
|
||||
for event in events
|
||||
]
|
||||
events = sorted(events, key=lambda x: x["search_distance"])[:limit]
|
||||
|
||||
return jsonify(events)
|
||||
|
||||
|
||||
@EventBp.route("/events/summary")
|
||||
def events_summary():
|
||||
tz_name = request.args.get("timezone", default="utc", type=str)
|
||||
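
For orientation, here is a minimal sketch of how a client might exercise the new search endpoint; the host, port, and query values are illustrative assumptions, not part of this diff:

import requests

# Hypothetical Frigate instance; adjust host/port for your deployment.
BASE = "http://localhost:5000/api"

# Text search against both the thumbnail (CLIP) and description (MiniLM) indexes.
resp = requests.get(
    f"{BASE}/events/search",
    params={
        "query": "person carrying a package",
        "cameras": "front_door",  # comma-separated list, or "all"
        "limit": 10,
    },
)
for event in resp.json():
    # Results are sorted by ascending search_distance (lower is closer).
    print(event["id"], event["search_distance"], event["search_source"])
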
@@ -604,6 +813,52 @@ def set_sub_label(id):
    )


+@EventBp.route("/events/<id>/description", methods=("POST",))
+def set_description(id):
+    try:
+        event: Event = Event.get(Event.id == id)
+    except DoesNotExist:
+        return make_response(
+            jsonify({"success": False, "message": "Event " + id + " not found"}), 404
+        )
+
+    json: dict[str, any] = request.get_json(silent=True) or {}
+    new_description = json.get("description")
+
+    if new_description is None or len(new_description) == 0:
+        return make_response(
+            jsonify(
+                {
+                    "success": False,
+                    "message": "description cannot be empty",
+                }
+            ),
+            400,
+        )
+
+    event.data["description"] = new_description
+    event.save()
+
+    # If semantic search is enabled, update the index
+    if current_app.frigate_config.semantic_search.enabled:
+        context: EmbeddingsContext = current_app.embeddings
+        context.embeddings.description.upsert(
+            documents=[new_description],
+            metadatas=[get_metadata(event)],
+            ids=[id],
+        )
+
+    return make_response(
+        jsonify(
+            {
+                "success": True,
+                "message": "Event " + id + " description set to " + new_description,
+            }
+        ),
+        200,
+    )
+
+
@EventBp.route("/events/<id>", methods=("DELETE",))
def delete_event(id):
    try:

@@ -625,6 +880,11 @@ def delete_event(id):

    event.delete_instance()
    Timeline.delete().where(Timeline.source_id == id).execute()
+    # If semantic search is enabled, update the index
+    if current_app.frigate_config.semantic_search.enabled:
+        context: EmbeddingsContext = current_app.embeddings
+        context.embeddings.thumbnail.delete(ids=[id])
+        context.embeddings.description.delete(ids=[id])
    return make_response(
        jsonify({"success": True, "message": "Event " + id + " deleted"}), 200
    )
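
A short usage sketch tying the two index-maintenance paths above together (the base URL and event id are hypothetical):

import requests

BASE = "http://localhost:5000/api"  # assumed local instance
event_id = "1718000000.123456-abc123"  # hypothetical event id

# Writing a description also upserts it into the ChromaDB description index
# when semantic search is enabled.
requests.post(
    f"{BASE}/events/{event_id}/description",
    json={"description": "A courier leaving a package at the door"},
)

# Deleting the event removes both its thumbnail and description embeddings.
requests.delete(f"{BASE}/events/{event_id}")
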
@@ -13,7 +13,6 @@ from flask import (
    request,
)
from peewee import DoesNotExist
-from werkzeug.utils import secure_filename

from frigate.const import EXPORT_DIR
from frigate.models import Export, Recordings

@@ -48,14 +47,16 @@ def export_recording(camera_name: str, start_time, end_time):

    json: dict[str, any] = request.get_json(silent=True) or {}
    playback_factor = json.get("playback", "realtime")
-    name: Optional[str] = json.get("name")
+    friendly_name: Optional[str] = json.get("name")

-    if len(name or "") > 256:
+    if len(friendly_name or "") > 256:
        return make_response(
            jsonify({"success": False, "message": "File name is too long."}),
            401,
        )

+    existing_image = json.get("image_path")
+
    recordings_count = (
        Recordings.select()
        .where(

@@ -78,7 +79,8 @@ def export_recording(camera_name: str, start_time, end_time):
    exporter = RecordingExporter(
        current_app.frigate_config,
        camera_name,
-        secure_filename(name) if name else None,
+        friendly_name,
+        existing_image,
        int(start_time),
        int(end_time),
        (
@@ -546,6 +546,11 @@ def vod_ts(camera_name, start_ts, end_ts):
        if recording.end_time > end_ts:
            duration -= int((recording.end_time - end_ts) * 1000)

+        if duration == 0:
+            # this means the segment starts right at the end of the requested time range
+            # and it does not need to be included
+            continue
+
        if 0 < duration < max_duration_ms:
            clip["keyFrameDurations"] = [duration]
            clips.append(clip)

@@ -554,7 +559,9 @@ def vod_ts(camera_name, start_ts, end_ts):
            logger.warning(f"Recording clip is missing or empty: {recording.path}")

    if not clips:
-        logger.error("No recordings found for the requested time range")
+        logger.error(
+            f"No recordings found for {camera_name} during the requested time range"
+        )
        return make_response(
            jsonify(
                {
65
frigate/api/notification.py
Normal file
@@ -0,0 +1,65 @@
"""Notification apis."""

import logging
import os

from cryptography.hazmat.primitives import serialization
from flask import (
    Blueprint,
    current_app,
    jsonify,
    make_response,
    request,
)
from peewee import DoesNotExist
from py_vapid import Vapid01, utils

from frigate.const import CONFIG_DIR
from frigate.models import User

logger = logging.getLogger(__name__)

NotificationBp = Blueprint("notifications", __name__)


@NotificationBp.route("/notifications/pubkey", methods=["GET"])
def get_vapid_pub_key():
    if not current_app.frigate_config.notifications.enabled:
        return make_response(
            jsonify({"success": False, "message": "Notifications are not enabled."}),
            400,
        )

    key = Vapid01.from_file(os.path.join(CONFIG_DIR, "notifications.pem"))
    raw_pub = key.public_key.public_bytes(
        serialization.Encoding.X962, serialization.PublicFormat.UncompressedPoint
    )
    return jsonify(utils.b64urlencode(raw_pub)), 200


@NotificationBp.route("/notifications/register", methods=["POST"])
def register_notifications():
    if current_app.frigate_config.auth.enabled:
        username = request.headers.get("remote-user", type=str) or "admin"
    else:
        username = "admin"

    json: dict[str, any] = request.get_json(silent=True) or {}
    sub = json.get("sub")

    if not sub:
        return jsonify(
            {"success": False, "message": "Subscription must be provided."}
        ), 400

    try:
        User.update(notification_tokens=User.notification_tokens.append(sub)).where(
            User.username == username
        ).execute()
        return make_response(
            jsonify({"success": True, "message": "Successfully saved token."}), 200
        )
    except DoesNotExist:
        return make_response(
            jsonify({"success": False, "message": "Could not find user."}), 404
        )
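
As a usage sketch for the two routes in this new file (the base URL and subscription values are placeholders; a real subscription object comes from the browser's PushManager, not from Python):

import requests

BASE = "http://localhost:5000/api"  # assumed local instance

# Returns the uncompressed EC public key, base64url-encoded, which a front end
# would pass as the applicationServerKey of PushManager.subscribe().
vapid_pub_key = requests.get(f"{BASE}/notifications/pubkey").json()
print(vapid_pub_key)

# A browser produces this subscription object; the values here are placeholders.
subscription = {
    "endpoint": "https://push.example.com/send/abc123",
    "keys": {"p256dh": "<client public key>", "auth": "<auth secret>"},
}
requests.post(f"{BASE}/notifications/register", json={"sub": subscription})
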
@@ -475,7 +475,7 @@ def motion_activity():
        logger.warning("No motion data found for the requested time range")
        return jsonify([])

-    df = df.astype(dtype={"motion": "float16"})
+    df = df.astype(dtype={"motion": "float32"})

    # set date as datetime index
    df["start_time"] = pd.to_datetime(df["start_time"], unit="s")

@@ -497,11 +497,13 @@ def motion_activity():

    for i in range(0, length, chunk):
        part = df.iloc[i : i + chunk]
-        df.iloc[i : i + chunk, 0] = (
-            (part["motion"] - part["motion"].min())
-            / (part["motion"].max() - part["motion"].min())
-            * 100
-        ).fillna(0.0)
+        min_val, max_val = part["motion"].min(), part["motion"].max()
+        if min_val != max_val:
+            df.iloc[i : i + chunk, 0] = (
+                part["motion"].sub(min_val).div(max_val - min_val).mul(100).fillna(0)
+            )
+        else:
+            df.iloc[i : i + chunk, 0] = 0.0

    # change types for output
    df.index = df.index.astype(int) // (10**9)
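
The motivation for the rewritten loop: when a chunk's motion values are all identical, max - min is zero and the old expression relied on fillna to mask the resulting NaNs; the new code makes the flat-chunk case explicit. The same per-chunk min-max scaling can be reproduced in isolation:

import pandas as pd

def scale_chunk(part: pd.Series) -> pd.Series:
    """Scale a chunk of motion values to 0-100, avoiding division by zero."""
    min_val, max_val = part.min(), part.max()
    if min_val == max_val:
        # A flat chunk (e.g. all zeros) has no range to normalize over.
        return pd.Series(0.0, index=part.index)
    return part.sub(min_val).div(max_val - min_val).mul(100)

print(scale_chunk(pd.Series([3.0, 3.0, 3.0])).tolist())   # [0.0, 0.0, 0.0]
print(scale_chunk(pd.Series([0.0, 5.0, 10.0])).tolist())  # [0.0, 50.0, 100.0]
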
@@ -22,11 +22,12 @@ from pydantic import ValidationError
from frigate.api.app import create_app
from frigate.api.auth import hash_password
from frigate.comms.config_updater import ConfigPublisher
-from frigate.comms.detections_updater import DetectionProxy
from frigate.comms.dispatcher import Communicator, Dispatcher
from frigate.comms.inter_process import InterProcessCommunicator
from frigate.comms.mqtt import MqttClient
+from frigate.comms.webpush import WebPushClient
from frigate.comms.ws import WebSocketClient
+from frigate.comms.zmq_proxy import ZmqProxy
from frigate.config import FrigateConfig
from frigate.const import (
    CACHE_DIR,

@@ -37,6 +38,7 @@ from frigate.const import (
    MODEL_CACHE_DIR,
    RECORD_DIR,
)
+from frigate.embeddings import EmbeddingsContext, manage_embeddings
from frigate.events.audio import listen_to_audio
from frigate.events.cleanup import EventCleanup
from frigate.events.external import ExternalEventProcessor

@@ -316,7 +318,25 @@ class FrigateApp:
        self.review_segment_process = review_segment_process
        review_segment_process.start()
        self.processes["review_segment"] = review_segment_process.pid or 0
-        logger.info(f"Recording process started: {review_segment_process.pid}")
+        logger.info(f"Review process started: {review_segment_process.pid}")

+    def init_embeddings_manager(self) -> None:
+        if not self.config.semantic_search.enabled:
+            self.embeddings = None
+            return
+
+        # Create a client for other processes to use
+        self.embeddings = EmbeddingsContext()
+        embedding_process = mp.Process(
+            target=manage_embeddings,
+            name="embeddings_manager",
+            args=(self.config,),
+        )
+        embedding_process.daemon = True
+        self.embedding_process = embedding_process
+        embedding_process.start()
+        self.processes["embeddings"] = embedding_process.pid or 0
+        logger.info(f"Embedding process started: {embedding_process.pid}")
+
    def bind_database(self) -> None:
        """Bind db to the main process."""

@@ -362,12 +382,13 @@ class FrigateApp:
    def init_inter_process_communicator(self) -> None:
        self.inter_process_communicator = InterProcessCommunicator()
        self.inter_config_updater = ConfigPublisher()
-        self.inter_detection_proxy = DetectionProxy()
+        self.inter_zmq_proxy = ZmqProxy()

    def init_web_server(self) -> None:
        self.flask_app = create_app(
            self.config,
            self.db,
+            self.embeddings,
            self.detected_frames_processor,
            self.storage_maintainer,
            self.onvif_controller,

@@ -385,6 +406,9 @@ class FrigateApp:
        if self.config.mqtt.enabled:
            comms.append(MqttClient(self.config))

+        if self.config.notifications.enabled:
+            comms.append(WebPushClient(self.config))
+
        comms.append(WebSocketClient(self.config))
        comms.append(self.inter_process_communicator)

@@ -678,6 +702,7 @@ class FrigateApp:
        self.init_onvif()
        self.init_recording_manager()
        self.init_review_segment_manager()
+        self.init_embeddings_manager()
        self.init_go2rtc()
        self.bind_database()
        self.check_db_data_migrations()

@@ -794,10 +819,14 @@ class FrigateApp:
        self.frigate_watchdog.join()
        self.db.stop()

+        # Save embeddings stats to disk
+        if self.embeddings:
+            self.embeddings.save_stats()
+
        # Stop Communicators
        self.inter_process_communicator.stop()
        self.inter_config_updater.stop()
-        self.inter_detection_proxy.stop()
+        self.inter_zmq_proxy.stop()

        while len(self.detection_shms) > 0:
            shm = self.detection_shms.pop()
@@ -1,14 +1,9 @@
"""Facilitates communication between processes."""

-import threading
from enum import Enum
from typing import Optional

-import zmq
-
-SOCKET_CONTROL = "inproc://control.detections_updater"
-SOCKET_PUB = "ipc:///tmp/cache/detect_pub"
-SOCKET_SUB = "ipc:///tmp/cache/detect_sub"
+from .zmq_proxy import Publisher, Subscriber


class DetectionTypeEnum(str, Enum):

@@ -18,85 +13,31 @@ class DetectionTypeEnum(str, Enum):
    audio = "audio"


-class DetectionProxyRunner(threading.Thread):
-    def __init__(self, context: zmq.Context[zmq.Socket]) -> None:
-        threading.Thread.__init__(self)
-        self.name = "detection_proxy"
-        self.context = context
-
-    def run(self) -> None:
-        """Run the proxy."""
-        control = self.context.socket(zmq.REP)
-        control.connect(SOCKET_CONTROL)
-        incoming = self.context.socket(zmq.XSUB)
-        incoming.bind(SOCKET_PUB)
-        outgoing = self.context.socket(zmq.XPUB)
-        outgoing.bind(SOCKET_SUB)
-
-        zmq.proxy_steerable(
-            incoming, outgoing, None, control
-        )  # blocking, will unblock when a terminate message is received
-        incoming.close()
-        outgoing.close()
-
-
-class DetectionProxy:
-    """Proxies video and audio detections."""
-
-    def __init__(self) -> None:
-        self.context = zmq.Context()
-        self.control = self.context.socket(zmq.REQ)
-        self.control.bind(SOCKET_CONTROL)
-        self.runner = DetectionProxyRunner(self.context)
-        self.runner.start()
-
-    def stop(self) -> None:
-        self.control.send("TERMINATE".encode())  # tell the proxy to stop
-        self.runner.join()
-        self.context.destroy()
-
-
-class DetectionPublisher:
+class DetectionPublisher(Publisher):
    """Simplifies sending video and audio detections."""

+    topic_base = "detection/"
+
    def __init__(self, topic: DetectionTypeEnum) -> None:
-        self.topic = topic
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.PUB)
-        self.socket.connect(SOCKET_PUB)
-
-    def send_data(self, payload: any) -> None:
-        """Publish detection."""
-        self.socket.send_string(self.topic.value, flags=zmq.SNDMORE)
-        self.socket.send_pyobj(payload)
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+        topic = topic.value
+        super().__init__(topic)


-class DetectionSubscriber:
+class DetectionSubscriber(Subscriber):
    """Simplifies receiving video and audio detections."""

+    topic_base = "detection/"
+
    def __init__(self, topic: DetectionTypeEnum) -> None:
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.SUB)
-        self.socket.setsockopt_string(zmq.SUBSCRIBE, topic.value)
-        self.socket.connect(SOCKET_SUB)
+        topic = topic.value
+        super().__init__(topic)

-    def get_data(self, timeout: float = None) -> Optional[tuple[str, any]]:
-        """Returns detections or None if no update."""
-        try:
-            has_update, _, _ = zmq.select([self.socket], [], [], timeout)
-
-            if has_update:
-                topic = DetectionTypeEnum[self.socket.recv_string(flags=zmq.NOBLOCK)]
-                return (topic, self.socket.recv_pyobj())
-        except zmq.ZMQError:
-            pass
-
-        return (None, None)
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+    def check_for_update(
+        self, timeout: float = None
+    ) -> Optional[tuple[DetectionTypeEnum, any]]:
+        return super().check_for_update(timeout)
+
+    def _return_object(self, topic: str, payload: any) -> any:
+        if payload is None:
+            return (None, None)
+        return (DetectionTypeEnum[topic[len(self.topic_base) :]], payload)
@@ -14,9 +14,10 @@ from frigate.const import (
    INSERT_PREVIEW,
    REQUEST_REGION_GRID,
    UPDATE_CAMERA_ACTIVITY,
+    UPDATE_EVENT_DESCRIPTION,
    UPSERT_REVIEW_SEGMENT,
)
-from frigate.models import Previews, Recordings, ReviewSegment
+from frigate.models import Event, Previews, Recordings, ReviewSegment
from frigate.ptz.onvif import OnvifCommandEnum, OnvifController
from frigate.types import PTZMetricsTypes
from frigate.util.object import get_camera_regions_grid

@@ -128,8 +129,25 @@ class Dispatcher:
            ).execute()
        elif topic == UPDATE_CAMERA_ACTIVITY:
            self.camera_activity = payload
+        elif topic == UPDATE_EVENT_DESCRIPTION:
+            event: Event = Event.get(Event.id == payload["id"])
+            event.data["description"] = payload["description"]
+            event.save()
        elif topic == "onConnect":
-            self.publish("camera_activity", json.dumps(self.camera_activity))
+            camera_status = self.camera_activity.copy()
+
+            for camera in camera_status.keys():
+                camera_status[camera]["config"] = {
+                    "detect": self.config.cameras[camera].detect.enabled,
+                    "snapshots": self.config.cameras[camera].snapshots.enabled,
+                    "record": self.config.cameras[camera].record.enabled,
+                    "audio": self.config.cameras[camera].audio.enabled,
+                    "autotracking": self.config.cameras[
+                        camera
+                    ].onvif.autotracking.enabled,
+                }
+
+            self.publish("camera_activity", json.dumps(camera_status))
        else:
            self.publish(topic, payload, retain=False)
@@ -1,100 +1,51 @@
"""Facilitates communication between processes."""

-import zmq
-
from frigate.events.types import EventStateEnum, EventTypeEnum

-SOCKET_PUSH_PULL = "ipc:///tmp/cache/events"
-SOCKET_PUSH_PULL_END = "ipc:///tmp/cache/events_ended"
+from .zmq_proxy import Publisher, Subscriber


-class EventUpdatePublisher:
+class EventUpdatePublisher(Publisher):
    """Publishes events (objects, audio, manual)."""

+    topic_base = "event/"
+
    def __init__(self) -> None:
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.PUSH)
-        self.socket.connect(SOCKET_PUSH_PULL)
+        super().__init__("update")

    def publish(
        self, payload: tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]
    ) -> None:
        """There is no communication back to the processes."""
-        self.socket.send_pyobj(payload)
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+        super().publish(payload)


-class EventUpdateSubscriber:
+class EventUpdateSubscriber(Subscriber):
    """Receives event updates."""

+    topic_base = "event/"
+
    def __init__(self) -> None:
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.PULL)
-        self.socket.bind(SOCKET_PUSH_PULL)
-
-    def check_for_update(
-        self, timeout=1
-    ) -> tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]:
-        """Returns events or None if no update."""
-        try:
-            has_update, _, _ = zmq.select([self.socket], [], [], timeout)
-
-            if has_update:
-                return self.socket.recv_pyobj()
-        except zmq.ZMQError:
-            pass
-
-        return None
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+        super().__init__("update")


-class EventEndPublisher:
+class EventEndPublisher(Publisher):
    """Publishes events that have ended."""

+    topic_base = "event/"
+
    def __init__(self) -> None:
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.PUSH)
-        self.socket.connect(SOCKET_PUSH_PULL_END)
+        super().__init__("finalized")

    def publish(
        self, payload: tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]
    ) -> None:
        """There is no communication back to the processes."""
-        self.socket.send_pyobj(payload)
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+        super().publish(payload)


-class EventEndSubscriber:
+class EventEndSubscriber(Subscriber):
    """Receives events that have ended."""

+    topic_base = "event/"
+
    def __init__(self) -> None:
-        self.context = zmq.Context()
-        self.socket = self.context.socket(zmq.PULL)
-        self.socket.bind(SOCKET_PUSH_PULL_END)
-
-    def check_for_update(
-        self, timeout=1
-    ) -> tuple[EventTypeEnum, EventStateEnum, str, dict[str, any]]:
-        """Returns events ended or None if no update."""
-        try:
-            has_update, _, _ = zmq.select([self.socket], [], [], timeout)
-
-            if has_update:
-                return self.socket.recv_pyobj()
-        except zmq.ZMQError:
-            pass
-
-        return None
-
-    def stop(self) -> None:
-        self.socket.close()
-        self.context.destroy()
+        super().__init__("finalized")
@@ -37,14 +37,14 @@ class InterProcessCommunicator(Communicator):
                break

            try:
-                (topic, value) = self.socket.recv_pyobj(flags=zmq.NOBLOCK)
+                (topic, value) = self.socket.recv_json(flags=zmq.NOBLOCK)

                response = self._dispatcher(topic, value)

                if response is not None:
-                    self.socket.send_pyobj(response)
+                    self.socket.send_json(response)
                else:
-                    self.socket.send_pyobj([])
+                    self.socket.send_json([])
            except zmq.ZMQError:
                break

@@ -65,8 +65,8 @@ class InterProcessRequestor:

    def send_data(self, topic: str, data: any) -> any:
        """Sends data and then waits for reply."""
-        self.socket.send_pyobj((topic, data))
-        return self.socket.recv_pyobj()
+        self.socket.send_json((topic, data))
+        return self.socket.recv_json()

    def stop(self) -> None:
        self.socket.close()
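
The switch from send_pyobj/recv_pyobj to send_json/recv_json replaces pickled Python objects with plain JSON, which avoids unpickling arbitrary payloads and keeps the wire format language-neutral, at the cost of requiring JSON-serializable data (tuples arrive back as lists). A minimal REQ/REP sketch of the same round trip, with an invented socket address:

import zmq

ctx = zmq.Context()

rep = ctx.socket(zmq.REP)
rep.bind("ipc:///tmp/example_req_rep")  # hypothetical address

req = ctx.socket(zmq.REQ)
req.connect("ipc:///tmp/example_req_rep")

# send_json serializes with json.dumps, so the tuple arrives as a list.
req.send_json(("update_event_description", {"id": "abc", "description": "hi"}))
topic, data = rep.recv_json()
rep.send_json([])  # empty reply, mirroring InterProcessCommunicator
assert req.recv_json() == []

ctx.destroy()
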
190
frigate/comms/webpush.py
Normal file
@@ -0,0 +1,190 @@
"""Handle sending notifications for Frigate via Firebase."""

import datetime
import json
import logging
import os
from typing import Any, Callable

from py_vapid import Vapid01
from pywebpush import WebPusher

from frigate.comms.dispatcher import Communicator
from frigate.config import FrigateConfig
from frigate.const import CONFIG_DIR
from frigate.models import User

logger = logging.getLogger(__name__)


class WebPushClient(Communicator):  # type: ignore[misc]
    """Frigate wrapper for webpush client."""

    def __init__(self, config: FrigateConfig) -> None:
        self.config = config
        self.claim_headers: dict[str, dict[str, str]] = {}
        self.refresh: int = 0
        self.web_pushers: dict[str, list[WebPusher]] = {}
        self.expired_subs: dict[str, list[str]] = {}

        if not self.config.notifications.email:
            logger.warning("Email must be provided for push notifications to be sent.")

        # Pull keys from PEM or generate if they do not exist
        self.vapid = Vapid01.from_file(os.path.join(CONFIG_DIR, "notifications.pem"))

        users: list[User] = (
            User.select(User.username, User.notification_tokens).dicts().iterator()
        )
        for user in users:
            self.web_pushers[user["username"]] = []
            for sub in user["notification_tokens"]:
                self.web_pushers[user["username"]].append(WebPusher(sub))

    def subscribe(self, receiver: Callable) -> None:
        """Wrapper for allowing dispatcher to subscribe."""
        pass

    def check_registrations(self) -> None:
        # check for valid claim or create new one
        now = datetime.datetime.now().timestamp()
        if len(self.claim_headers) == 0 or self.refresh < now:
            self.refresh = int(
                (datetime.datetime.now() + datetime.timedelta(hours=1)).timestamp()
            )
            endpoints: set[str] = set()

            # get a unique set of push endpoints
            for pushers in self.web_pushers.values():
                for push in pushers:
                    endpoint: str = push.subscription_info["endpoint"]
                    endpoints.add(endpoint[0 : endpoint.index("/", 10)])

            # create new claim
            for endpoint in endpoints:
                claim = {
                    "sub": f"mailto:{self.config.notifications.email}",
                    "aud": endpoint,
                    "exp": self.refresh,
                }
                self.claim_headers[endpoint] = self.vapid.sign(claim)

    def cleanup_registrations(self) -> None:
        # delete any expired subs
        if len(self.expired_subs) > 0:
            for user, expired in self.expired_subs.items():
                user_subs = []

                # get all subscriptions, removing ones that are expired
                stored_user: User = User.get_by_id(user)
                for token in stored_user.notification_tokens:
                    if token["endpoint"] in expired:
                        continue

                    user_subs.append(token)

                # overwrite the database and reset web pushers
                User.update(notification_tokens=user_subs).where(
                    User.username == user
                ).execute()

                self.web_pushers[user] = []

                for sub in user_subs:
                    self.web_pushers[user].append(WebPusher(sub))

                logger.info(
                    f"Cleaned up {len(expired)} notification subscriptions for {user}"
                )

        self.expired_subs = {}

    def publish(self, topic: str, payload: Any, retain: bool = False) -> None:
        """Wrapper for publishing when client is in valid state."""
        if topic == "reviews":
            self.send_alert(json.loads(payload))

    def send_alert(self, payload: dict[str, any]) -> None:
        if not self.config.notifications.email:
            return

        self.check_registrations()

        # Only notify for alerts
        if payload["after"]["severity"] != "alert":
            return

        state = payload["type"]

        # Don't notify if message is an update and important fields don't have an update
        if (
            state == "update"
            and len(payload["before"]["data"]["objects"])
            == len(payload["after"]["data"]["objects"])
            and len(payload["before"]["data"]["zones"])
            == len(payload["after"]["data"]["zones"])
        ):
            return

        reviewId = payload["after"]["id"]
        sorted_objects: set[str] = set()

        for obj in payload["after"]["data"]["objects"]:
            if "-verified" not in obj:
                sorted_objects.add(obj)

        sorted_objects.update(payload["after"]["data"]["sub_labels"])

        camera: str = payload["after"]["camera"]
        title = f"{', '.join(sorted_objects).replace('_', ' ').title()}{' was' if state == 'end' else ''} detected in {', '.join(payload['after']['data']['zones']).replace('_', ' ').title()}"
        message = f"Detected on {camera.replace('_', ' ').title()}"
        image = f'{payload["after"]["thumb_path"].replace("/media/frigate", "")}'

        # if event is ongoing open to live view otherwise open to recordings view
        direct_url = f"/review?id={reviewId}" if state == "end" else f"/#{camera}"

        for user, pushers in self.web_pushers.items():
            for pusher in pushers:
                endpoint = pusher.subscription_info["endpoint"]

                # set headers for notification behavior
                headers = self.claim_headers[
                    endpoint[0 : endpoint.index("/", 10)]
                ].copy()
                headers["urgency"] = "high"
                ttl = 3600 if state == "end" else 0

                # send message
                resp = pusher.send(
                    headers=headers,
                    ttl=ttl,
                    data=json.dumps(
                        {
                            "title": title,
                            "message": message,
                            "direct_url": direct_url,
                            "image": image,
                            "id": reviewId,
                            "type": "alert",
                        }
                    ),
                )

                if resp.status_code == 201:
                    pass
                elif resp.status_code == 404 or resp.status_code == 410:
                    # subscription is not found or has been unsubscribed
                    if not self.expired_subs.get(user):
                        self.expired_subs[user] = []

                    # the subscription no longer exists and should be removed
                    self.expired_subs[user].append(pusher.subscription_info["endpoint"])
                else:
                    logger.warning(
                        f"Failed to send notification to {user} :: {resp.headers}"
                    )

        self.cleanup_registrations()

    def stop(self) -> None:
        pass
99
frigate/comms/zmq_proxy.py
Normal file
@@ -0,0 +1,99 @@
"""Facilitates communication over zmq proxy."""

import json
import threading
from typing import Optional

import zmq

SOCKET_PUB = "ipc:///tmp/cache/proxy_pub"
SOCKET_SUB = "ipc:///tmp/cache/proxy_sub"


class ZmqProxyRunner(threading.Thread):
    def __init__(self, context: zmq.Context[zmq.Socket]) -> None:
        threading.Thread.__init__(self)
        self.name = "detection_proxy"
        self.context = context

    def run(self) -> None:
        """Run the proxy."""
        incoming = self.context.socket(zmq.XSUB)
        incoming.bind(SOCKET_PUB)
        outgoing = self.context.socket(zmq.XPUB)
        outgoing.bind(SOCKET_SUB)

        # Blocking: this will unblock (via an exception) when we destroy the context.
        # The incoming and outgoing sockets will be closed automatically
        # when the context is destroyed as well.
        try:
            zmq.proxy(incoming, outgoing)
        except zmq.ZMQError:
            pass


class ZmqProxy:
    """Proxies video and audio detections."""

    def __init__(self) -> None:
        self.context = zmq.Context()
        self.runner = ZmqProxyRunner(self.context)
        self.runner.start()

    def stop(self) -> None:
        # destroying the context will tell the proxy to stop
        self.context.destroy()
        self.runner.join()


class Publisher:
    """Publishes messages."""

    topic_base: str = ""

    def __init__(self, topic: str = "") -> None:
        self.topic = f"{self.topic_base}{topic}"
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.PUB)
        self.socket.connect(SOCKET_PUB)

    def publish(self, payload: any, sub_topic: str = "") -> None:
        """Publish message."""
        self.socket.send_string(f"{self.topic}{sub_topic} {json.dumps(payload)}")

    def stop(self) -> None:
        self.socket.close()
        self.context.destroy()


class Subscriber:
    """Receives messages."""

    topic_base: str = ""

    def __init__(self, topic: str = "") -> None:
        self.topic = f"{self.topic_base}{topic}"
        self.context = zmq.Context()
        self.socket = self.context.socket(zmq.SUB)
        self.socket.setsockopt_string(zmq.SUBSCRIBE, self.topic)
        self.socket.connect(SOCKET_SUB)

    def check_for_update(self, timeout: float = 1) -> Optional[tuple[str, any]]:
        """Returns message or None if no update."""
        try:
            has_update, _, _ = zmq.select([self.socket], [], [], timeout)

            if has_update:
                parts = self.socket.recv_string(flags=zmq.NOBLOCK).split(maxsplit=1)
                return self._return_object(parts[0], json.loads(parts[1]))
        except zmq.ZMQError:
            pass

        return self._return_object("", None)

    def stop(self) -> None:
        self.socket.close()
        self.context.destroy()

    def _return_object(self, topic: str, payload: any) -> any:
        return payload
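
A small round-trip sketch of the Publisher/Subscriber helpers above, run through the proxy (single process, for illustration only; it assumes /tmp/cache exists, as it does inside the Frigate container, and the short sleep works around ZeroMQ's slow-joiner behavior):

import time

from frigate.comms.zmq_proxy import Publisher, Subscriber, ZmqProxy

proxy = ZmqProxy()

pub = Publisher(topic="detection/video")
sub = Subscriber(topic="detection/")  # prefix match: receives all detection/* topics

time.sleep(0.5)  # allow the sockets to connect before publishing

pub.publish({"camera": "front_door", "label": "person"})
payload = sub.check_for_update(timeout=2)
print(payload)  # {'camera': 'front_door', 'label': 'person'}

pub.stop()
sub.stop()
proxy.stop()
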
@@ -169,6 +169,11 @@ class AuthConfig(FrigateBaseModel):
    hash_iterations: int = Field(default=600000, title="Password hash iterations")


+class NotificationConfig(FrigateBaseModel):
+    enabled: bool = Field(default=False, title="Enable notifications")
+    email: Optional[str] = Field(default=None, title="Email required for push.")
+
+
class StatsConfig(FrigateBaseModel):
    amd_gpu_stats: bool = Field(default=True, title="Enable AMD GPU stats.")
    intel_gpu_stats: bool = Field(default=True, title="Enable Intel GPU stats.")

@@ -730,6 +735,38 @@ class ReviewConfig(FrigateBaseModel):
    )


+class SemanticSearchConfig(FrigateBaseModel):
+    enabled: bool = Field(default=False, title="Enable semantic search.")
+    reindex: Optional[bool] = Field(
+        default=False, title="Reindex all detections on startup."
+    )
+
+
+class GenAIProviderEnum(str, Enum):
+    openai = "openai"
+    gemini = "gemini"
+    ollama = "ollama"
+
+
+class GenAIConfig(FrigateBaseModel):
+    enabled: bool = Field(default=False, title="Enable GenAI.")
+    provider: GenAIProviderEnum = Field(
+        default=GenAIProviderEnum.openai, title="GenAI provider."
+    )
+    base_url: Optional[str] = Field(None, title="Provider base url.")
+    api_key: Optional[str] = Field(None, title="Provider API key.")
+    model: str = Field(default="gpt-4o", title="GenAI model.")
+    prompt: str = Field(
+        default="Describe the {label} in the sequence of images with as much detail as possible. Do not describe the background.",
+        title="Default caption prompt.",
+    )
+    object_prompts: Dict[str, str] = Field(default={}, title="Object specific prompts.")
+
+
+class GenAICameraConfig(FrigateBaseModel):
+    enabled: bool = Field(default=False, title="Enable GenAI for camera.")
+
+
class AudioConfig(FrigateBaseModel):
    enabled: bool = Field(default=False, title="Enable audio events.")
    max_not_heard: int = Field(

@@ -1011,6 +1048,9 @@ class CameraConfig(FrigateBaseModel):
    review: ReviewConfig = Field(
        default_factory=ReviewConfig, title="Review configuration."
    )
+    genai: GenAICameraConfig = Field(
+        default_factory=GenAICameraConfig, title="Generative AI configuration."
+    )
    audio: AudioConfig = Field(
        default_factory=AudioConfig, title="Audio events configuration."
    )

@@ -1326,6 +1366,9 @@ class FrigateConfig(FrigateBaseModel):
        default_factory=dict, title="Frigate environment variables."
    )
    ui: UIConfig = Field(default_factory=UIConfig, title="UI configuration.")
+    notifications: NotificationConfig = Field(
+        default_factory=NotificationConfig, title="Notification Config"
+    )
    telemetry: TelemetryConfig = Field(
        default_factory=TelemetryConfig, title="Telemetry configuration."
    )

@@ -1363,6 +1406,12 @@ class FrigateConfig(FrigateBaseModel):
    review: ReviewConfig = Field(
        default_factory=ReviewConfig, title="Review configuration."
    )
+    semantic_search: SemanticSearchConfig = Field(
+        default_factory=SemanticSearchConfig, title="Semantic search configuration."
+    )
+    genai: GenAIConfig = Field(
+        default_factory=GenAIConfig, title="Generative AI configuration."
+    )
    audio: AudioConfig = Field(
        default_factory=AudioConfig, title="Global Audio events configuration."
    )

@@ -1397,6 +1446,10 @@ class FrigateConfig(FrigateBaseModel):
        config.mqtt.user = config.mqtt.user.format(**FRIGATE_ENV_VARS)
        config.mqtt.password = config.mqtt.password.format(**FRIGATE_ENV_VARS)

+        # GenAI substitution
+        if config.genai.api_key:
+            config.genai.api_key = config.genai.api_key.format(**FRIGATE_ENV_VARS)
+
        # set default min_score for object attributes
        for attribute in ALL_ATTRIBUTE_LABELS:
            if not config.objects.filters.get(attribute):

@@ -1418,6 +1471,7 @@ class FrigateConfig(FrigateBaseModel):
        "live": ...,
        "objects": ...,
        "review": ...,
+        "genai": {"enabled"},
        "motion": ...,
        "detect": ...,
        "ffmpeg": ...,
|
||||
UPSERT_REVIEW_SEGMENT = "upsert_review_segment"
|
||||
CLEAR_ONGOING_REVIEW_SEGMENTS = "clear_ongoing_review_segments"
|
||||
UPDATE_CAMERA_ACTIVITY = "update_camera_activity"
|
||||
UPDATE_EVENT_DESCRIPTION = "update_event_description"
|
||||
|
||||
# Stats Values
|
||||
|
||||
|
||||
294
frigate/detectors/plugins/hailo8l.py
Normal file
@@ -0,0 +1,294 @@
import logging
import os
import urllib.request

import numpy as np

try:
    from hailo_platform import (
        HEF,
        ConfigureParams,
        FormatType,
        HailoRTException,
        HailoStreamInterface,
        InferVStreams,
        InputVStreamParams,
        OutputVStreamParams,
        VDevice,
    )
except ModuleNotFoundError:
    pass

from pydantic import BaseModel, Field
from typing_extensions import Literal

from frigate.detectors.detection_api import DetectionApi
from frigate.detectors.detector_config import BaseDetectorConfig
from frigate.detectors.util import preprocess  # Assuming this function is available

# Set up logging
logger = logging.getLogger(__name__)

# Define the detector key for Hailo
DETECTOR_KEY = "hailo8l"


# Configuration class for model settings
class ModelConfig(BaseModel):
    path: str = Field(default=None, title="Model Path")  # Path to the HEF file


# Configuration class for the Hailo detector
class HailoDetectorConfig(BaseDetectorConfig):
    type: Literal[DETECTOR_KEY]  # Type of the detector
    device: str = Field(default="PCIe", title="Device Type")  # Device type (e.g., PCIe)


# Hailo detector class implementation
class HailoDetector(DetectionApi):
    type_key = DETECTOR_KEY  # Set the type key to the Hailo detector key

    def __init__(self, detector_config: HailoDetectorConfig):
        # Initialize device type and model path from the configuration
        self.h8l_device_type = detector_config.device
        self.h8l_model_path = detector_config.model.path
        self.h8l_model_height = detector_config.model.height
        self.h8l_model_width = detector_config.model.width
        self.h8l_model_type = detector_config.model.model_type
        self.h8l_tensor_format = detector_config.model.input_tensor
        self.h8l_pixel_format = detector_config.model.input_pixel_format
        self.model_url = "https://hailo-model-zoo.s3.eu-west-2.amazonaws.com/ModelZoo/Compiled/v2.11.0/hailo8l/ssd_mobilenet_v1.hef"
        self.cache_dir = "/config/model_cache/h8l_cache"
        self.expected_model_filename = "ssd_mobilenet_v1.hef"
        output_type = "FLOAT32"

        logger.info(f"Initializing Hailo device as {self.h8l_device_type}")
        self.check_and_prepare_model()
        try:
            # Validate device type
            if self.h8l_device_type not in ["PCIe", "M.2"]:
                raise ValueError(f"Unsupported device type: {self.h8l_device_type}")

            # Initialize the Hailo device
            self.target = VDevice()
            # Load the HEF (Hailo's binary format for neural networks)
            self.hef = HEF(self.h8l_model_path)
            # Create configuration parameters from the HEF
            self.configure_params = ConfigureParams.create_from_hef(
                hef=self.hef, interface=HailoStreamInterface.PCIe
            )
            # Configure the device with the HEF
            self.network_groups = self.target.configure(self.hef, self.configure_params)
            self.network_group = self.network_groups[0]
            self.network_group_params = self.network_group.create_params()

            # Create input and output virtual stream parameters
            self.input_vstreams_params = InputVStreamParams.make(
                self.network_group,
                format_type=self.hef.get_input_vstream_infos()[0].format.type,
            )
            self.output_vstreams_params = OutputVStreamParams.make(
                self.network_group, format_type=getattr(FormatType, output_type)
            )

            # Get input and output stream information from the HEF
            self.input_vstream_info = self.hef.get_input_vstream_infos()
            self.output_vstream_info = self.hef.get_output_vstream_infos()

            logger.info("Hailo device initialized successfully")
            logger.debug(f"[__init__] Model Path: {self.h8l_model_path}")
            logger.debug(f"[__init__] Input Tensor Format: {self.h8l_tensor_format}")
            logger.debug(f"[__init__] Input Pixel Format: {self.h8l_pixel_format}")
            logger.debug(f"[__init__] Input VStream Info: {self.input_vstream_info[0]}")
            logger.debug(
                f"[__init__] Output VStream Info: {self.output_vstream_info[0]}"
            )
        except HailoRTException as e:
            logger.error(f"HailoRTException during initialization: {e}")
            raise
        except Exception as e:
            logger.error(f"Failed to initialize Hailo device: {e}")
            raise

    def check_and_prepare_model(self):
        # Ensure the cache directory exists
        if not os.path.exists(self.cache_dir):
            os.makedirs(self.cache_dir)

        # Check for the expected model file
        model_file_path = os.path.join(self.cache_dir, self.expected_model_filename)
        if not os.path.isfile(model_file_path):
            logger.info(
                f"A model file was not found at {model_file_path}, downloading one from {self.model_url}."
            )
            urllib.request.urlretrieve(self.model_url, model_file_path)
            logger.info(f"A model file was downloaded to {model_file_path}.")
        else:
            logger.info(
                f"A model file already exists at {model_file_path}, not downloading one."
            )

    def detect_raw(self, tensor_input):
        logger.debug("[detect_raw] Entering function")
        logger.debug(
            f"[detect_raw] The `tensor_input` = {tensor_input} tensor_input shape = {tensor_input.shape}"
        )

        if tensor_input is None:
            raise ValueError(
                "[detect_raw] The 'tensor_input' argument must be provided"
            )

        # Ensure tensor_input is a numpy array
        if isinstance(tensor_input, list):
            tensor_input = np.array(tensor_input)
            logger.debug(
                f"[detect_raw] Converted tensor_input to numpy array: shape {tensor_input.shape}"
            )

        # Preprocess the tensor input using Frigate's preprocess function
        processed_tensor = preprocess(
            tensor_input, (1, self.h8l_model_height, self.h8l_model_width, 3), np.uint8
        )
        logger.debug(
            f"[detect_raw] Tensor data and shape after preprocessing: {processed_tensor} {processed_tensor.shape}"
        )

        input_data = processed_tensor
        logger.debug(
            f"[detect_raw] Input data for inference shape: {processed_tensor.shape}, dtype: {processed_tensor.dtype}"
        )

        try:
            with InferVStreams(
                self.network_group,
                self.input_vstreams_params,
                self.output_vstreams_params,
            ) as infer_pipeline:
                input_dict = {}
                if isinstance(input_data, dict):
                    input_dict = input_data
                    logger.debug("[detect_raw] Input is a dictionary.")
                elif isinstance(input_data, (list, tuple)):
                    for idx, layer_info in enumerate(self.input_vstream_info):
                        input_dict[layer_info.name] = input_data[idx]
                    logger.debug("[detect_raw] Converted input from a list/tuple.")
                else:
                    if len(input_data.shape) == 3:
                        input_data = np.expand_dims(input_data, axis=0)
                        logger.debug("[detect_raw] Converted input from an array.")
                    input_dict[self.input_vstream_info[0].name] = input_data

                logger.debug(
                    f"[detect_raw] Input dictionary for inference keys: {input_dict.keys()}"
                )

                with self.network_group.activate(self.network_group_params):
                    raw_output = infer_pipeline.infer(input_dict)
                    logger.debug(f"[detect_raw] Raw inference output: {raw_output}")

                    if self.output_vstream_info[0].name not in raw_output:
                        logger.error(
                            f"[detect_raw] Missing output stream {self.output_vstream_info[0].name} in inference results"
                        )
                        return np.zeros((20, 6), np.float32)

                    raw_output = raw_output[self.output_vstream_info[0].name][0]
                    logger.debug(
                        f"[detect_raw] Raw output for stream {self.output_vstream_info[0].name}: {raw_output}"
                    )

            # Process the raw output
            detections = self.process_detections(raw_output)
            if len(detections) == 0:
                logger.debug(
                    "[detect_raw] No detections found after processing. Setting default values."
                )
                return np.zeros((20, 6), np.float32)
            else:
                formatted_detections = detections
                if (
                    formatted_detections.shape[1] != 6
                ):  # Ensure the formatted detections have 6 columns
                    logger.error(
                        f"[detect_raw] Unexpected shape for formatted detections: {formatted_detections.shape}. Expected (20, 6)."
                    )
                    return np.zeros((20, 6), np.float32)
                return formatted_detections
        except HailoRTException as e:
            logger.error(f"[detect_raw] HailoRTException during inference: {e}")
            return np.zeros((20, 6), np.float32)
        except Exception as e:
            logger.error(f"[detect_raw] Exception during inference: {e}")
            return np.zeros((20, 6), np.float32)
        finally:
            logger.debug("[detect_raw] Exiting function")

    def process_detections(self, raw_detections, threshold=0.5):
        boxes, scores, classes = [], [], []
        num_detections = 0

        logger.debug(f"[process_detections] Raw detections: {raw_detections}")

        for i, detection_set in enumerate(raw_detections):
            if not isinstance(detection_set, np.ndarray) or detection_set.size == 0:
                logger.debug(
                    f"[process_detections] Detection set {i} is empty or not an array, skipping."
                )
                continue

            logger.debug(
                f"[process_detections] Detection set {i} shape: {detection_set.shape}"
            )

            for detection in detection_set:
                if detection.shape[0] == 0:
                    logger.debug(
                        f"[process_detections] Detection in set {i} is empty, skipping."
                    )
                    continue

                ymin, xmin, ymax, xmax = detection[:4]
                score = np.clip(detection[4], 0, 1)  # Use np.clip for clarity

                if score < threshold:
                    logger.debug(
                        f"[process_detections] Detection in set {i} has a score {score} below threshold {threshold}. Skipping."
                    )
                    continue

                logger.debug(
                    f"[process_detections] Adding detection with coordinates: ({xmin}, {ymin}), ({xmax}, {ymax}) and score: {score}"
                )
                boxes.append([ymin, xmin, ymax, xmax])
                scores.append(score)
                classes.append(i)
                num_detections += 1

        logger.debug(
            f"[process_detections] Boxes: {boxes}, Scores: {scores}, Classes: {classes}, Num detections: {num_detections}"
        )

        if num_detections == 0:
            logger.debug("[process_detections] No valid detections found.")
            return np.zeros((20, 6), np.float32)

        combined = np.hstack(
            (
                np.array(classes)[:, np.newaxis],
                np.array(scores)[:, np.newaxis],
                np.array(boxes),
            )
        )

        if combined.shape[0] < 20:
            padding = np.zeros(
                (20 - combined.shape[0], combined.shape[1]), dtype=combined.dtype
            )
            combined = np.vstack((combined, padding))

        logger.debug(
            f"[process_detections] Combined detections (padded to 20 if necessary): {np.array_str(combined, precision=4, suppress_small=True)}"
        )

        return combined[:20, :6]
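
The detector contract is a fixed (20, 6) float array of [class, score, ymin, xmin, ymax, xmax] rows; the hstack/vstack padding at the end of process_detections can be reproduced in isolation:

import numpy as np

# Two hypothetical detections: [class, score, ymin, xmin, ymax, xmax]
classes = np.array([0, 1])[:, np.newaxis]
scores = np.array([0.91, 0.62])[:, np.newaxis]
boxes = np.array([[0.1, 0.2, 0.5, 0.6], [0.3, 0.1, 0.9, 0.4]])

combined = np.hstack((classes, scores, boxes))

# Pad with zero rows so downstream code always sees a (20, 6) array.
if combined.shape[0] < 20:
    padding = np.zeros((20 - combined.shape[0], combined.shape[1]), dtype=combined.dtype)
    combined = np.vstack((combined, padding))

print(combined.shape)  # (20, 6)
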
@@ -23,7 +23,6 @@ model_chache_dir = "/config/model_cache/rknn_cache/"
class RknnDetectorConfig(BaseDetectorConfig):
    type: Literal[DETECTOR_KEY]
    num_cores: int = Field(default=0, ge=0, le=3, title="Number of NPU cores to use.")
-    purge_model_cache: bool = Field(default=True)


class Rknn(DetectionApi):

@@ -36,7 +35,9 @@ class Rknn(DetectionApi):
        core_mask = 2**config.num_cores - 1
        soc = self.get_soc()

-        model_props = self.parse_model_input(config.model.path, soc)
+        model_path = config.model.path or "deci-fp16-yolonas_s"
+
+        model_props = self.parse_model_input(model_path, soc)

        if model_props["preset"]:
            config.model.model_type = model_props["model_type"]
91
frigate/embeddings/__init__.py
Normal file
@@ -0,0 +1,91 @@
"""ChromaDB embeddings database."""

import json
import logging
import multiprocessing as mp
import signal
import threading
from types import FrameType
from typing import Optional

from playhouse.sqliteq import SqliteQueueDatabase
from setproctitle import setproctitle

from frigate.config import FrigateConfig
from frigate.const import CONFIG_DIR
from frigate.models import Event
from frigate.util.services import listen

from .embeddings import Embeddings
from .maintainer import EmbeddingMaintainer
from .util import ZScoreNormalization

logger = logging.getLogger(__name__)


def manage_embeddings(config: FrigateConfig) -> None:
    # Only initialize embeddings if semantic search is enabled
    if not config.semantic_search.enabled:
        return

    stop_event = mp.Event()

    def receiveSignal(signalNumber: int, frame: Optional[FrameType]) -> None:
        stop_event.set()

    signal.signal(signal.SIGTERM, receiveSignal)
    signal.signal(signal.SIGINT, receiveSignal)

    threading.current_thread().name = "process:embeddings_manager"
    setproctitle("frigate.embeddings_manager")
    listen()

    # Configure Frigate DB
    db = SqliteQueueDatabase(
        config.database.path,
        pragmas={
            "auto_vacuum": "FULL",  # Does not defragment database
            "cache_size": -512 * 1000,  # 512MB of cache
            "synchronous": "NORMAL",  # Safe when using WAL https://www.sqlite.org/pragma.html#pragma_synchronous
        },
        timeout=max(60, 10 * len([c for c in config.cameras.values() if c.enabled])),
    )
    models = [Event]
    db.bind(models)

    embeddings = Embeddings()

    # Check if we need to re-index events
    if config.semantic_search.reindex:
        embeddings.reindex()

    maintainer = EmbeddingMaintainer(
        config,
        stop_event,
    )
    maintainer.start()


class EmbeddingsContext:
    def __init__(self):
        self.embeddings = Embeddings()
        self.thumb_stats = ZScoreNormalization()
        self.desc_stats = ZScoreNormalization()

        # load stats from disk
        try:
            with open(f"{CONFIG_DIR}/.search_stats.json", "r") as f:
                data = json.loads(f.read())
                self.thumb_stats.from_dict(data["thumb_stats"])
                self.desc_stats.from_dict(data["desc_stats"])
        except FileNotFoundError:
            pass

    def save_stats(self):
        """Write the stats to disk as JSON on exit."""
        contents = {
            "thumb_stats": self.thumb_stats.to_dict(),
            "desc_stats": self.desc_stats.to_dict(),
        }
        with open(f"{CONFIG_DIR}/.search_stats.json", "w") as f:
            f.write(json.dumps(contents))
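
ZScoreNormalization is imported from .util but its implementation is not part of this diff. Judging only from how it is used here (normalize over lists of distances, to_dict/from_dict for the stats file), a plausible minimal version might look like the following; this is an assumption, not the actual frigate.embeddings.util code:

class ZScoreNormalization:
    """Running z-score normalization over observed distances (sketch)."""

    def __init__(self):
        self.n = 0
        self.mean = 0.0
        self.m2 = 0.0  # sum of squared deviations (Welford's algorithm)

    @property
    def stddev(self):
        return (self.m2 / self.n) ** 0.5 if self.n > 1 else 0.0

    def normalize(self, distances: list[float]) -> list[float]:
        # Fold the new observations into the running stats, then scale.
        for x in distances:
            self.n += 1
            delta = x - self.mean
            self.mean += delta / self.n
            self.m2 += delta * (x - self.mean)
        sd = self.stddev
        return [(x - self.mean) / sd if sd else 0.0 for x in distances]

    def to_dict(self) -> dict:
        return {"n": self.n, "mean": self.mean, "m2": self.m2}

    def from_dict(self, data: dict) -> None:
        self.n, self.mean, self.m2 = data["n"], data["mean"], data["m2"]
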
163
frigate/embeddings/embeddings.py
Normal file
163
frigate/embeddings/embeddings.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""ChromaDB embeddings database."""
|
||||
|
||||
import base64
|
||||
import io
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
from playhouse.shortcuts import model_to_dict
|
||||
|
||||
from frigate.models import Event
|
||||
|
||||
# Squelch posthog logging
|
||||
logging.getLogger("chromadb.telemetry.product.posthog").setLevel(logging.CRITICAL)
|
||||
|
||||
# Hotsawp the sqlite3 module for Chroma compatibility
|
||||
try:
|
||||
from chromadb import Collection
|
||||
from chromadb import HttpClient as ChromaClient
|
||||
from chromadb.config import Settings
|
||||
|
||||
from .functions.clip import ClipEmbedding
|
||||
from .functions.minilm_l6_v2 import MiniLMEmbedding
|
||||
except RuntimeError:
|
||||
__import__("pysqlite3")
|
||||
sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")
|
||||
from chromadb import Collection
|
||||
from chromadb import HttpClient as ChromaClient
|
||||
from chromadb.config import Settings
|
||||
|
||||
from .functions.clip import ClipEmbedding
|
||||
from .functions.minilm_l6_v2 import MiniLMEmbedding
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_metadata(event: Event) -> dict:
|
||||
"""Extract valid event metadata."""
|
||||
event_dict = model_to_dict(event)
|
||||
return (
|
||||
{
|
||||
k: v
|
||||
for k, v in event_dict.items()
|
||||
if k not in ["id", "thumbnail"]
|
||||
and v is not None
|
||||
and isinstance(v, (str, int, float, bool))
|
||||
}
|
||||
| {
|
||||
k: v
|
||||
for k, v in event_dict["data"].items()
|
||||
if k not in ["description"]
|
||||
and v is not None
|
||||
and isinstance(v, (str, int, float, bool))
|
||||
}
|
||||
| {
|
||||
# Metadata search doesn't support $contains
|
||||
# and an event can have multiple zones, so
|
||||
# we need to create a key for each zone
|
||||
f"{k}_{x}": True
|
||||
for k, v in event_dict.items()
|
||||
if isinstance(v, list) and len(v) > 0
|
||||
for x in v
|
||||
if isinstance(x, str)
|
||||
}
|
||||
)
|
||||
class Embeddings:
    """ChromaDB embeddings database."""

    def __init__(self) -> None:
        self.client: ChromaClient = ChromaClient(
            host="127.0.0.1",
            settings=Settings(anonymized_telemetry=False),
        )

    @property
    def thumbnail(self) -> Collection:
        return self.client.get_or_create_collection(
            name="event_thumbnail", embedding_function=ClipEmbedding()
        )

    @property
    def description(self) -> Collection:
        return self.client.get_or_create_collection(
            name="event_description", embedding_function=MiniLMEmbedding()
        )

    def reindex(self) -> None:
        """Reindex all event embeddings."""
        logger.info("Indexing event embeddings...")
        self.client.reset()

        st = time.time()
        totals = {
            "thumb": 0,
            "desc": 0,
        }

        batch_size = 100
        current_page = 1
        events = (
            Event.select()
            .where(
                ((Event.has_clip == True) | (Event.has_snapshot == True))
                & Event.thumbnail.is_null(False)
            )
            .order_by(Event.start_time.desc())
            .paginate(current_page, batch_size)
        )

        while len(events) > 0:
            thumbnails = {"ids": [], "images": [], "metadatas": []}
            descriptions = {"ids": [], "documents": [], "metadatas": []}

            event: Event
            for event in events:
                metadata = get_metadata(event)
                thumbnail = base64.b64decode(event.thumbnail)
                img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
                thumbnails["ids"].append(event.id)
                thumbnails["images"].append(img)
                thumbnails["metadatas"].append(metadata)
                if event.data.get("description") is not None:
                    descriptions["ids"].append(event.id)
                    descriptions["documents"].append(event.data["description"])
                    descriptions["metadatas"].append(metadata)

            if len(thumbnails["ids"]) > 0:
                totals["thumb"] += len(thumbnails["ids"])
                self.thumbnail.upsert(
                    images=thumbnails["images"],
                    metadatas=thumbnails["metadatas"],
                    ids=thumbnails["ids"],
                )

            if len(descriptions["ids"]) > 0:
                totals["desc"] += len(descriptions["ids"])
                self.description.upsert(
                    documents=descriptions["documents"],
                    metadatas=descriptions["metadatas"],
                    ids=descriptions["ids"],
                )

            current_page += 1
            events = (
                Event.select()
                .where(
                    ((Event.has_clip == True) | (Event.has_snapshot == True))
                    & Event.thumbnail.is_null(False)
                )
                .order_by(Event.start_time.desc())
                .paginate(current_page, batch_size)
            )

        logger.info(
            "Embedded %d thumbnails and %d descriptions in %s seconds",
            totals["thumb"],
            totals["desc"],
            time.time() - st,
        )
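One subtlety in the where() clauses above: peewee overloads | as SQL OR, and Python gives | higher precedence than ==, so each comparison needs its own parentheses:

# Parenthesize per comparison when combining peewee expressions with |:
clause = (Event.has_clip == True) | (Event.has_snapshot == True)
# Unparenthesized, `|` binds first and the expression chains as
#   Event.has_clip == (True | Event.has_snapshot) == True
# which is not the intended OR of the two comparisons.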
63 frigate/embeddings/functions/clip.py Normal file
@@ -0,0 +1,63 @@
"""CLIP Embeddings for Frigate."""

import os
from typing import Tuple, Union

import onnxruntime as ort
from chromadb import EmbeddingFunction, Embeddings
from chromadb.api.types import (
    Documents,
    Images,
    is_document,
    is_image,
)
from onnx_clip import OnnxClip

from frigate.const import MODEL_CACHE_DIR


class Clip(OnnxClip):
    """Override load models to download to cache directory."""

    @staticmethod
    def _load_models(
        model: str,
        silent: bool,
    ) -> Tuple[ort.InferenceSession, ort.InferenceSession]:
        """
        These models are a part of the container. Treat them as such.
        """
        if model == "ViT-B/32":
            IMAGE_MODEL_FILE = "clip_image_model_vitb32.onnx"
            TEXT_MODEL_FILE = "clip_text_model_vitb32.onnx"
        elif model == "RN50":
            IMAGE_MODEL_FILE = "clip_image_model_rn50.onnx"
            TEXT_MODEL_FILE = "clip_text_model_rn50.onnx"
        else:
            raise ValueError(f"Unexpected model {model}. No `.onnx` file found.")

        models = []
        for model_file in [IMAGE_MODEL_FILE, TEXT_MODEL_FILE]:
            path = os.path.join(MODEL_CACHE_DIR, "clip", model_file)
            models.append(OnnxClip._load_model(path, silent))

        return models[0], models[1]


class ClipEmbedding(EmbeddingFunction):
    """Embedding function for CLIP model used in Chroma."""

    def __init__(self, model: str = "ViT-B/32"):
        """Initialize CLIP Embedding function."""
        self.model = Clip(model)

    def __call__(self, input: Union[Documents, Images]) -> Embeddings:
        embeddings: Embeddings = []
        for item in input:
            if is_image(item):
                result = self.model.get_image_embeddings([item])
                embeddings.append(result[0, :].tolist())
            elif is_document(item):
                result = self.model.get_text_embeddings([item])
                embeddings.append(result[0, :].tolist())
        return embeddings
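The class satisfies Chroma's EmbeddingFunction protocol, so one callable serves both image and text inputs in the same embedding space. A minimal sketch, assuming the ONNX files are already present under MODEL_CACHE_DIR:

# Sketch: the same callable embeds images (numpy arrays) and text documents.
import numpy as np

embed = ClipEmbedding()  # loads ViT-B/32 from MODEL_CACHE_DIR
frame = np.zeros((224, 224, 3), dtype=np.uint8)  # stand-in RGB image
vectors = embed([frame, "a person walking a dog"])
# -> two 512-dimensional float lists (ViT-B/32) in a shared space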
11 frigate/embeddings/functions/minilm_l6_v2.py Normal file
@@ -0,0 +1,11 @@
"""Embedding function for ONNX MiniLM-L6 model used in Chroma."""

from chromadb.utils.embedding_functions import ONNXMiniLM_L6_V2

from frigate.const import MODEL_CACHE_DIR


class MiniLMEmbedding(ONNXMiniLM_L6_V2):
    """Override DOWNLOAD_PATH to download to cache directory."""

    DOWNLOAD_PATH = f"{MODEL_CACHE_DIR}/all-MiniLM-L6-v2"
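The subclass only redirects where the model is stored; embedding calls are inherited from Chroma's ONNX embedder. A minimal sketch:

# Sketch: text-only embedding via the inherited __call__.
embed = MiniLMEmbedding()                # fetches the model to MODEL_CACHE_DIR on first use
vectors = embed(["person at the door"])  # -> one 384-dimensional vector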
197 frigate/embeddings/maintainer.py Normal file
@@ -0,0 +1,197 @@
"""Maintain embeddings in Chroma."""

import base64
import io
import logging
import threading
from multiprocessing.synchronize import Event as MpEvent
from typing import Optional

import cv2
import numpy as np
from peewee import DoesNotExist
from PIL import Image

from frigate.comms.events_updater import EventEndSubscriber, EventUpdateSubscriber
from frigate.comms.inter_process import InterProcessRequestor
from frigate.config import FrigateConfig
from frigate.const import UPDATE_EVENT_DESCRIPTION
from frigate.events.types import EventTypeEnum
from frigate.genai import get_genai_client
from frigate.models import Event
from frigate.util.image import SharedMemoryFrameManager, calculate_region

from .embeddings import Embeddings, get_metadata

logger = logging.getLogger(__name__)


class EmbeddingMaintainer(threading.Thread):
    """Handle embedding queue and post event updates."""

    def __init__(
        self,
        config: FrigateConfig,
        stop_event: MpEvent,
    ) -> None:
        threading.Thread.__init__(self)
        self.name = "embeddings_maintainer"
        self.config = config
        self.embeddings = Embeddings()
        self.event_subscriber = EventUpdateSubscriber()
        self.event_end_subscriber = EventEndSubscriber()
        self.frame_manager = SharedMemoryFrameManager()
        # create communication for updating event descriptions
        self.requestor = InterProcessRequestor()
        self.stop_event = stop_event
        self.tracked_events = {}
        self.genai_client = get_genai_client(config.genai)

    def run(self) -> None:
        """Maintain a Chroma vector database for semantic search."""
        while not self.stop_event.is_set():
            self._process_updates()
            self._process_finalized()

        self.event_subscriber.stop()
        self.event_end_subscriber.stop()
        self.requestor.stop()
        logger.info("Exiting embeddings maintenance...")

    def _process_updates(self) -> None:
        """Process event updates."""
        update = self.event_subscriber.check_for_update()

        if update is None:
            return

        source_type, _, camera, data = update

        if not camera or source_type != EventTypeEnum.tracked_object:
            return

        camera_config = self.config.cameras[camera]
        if data["id"] not in self.tracked_events:
            self.tracked_events[data["id"]] = []

        # Create our own thumbnail based on the bounding box and the frame time
        try:
            frame_id = f"{camera}{data['frame_time']}"
            yuv_frame = self.frame_manager.get(frame_id, camera_config.frame_shape_yuv)
            data["thumbnail"] = self._create_thumbnail(yuv_frame, data["box"])
            self.tracked_events[data["id"]].append(data)
            self.frame_manager.close(frame_id)
        except FileNotFoundError:
            pass

    def _process_finalized(self) -> None:
        """Process the end of an event."""
        while True:
            ended = self.event_end_subscriber.check_for_update()

            if ended is None:
                break

            event_id, camera, updated_db = ended
            camera_config = self.config.cameras[camera]

            if updated_db:
                try:
                    event: Event = Event.get(Event.id == event_id)
                except DoesNotExist:
                    continue

                # Skip the event if not an object
                if event.data.get("type") != "object":
                    continue

                # Extract valid event metadata
                metadata = get_metadata(event)
                thumbnail = base64.b64decode(event.thumbnail)

                # Embed the thumbnail
                self._embed_thumbnail(event_id, thumbnail, metadata)

                if (
                    camera_config.genai.enabled
                    and self.genai_client is not None
                    and event.data.get("description") is None
                ):
                    # Generate the description. Call happens in a thread since it is network bound.
                    threading.Thread(
                        target=self._embed_description,
                        name=f"_embed_description_{event.id}",
                        daemon=True,
                        args=(
                            event,
                            [
                                data["thumbnail"]
                                for data in self.tracked_events[event_id]
                            ]
                            if len(self.tracked_events.get(event_id, [])) > 0
                            else [thumbnail],
                            metadata,
                        ),
                    ).start()

            # Delete tracked events based on the event_id
            if event_id in self.tracked_events:
                del self.tracked_events[event_id]

    def _create_thumbnail(self, yuv_frame, box, height=500) -> Optional[bytes]:
        """Return jpg thumbnail of a region of the frame."""
        frame = cv2.cvtColor(yuv_frame, cv2.COLOR_YUV2BGR_I420)
        region = calculate_region(
            frame.shape, box[0], box[1], box[2], box[3], height, multiplier=1.4
        )
        frame = frame[region[1] : region[3], region[0] : region[2]]
        width = int(height * frame.shape[1] / frame.shape[0])
        frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA)
        ret, jpg = cv2.imencode(".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), 100])

        if ret:
            return jpg.tobytes()

        return None

    def _embed_thumbnail(self, event_id: str, thumbnail: bytes, metadata: dict) -> None:
        """Embed the thumbnail for an event."""

        # Encode the thumbnail
        img = np.array(Image.open(io.BytesIO(thumbnail)).convert("RGB"))
        self.embeddings.thumbnail.upsert(
            images=[img],
            metadatas=[metadata],
            ids=[event_id],
        )

    def _embed_description(
        self, event: Event, thumbnails: list[bytes], metadata: dict
    ) -> None:
        """Embed the description for an event."""

        description = self.genai_client.generate_description(thumbnails, metadata)

        if description is None:
            logger.debug("Failed to generate description for %s", event.id)
            return

        # fire and forget description update
        self.requestor.send_data(
            UPDATE_EVENT_DESCRIPTION,
            {"id": event.id, "description": description},
        )

        # Encode the description
        self.embeddings.description.upsert(
            documents=[description],
            metadatas=[metadata],
            ids=[event.id],
        )

        logger.debug(
            "Generated description for %s (%d images): %s",
            event.id,
            len(thumbnails),
            description,
        )
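In _create_thumbnail above, calculate_region pads the detection box by a 1.4x multiplier, and the crop is then resized to a fixed height of 500 px with the width following the crop's aspect ratio. A worked sketch of the resize arithmetic (crop size hypothetical):

# Worked example with a hypothetical 400x200 (h x w) crop at height=500:
crop_h, crop_w = 400, 200
height = 500
width = int(height * crop_w / crop_h)  # int(500 * 200 / 400) = 250
assert width == 250  # aspect ratio preserved before JPEG encoding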
47 frigate/embeddings/util.py Normal file
@@ -0,0 +1,47 @@
"""Z-score normalization for search distance."""

import math


class ZScoreNormalization:
    """Running Z-score normalization for search distance."""

    def __init__(self):
        self.n = 0
        self.mean = 0
        self.m2 = 0

    @property
    def variance(self):
        return self.m2 / (self.n - 1) if self.n > 1 else 0.0

    @property
    def stddev(self):
        return math.sqrt(self.variance)

    def normalize(self, distances: list[float]):
        self._update(distances)
        if self.stddev == 0:
            return distances
        return [(x - self.mean) / self.stddev for x in distances]

    def _update(self, distances: list[float]):
        # Welford's online algorithm: fold each sample into the running mean and M2
        for x in distances:
            self.n += 1
            delta = x - self.mean
            self.mean += delta / self.n
            delta2 = x - self.mean
            self.m2 += delta * delta2

    def to_dict(self):
        return {
            "n": self.n,
            "mean": self.mean,
            "m2": self.m2,
        }

    def from_dict(self, data: dict):
        self.n = data["n"]
        self.mean = data["mean"]
        self.m2 = data["m2"]
        return self
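ZScoreNormalization is stateful: normalize() first folds the incoming batch into the running mean and M2 (no sample history is kept), then maps the batch to z-scores. A quick sketch of the behavior:

# Sketch: distances from successive searches share one running distribution.
norm = ZScoreNormalization()
print(norm.normalize([1.0, 2.0, 3.0]))  # mean=2.0, stddev=1.0 -> [-1.0, 0.0, 1.0]
print(norm.n, norm.mean)                # 3 2.0
print(norm.normalize([2.0]))            # [0.0]; stats now include all four samples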
@@ -209,7 +209,9 @@ class AudioEventMaintainer(threading.Thread):
         audio_detections = []

         for label, score, _ in model_detections:
-            logger.debug(f"Heard {label} with a score of {score}")
+            logger.debug(
+                f"{self.config.name} heard {label} with a score of {score}"
+            )

             if label not in self.config.audio.listen:
                 continue
@@ -221,7 +223,7 @@ class AudioEventMaintainer(threading.Thread):
             audio_detections.append(label)

         # send audio detection data
-        self.detection_publisher.send_data(
+        self.detection_publisher.publish(
             (
                 self.config.name,
                 datetime.datetime.now().timestamp(),
@@ -10,6 +10,7 @@ from pathlib import Path

 from frigate.config import FrigateConfig
 from frigate.const import CLIPS_DIR
+from frigate.embeddings.embeddings import Embeddings
 from frigate.models import Event, Timeline

 logger = logging.getLogger(__name__)
@@ -30,6 +31,9 @@ class EventCleanup(threading.Thread):
         self.removed_camera_labels: list[str] = None
         self.camera_labels: dict[str, dict[str, any]] = {}

+        if self.config.semantic_search.enabled:
+            self.embeddings = Embeddings()
+
     def get_removed_camera_labels(self) -> list[Event]:
         """Get a list of distinct labels for removed cameras."""
         if self.removed_camera_labels is None:
@@ -190,16 +194,31 @@ class EventCleanup(threading.Thread):
         events_with_expired_clips = self.expire(EventCleanupType.clips)

         # delete timeline entries for events that have expired recordings
-        Timeline.delete().where(
-            Timeline.source_id << events_with_expired_clips
-        ).execute()
+        # delete up to 100,000 at a time
+        max_deletes = 100000
+        deleted_events_list = list(events_with_expired_clips)
+        for i in range(0, len(deleted_events_list), max_deletes):
+            Timeline.delete().where(
+                Timeline.source_id << deleted_events_list[i : i + max_deletes]
+            ).execute()

         self.expire(EventCleanupType.snapshots)

         # drop events from db where has_clip and has_snapshot are false
-        delete_query = Event.delete().where(
-            Event.has_clip == False, Event.has_snapshot == False
-        )
-        delete_query.execute()
+        events = (
+            Event.select()
+            .where(Event.has_clip == False, Event.has_snapshot == False)
+            .iterator()
+        )
+        events_to_delete = [e.id for e in events]
+        if len(events_to_delete) > 0:
+            chunk_size = 50
+            for i in range(0, len(events_to_delete), chunk_size):
+                chunk = events_to_delete[i : i + chunk_size]
+                Event.delete().where(Event.id << chunk).execute()
+
+                if self.config.semantic_search.enabled:
+                    self.embeddings.thumbnail.delete(ids=chunk)
+                    self.embeddings.description.delete(ids=chunk)

         logger.info("Exiting event cleanup...")
@@ -86,7 +86,7 @@ class ExternalEventProcessor:

         if source_type == "api":
             self.event_camera[event_id] = camera
-            self.detection_updater.send_data(
+            self.detection_updater.publish(
                 (
                     camera,
                     now,
@@ -115,7 +115,7 @@ class ExternalEventProcessor:
             )

         if event_id in self.event_camera:
-            self.detection_updater.send_data(
+            self.detection_updater.publish(
                 (
                     self.event_camera[event_id],
                     end_time,
@@ -237,7 +237,7 @@ class EventProcessor(threading.Thread):

         if event_type == EventStateEnum.end:
             del self.events_in_process[event_data["id"]]
-            self.event_end_publisher.publish((event_data["id"], camera))
+            self.event_end_publisher.publish((event_data["id"], camera, updated_db))

     def handle_external_detection(
         self, event_type: EventStateEnum, event_data: Event
@@ -214,8 +214,7 @@ def parse_preset_hardware_acceleration_encode(


 PRESETS_INPUT = {
-    "preset-http-jpeg-generic": _user_agent_args
-    + [
+    "preset-http-jpeg-generic": [
         "-r",
         "{}",
         "-stream_loop",
63 frigate/genai/__init__.py Normal file
@@ -0,0 +1,63 @@
"""Generative AI module for Frigate."""

import importlib
import os
from typing import Optional

from frigate.config import GenAIConfig, GenAIProviderEnum

PROVIDERS = {}


def register_genai_provider(key: GenAIProviderEnum):
    """Register a GenAI provider."""

    def decorator(cls):
        PROVIDERS[key] = cls
        return cls

    return decorator


class GenAIClient:
    """Generative AI client for Frigate."""

    def __init__(self, genai_config: GenAIConfig, timeout: int = 60) -> None:
        self.genai_config: GenAIConfig = genai_config
        self.timeout = timeout
        self.provider = self._init_provider()

    def generate_description(
        self, thumbnails: list[bytes], metadata: dict[str, any]
    ) -> Optional[str]:
        """Generate a description for the frame."""
        prompt = self.genai_config.object_prompts.get(
            metadata["label"], self.genai_config.prompt
        ).format(**metadata)
        return self._send(prompt, thumbnails)

    def _init_provider(self):
        """Initialize the client."""
        return None

    def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
        """Submit a request to the provider."""
        return None


def get_genai_client(genai_config: GenAIConfig) -> Optional[GenAIClient]:
    """Get the GenAI client."""
    if genai_config.enabled:
        load_providers()
        provider = PROVIDERS.get(genai_config.provider)
        if provider:
            return provider(genai_config)
    return None


def load_providers():
    package_dir = os.path.dirname(__file__)
    for filename in os.listdir(package_dir):
        if filename.endswith(".py") and filename != "__init__.py":
            module_name = f"frigate.genai.{filename[:-3]}"
            importlib.import_module(module_name)
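load_providers() imports every sibling module in frigate/genai, and each module registers its class via the decorator on import, so adding a backend is a drop-in file. A hypothetical extra provider would follow the same pattern (sketch only; a `dummy` member does not exist on GenAIProviderEnum):

# Hypothetical frigate/genai/dummy.py, registers itself on import.
# Assumes a "dummy" member existed on GenAIProviderEnum; illustrative only.
from typing import Optional

from frigate.config import GenAIProviderEnum
from frigate.genai import GenAIClient, register_genai_provider


@register_genai_provider(GenAIProviderEnum.dummy)
class DummyClient(GenAIClient):
    """Echoes a canned description; useful for wiring tests."""

    def _init_provider(self):
        return object()  # no real backend

    def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
        return f"stub description for {len(images)} image(s)"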
49 frigate/genai/gemini.py Normal file
@@ -0,0 +1,49 @@
"""Gemini Provider for Frigate AI."""

from typing import Optional

import google.generativeai as genai
from google.api_core.exceptions import GoogleAPICallError

from frigate.config import GenAIProviderEnum
from frigate.genai import GenAIClient, register_genai_provider


@register_genai_provider(GenAIProviderEnum.gemini)
class GeminiClient(GenAIClient):
    """Generative AI client for Frigate using Gemini."""

    provider: genai.GenerativeModel

    def _init_provider(self):
        """Initialize the client."""
        genai.configure(api_key=self.genai_config.api_key)
        return genai.GenerativeModel(self.genai_config.model)

    def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
        """Submit a request to Gemini."""
        data = [
            {
                "mime_type": "image/jpeg",
                "data": img,
            }
            for img in images
        ] + [prompt]
        try:
            response = self.provider.generate_content(
                data,
                generation_config=genai.types.GenerationConfig(
                    candidate_count=1,
                ),
                request_options=genai.types.RequestOptions(
                    timeout=self.timeout,
                ),
            )
        except GoogleAPICallError:
            return None
        try:
            description = response.text.strip()
        except ValueError:
            # No description was generated
            return None
        return description
41 frigate/genai/ollama.py Normal file
@@ -0,0 +1,41 @@
"""Ollama Provider for Frigate AI."""

import logging
from typing import Optional

from httpx import TimeoutException
from ollama import Client as ApiClient
from ollama import ResponseError

from frigate.config import GenAIProviderEnum
from frigate.genai import GenAIClient, register_genai_provider

logger = logging.getLogger(__name__)


@register_genai_provider(GenAIProviderEnum.ollama)
class OllamaClient(GenAIClient):
    """Generative AI client for Frigate using Ollama."""

    provider: ApiClient

    def _init_provider(self):
        """Initialize the client."""
        client = ApiClient(host=self.genai_config.base_url, timeout=self.timeout)
        response = client.pull(self.genai_config.model)
        if response["status"] != "success":
            logger.error("Failed to pull %s model from Ollama", self.genai_config.model)
            return None
        return client

    def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
        """Submit a request to Ollama."""
        try:
            result = self.provider.generate(
                self.genai_config.model,
                prompt,
                images=images,
            )
            return result["response"].strip()
        except (TimeoutException, ResponseError):
            return None
51 frigate/genai/openai.py Normal file
@@ -0,0 +1,51 @@
"""OpenAI Provider for Frigate AI."""

import base64
from typing import Optional

from httpx import TimeoutException
from openai import OpenAI

from frigate.config import GenAIProviderEnum
from frigate.genai import GenAIClient, register_genai_provider


@register_genai_provider(GenAIProviderEnum.openai)
class OpenAIClient(GenAIClient):
    """Generative AI client for Frigate using OpenAI."""

    provider: OpenAI

    def _init_provider(self):
        """Initialize the client."""
        return OpenAI(api_key=self.genai_config.api_key)

    def _send(self, prompt: str, images: list[bytes]) -> Optional[str]:
        """Submit a request to OpenAI."""
        encoded_images = [base64.b64encode(image).decode("utf-8") for image in images]
        try:
            result = self.provider.chat.completions.create(
                model=self.genai_config.model,
                messages=[
                    {
                        "role": "user",
                        "content": [
                            {
                                "type": "image_url",
                                "image_url": {
                                    "url": f"data:image/jpeg;base64,{image}",
                                    "detail": "low",
                                },
                            }
                            for image in encoded_images
                        ]
                        + [{"type": "text", "text": prompt}],
                    },
                ],
                timeout=self.timeout,
            )
        except TimeoutException:
            return None
        if len(result.choices) > 0:
            return result.choices[0].message.content.strip()
        return None
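For reference, the request above serializes to a single user message whose content mixes typed parts: one image_url part per frame (a base64 data URL, with detail "low" to cap token usage) plus a text part carrying the prompt; the messages API rejects bare strings inside a content array. Illustrative shape with one image (prompt text hypothetical):

# Shape of the user message sent above (illustrative values):
# {
#     "role": "user",
#     "content": [
#         {"type": "image_url",
#          "image_url": {"url": "data:image/jpeg;base64,...", "detail": "low"}},
#         {"type": "text", "text": "Describe the person in these frames."},
#     ],
# }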
@@ -118,3 +118,4 @@ class RecordingsToDelete(Model):  # type: ignore[misc]
 class User(Model):  # type: ignore[misc]
     username = CharField(null=False, primary_key=True, max_length=30)
     password_hash = CharField(null=False, max_length=120)
+    notification_tokens = JSONField()
||||
@@ -1187,7 +1187,7 @@ class TrackedObjectProcessor(threading.Thread):
         ]

         # publish info on this frame
-        self.detection_publisher.send_data(
+        self.detection_publisher.publish(
             (
                 camera,
                 frame_time,
@@ -1274,7 +1274,7 @@ class TrackedObjectProcessor(threading.Thread):
             if not update:
                 break

-            event_id, camera = update
+            event_id, camera, _ = update
             self.camera_states[camera].finished(event_id)

         self.requestor.stop()
||||
@@ -395,7 +395,8 @@ class BirdsEyeFrameManager:
             [
                 cam
                 for cam, cam_data in self.cameras.items()
-                if cam_data["last_active_frame"] > 0
+                if self.config.cameras[cam].birdseye.enabled
+                and cam_data["last_active_frame"] > 0
                 and cam_data["current_frame"] - cam_data["last_active_frame"]
                 < self.inactivity_threshold
             ]
||||
@@ -80,7 +80,7 @@ def output_frames(
     websocket_thread.start()

     while not stop_event.is_set():
-        (topic, data) = detection_subscriber.get_data(timeout=1)
+        (topic, data) = detection_subscriber.check_for_update(timeout=1)

         if not topic:
             continue
@@ -134,7 +134,7 @@ def output_frames(
         move_preview_frames("clips")

         while True:
-            (topic, data) = detection_subscriber.get_data(timeout=0)
+            (topic, data) = detection_subscriber.check_for_update(timeout=0)

             if not topic:
                 break
|
||||
@@ -77,8 +77,8 @@ class FFMpegConverter(threading.Thread):

         # write a PREVIEW at fps and 1 key frame per clip
         self.ffmpeg_cmd = parse_preset_hardware_acceleration_encode(
             config.ffmpeg.hwaccel_args,
-            input="-f concat -y -protocol_whitelist pipe,file -safe 0 -i /dev/stdin",
-            output=f"-g {PREVIEW_KEYFRAME_INTERVAL} -bf 0 -b:v {PREVIEW_QUALITY_BIT_RATES[self.config.record.preview.quality]} {FPS_VFR_PARAM} -movflags +faststart -pix_fmt yuv420p {self.path}",
+            input="-f concat -y -protocol_whitelist pipe,file -safe 0 -threads 1 -i /dev/stdin",
+            output=f"-threads 1 -g {PREVIEW_KEYFRAME_INTERVAL} -bf 0 -b:v {PREVIEW_QUALITY_BIT_RATES[self.config.record.preview.quality]} {FPS_VFR_PARAM} -movflags +faststart -pix_fmt yuv420p {self.path}",
             type=EncodeTypeEnum.preview,
         )
||||
@@ -129,12 +129,12 @@ class FFMpegConverter(threading.Thread):
             self.requestor.send_data(
                 INSERT_PREVIEW,
                 {
-                    Previews.id: f"{self.config.name}_{end}",
-                    Previews.camera: self.config.name,
-                    Previews.path: self.path,
-                    Previews.start_time: start,
-                    Previews.end_time: end,
-                    Previews.duration: end - start,
+                    Previews.id.name: f"{self.config.name}_{end}",
+                    Previews.camera.name: self.config.name,
+                    Previews.path.name: self.path,
+                    Previews.start_time.name: start,
+                    Previews.end_time.name: end,
+                    Previews.duration.name: end - start,
                 },
             )
         else:
||||
@@ -83,6 +83,7 @@ class OnvifController:

         try:
             profiles = media.GetProfiles()
+            logger.debug(f"Onvif profiles for {camera_name}: {profiles}")
         except (ONVIFError, Fault, TransportError) as e:
             logger.error(
                 f"Unable to get Onvif media profiles for camera: {camera_name}: {e}"
@@ -93,7 +94,6 @@ class OnvifController:
         for key, onvif_profile in enumerate(profiles):
             if (
                 onvif_profile.VideoEncoderConfiguration
-                and onvif_profile.VideoEncoderConfiguration.Encoding == "H264"
                 and onvif_profile.PTZConfiguration
                 and (
                     onvif_profile.PTZConfiguration.DefaultContinuousPanTiltVelocitySpace
@@ -102,6 +102,7 @@ class OnvifController:
                     is not None
                 )
             ):
+                # use the first profile that has a valid ptz configuration
                 profile = onvif_profile
                 logger.debug(f"Selected Onvif profile for {camera_name}: {profile}")
                 break
Some files were not shown because too many files have changed in this diff.