Compare commits


3 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Blake Blackshear | 0f66a8cb41 | call the restart function and handle errors better in the detection process | 2020-03-01 18:45:07 -06:00 |
| Blake Blackshear | 04ef6ac30e | clarify mqtt password readme | 2020-03-01 18:45:07 -06:00 |
| Blake Blackshear | ab42a9625d | readme updates | 2020-03-01 07:47:22 -06:00 |
4 changed files with 22 additions and 19 deletions

View File

@@ -16,16 +16,6 @@ You see multiple bounding boxes because it draws bounding boxes from all frames
 [![](http://img.youtube.com/vi/nqHbCtyo4dY/0.jpg)](http://www.youtube.com/watch?v=nqHbCtyo4dY "Frigate")
 
 ## Getting Started
-Build the container with
-```
-docker build -t frigate .
-```
-
-Models for both CPU and EdgeTPU (Coral) are bundled in the image. You can use your own models with volume mounts:
-- CPU Model: `/cpu_model.tflite`
-- EdgeTPU Model: `/edgetpu_model.tflite`
-- Labels: `/labelmap.txt`
-
 Run the container with
 ```bash
 docker run --rm \
@@ -36,7 +26,7 @@ docker run --rm \
 -v /etc/localtime:/etc/localtime:ro \
 -p 5000:5000 \
 -e FRIGATE_RTSP_PASSWORD='password' \
-frigate:latest
+blakeblackshear/frigate:stable
 ```
 
 Example docker-compose:
@@ -46,7 +36,7 @@ Example docker-compose:
     restart: unless-stopped
     privileged: true
     shm_size: '1g' # should work for 5-7 cameras
-    image: frigate:latest
+    image: blakeblackshear/frigate:stable
     volumes:
       - /dev/bus/usb:/dev/bus/usb
       - /etc/localtime:/etc/localtime:ro
@@ -127,6 +117,11 @@ sensor:
         value_template: '{{ states.sensor.frigate_debug.attributes["coral"]["inference_speed"] }}'
         unit_of_measurement: 'ms'
 ```
+## Using a custom model
+Models for both CPU and EdgeTPU (Coral) are bundled in the image. You can use your own models with volume mounts:
+- CPU Model: `/cpu_model.tflite`
+- EdgeTPU Model: `/edgetpu_model.tflite`
+- Labels: `/labelmap.txt`
 
 ## Tips
 - Lower the framerate of the video feed on the camera to reduce the CPU usage for capturing the feed

View File

@@ -3,9 +3,13 @@ web_port: 5000
 mqtt:
   host: mqtt.server.com
   topic_prefix: frigate
   # client_id: frigate # Optional -- set to override default client id of 'frigate' if running multiple instances
-  # user: username # Optional -- Uncomment for use
-  # password: password # Optional -- Uncomment for use
+  # user: username # Optional
+  #################
+  ## Environment variables that begin with 'FRIGATE_' may be referenced in {}.
+  ##   password: '{FRIGATE_MQTT_PASSWORD}'
+  #################
+  # password: password # Optional
   #################
 
 # Default ffmpeg args. Optional and can be overwritten per camera.
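
The new config comment says environment variables prefixed with `FRIGATE_` may be referenced in `{}` placeholders, e.g. `password: '{FRIGATE_MQTT_PASSWORD}'`. A minimal sketch of how such substitution could work (the helper name and usage are assumptions for illustration, not necessarily Frigate's exact implementation):

```python
import os

# Collect only environment variables with the FRIGATE_ prefix (assumed convention from the config comment).
FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith('FRIGATE_')}

def resolve_secret(raw_value):
    # Hypothetical helper: expand '{FRIGATE_MQTT_PASSWORD}'-style placeholders with str.format.
    # A missing variable raises KeyError, which surfaces a typo in the placeholder at startup.
    return raw_value.format(**FRIGATE_ENV_VARS) if raw_value else raw_value

# Example: with FRIGATE_MQTT_PASSWORD=s3cret set in the container environment,
# resolve_secret("{FRIGATE_MQTT_PASSWORD}") returns "s3cret".
```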

View File

@@ -76,6 +76,7 @@ class CameraWatchdog(threading.Thread):
             if (self.tflite_process.detection_start.value > 0.0 and
                 datetime.datetime.now().timestamp() - self.tflite_process.detection_start.value > 10):
                 print("Detection appears to be stuck. Restarting detection process")
+                self.tflite_process.start_or_restart()
                 time.sleep(30)
 
             for name, camera_process in self.camera_processes.items():
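
The added line hands recovery off to `start_or_restart()` on the detection process wrapper instead of only logging. A rough sketch of the restart pattern such a method typically follows (attribute names mirror the diff; the body is an assumption, not the exact Frigate code):

```python
import multiprocessing as mp

class EdgeTPUProcess:
    # Sketch only: run_detector is the worker loop shown in the next file's diff.
    def start_or_restart(self):
        # Clear the "stuck" timestamp so the watchdog does not immediately re-trigger.
        self.detection_start.value = 0.0
        if self.detect_process is not None and self.detect_process.is_alive():
            # Ask the old process to exit, then force kill if it does not comply.
            self.detect_process.terminate()
            self.detect_process.join(timeout=30)
            if self.detect_process.exitcode is None:
                self.detect_process.kill()
                self.detect_process.join()
        # Start a fresh detector process reading from the same queue.
        self.detect_process = mp.Process(
            target=run_detector,
            args=(self.detection_queue, self.avg_inference_speed, self.detection_start))
        self.detect_process.daemon = True
        self.detect_process.start()
```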

View File

@@ -71,16 +71,19 @@ def run_detector(detection_queue, avg_speed, start):
         object_id_str = detection_queue.get()
         object_id_hash = hashlib.sha1(str.encode(object_id_str))
         object_id = plasma.ObjectID(object_id_hash.digest())
+        object_id_out = plasma.ObjectID(hashlib.sha1(str.encode(f"out-{object_id_str}")).digest())
         input_frame = plasma_client.get(object_id, timeout_ms=0)
-        start.value = datetime.datetime.now().timestamp()
+
+        if input_frame is plasma.ObjectNotAvailable:
+            plasma_client.put(np.zeros((20,6), np.float32), object_id_out)
+            continue
 
         # detect and put the output in the plasma store
-        object_id_out = hashlib.sha1(str.encode(f"out-{object_id_str}")).digest()
-        plasma_client.put(object_detector.detect_raw(input_frame), plasma.ObjectID(object_id_out))
+        start.value = datetime.datetime.now().timestamp()
+        plasma_client.put(object_detector.detect_raw(input_frame), object_id_out)
         duration = datetime.datetime.now().timestamp()-start.value
         start.value = 0.0
 
         avg_speed.value = (avg_speed.value*9 + duration)/10
 
 class EdgeTPUProcess():
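
Because the detector derives both plasma ObjectIDs deterministically from the same string (`sha1(id)` for the input, `sha1("out-" + id)` for the output), a caller can compute the output ID itself and block until the result lands in the store. A hedged sketch of that client side (function name, queue type, and timeout are assumptions for illustration):

```python
import hashlib
import pyarrow.plasma as plasma

def get_detections(plasma_client, detection_queue, object_id_str, frame, timeout_ms=10000):
    # Hypothetical caller: store the frame under sha1(object_id_str), ask the detector
    # process to run on it, then wait for the result under sha1("out-" + object_id_str).
    object_id = plasma.ObjectID(hashlib.sha1(str.encode(object_id_str)).digest())
    object_id_out = plasma.ObjectID(hashlib.sha1(str.encode(f"out-{object_id_str}")).digest())

    plasma_client.put(frame, object_id)
    detection_queue.put(object_id_str)

    # Blocks up to timeout_ms; per the diff, the detector writes a zero-filled
    # (20, 6) array to the output ID when the input frame was not available.
    return plasma_client.get(object_id_out, timeout_ms=timeout_ms)
```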