forked from Github/frigate

Compare commits: v0.10.0-be...v0.10.0-be (15 commits)
| SHA1 |
|---|
| c1155af169 |
| 77c1f1bb1b |
| ae3c01fe2d |
| 7a2a85d253 |
| 77c66d4e49 |
| 494e5ac4ec |
| 63b7465452 |
| e6d2df5661 |
| a3301e0347 |
| 3d556cc2cb |
| 585efe1a0f |
| c7d47439dd |
| 19a6978228 |
| 1ebb8a54bf |
| ae968044d6 |
```diff
@@ -154,7 +154,8 @@ class DetectConfig(FrigateBaseModel):
         title="Maximum number of frames the object can dissapear before detection ends."
     )
     stationary_interval: Optional[int] = Field(
-        title="Frame interval for checking stationary objects."
+        title="Frame interval for checking stationary objects.",
+        ge=1,
     )
 
 
```
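For context, `ge=1` makes the config loader reject zero or negative intervals up front instead of failing later at runtime. A minimal sketch of that behavior, using a plain pydantic `BaseModel` in place of `FrigateBaseModel`:

```python
from typing import Optional

from pydantic import BaseModel, Field, ValidationError


class DetectConfig(BaseModel):
    # ge=1 means "greater than or equal to 1"; pydantic enforces it on load
    stationary_interval: Optional[int] = Field(
        None, title="Frame interval for checking stationary objects.", ge=1
    )


DetectConfig(stationary_interval=10)  # accepted
try:
    DetectConfig(stationary_interval=0)  # rejected by the ge=1 constraint
except ValidationError as e:
    print(e)
```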
```diff
@@ -658,10 +658,15 @@ def vod_ts(camera, start_ts, end_ts):
         # Determine if we need to end the last clip early
         if recording.end_time > end_ts:
             duration -= int((recording.end_time - end_ts) * 1000)
-        clips.append(clip)
-        durations.append(duration)
+
+        if duration > 0:
+            clips.append(clip)
+            durations.append(duration)
+        else:
+            logger.warning(f"Recording clip is missing or empty: {recording.path}")
 
     if not clips:
+        logger.error("No recordings found for the requested time range")
         return "No recordings found.", 404
 
     hour_ago = datetime.now() - timedelta(hours=1)
```
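Why the guard: `duration` is the clip length in milliseconds, and the trim above can drive it to zero or below when a cached segment lies entirely past the requested end. A tiny illustration of the arithmetic (numbers invented):

```python
# Invented numbers: a 10-second segment whose end_time overruns the
# requested range's end by 12 seconds.
duration = 10_000             # clip length in ms
duration -= int(12.0 * 1000)  # trim the overrun, as vod_ts does
if duration > 0:
    print("append clip")
else:
    print("skip clip")        # -2000 ms: nothing usable remains
```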
```diff
@@ -690,10 +695,12 @@ def vod_event(id):
     try:
         event: Event = Event.get(Event.id == id)
     except DoesNotExist:
+        logger.error(f"Event not found: {id}")
         return "Event not found.", 404
 
     if not event.has_clip:
-        return "Clip not available", 404
+        logger.error(f"Event does not have recordings: {id}")
+        return "Recordings not available", 404
 
     clip_path = os.path.join(CLIPS_DIR, f"{event.camera}-{id}.mp4")
 
```
```diff
@@ -40,10 +40,12 @@ class MotionDetector:
         # Improve contrast
         minval = np.percentile(resized_frame, 4)
         maxval = np.percentile(resized_frame, 96)
-        resized_frame = np.clip(resized_frame, minval, maxval)
-        resized_frame = (((resized_frame - minval) / (maxval - minval)) * 255).astype(
-            np.uint8
-        )
+        # don't adjust if the image is a single color
+        if minval < maxval:
+            resized_frame = np.clip(resized_frame, minval, maxval)
+            resized_frame = (
+                ((resized_frame - minval) / (maxval - minval)) * 255
+            ).astype(np.uint8)
 
         # mask frame
         resized_frame[self.mask] = [255]
```
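The guard avoids a division by zero when the 4th and 96th percentiles coincide, i.e. a uniform single-color frame. A standalone sketch of the same percentile contrast stretch, assuming numpy (function name is illustrative):

```python
import numpy as np


def stretch_contrast(gray: np.ndarray) -> np.ndarray:
    """Percentile-based contrast stretch; pass flat frames through unchanged."""
    minval = np.percentile(gray, 4)
    maxval = np.percentile(gray, 96)
    if minval >= maxval:  # single-color frame: stretching would divide by zero
        return gray
    clipped = np.clip(gray, minval, maxval)
    return (((clipped - minval) / (maxval - minval)) * 255).astype(np.uint8)


frame = np.random.randint(0, 255, (120, 160), dtype=np.uint8)
print(stretch_contrast(frame).dtype)             # uint8
print(stretch_contrast(np.zeros((4, 4))).max())  # flat frame passes through
```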
```diff
@@ -45,6 +45,8 @@ class RecordingMaintainer(threading.Thread):
         self.name = "recording_maint"
         self.config = config
         self.stop_event = stop_event
+        self.first_pass = True
+        self.end_time_cache = {}
 
     def move_files(self):
         cache_files = [
```
```diff
@@ -87,19 +89,18 @@ class RecordingMaintainer(threading.Thread):
                 }
             )
 
-        # delete all cached files past the most recent 2
+        # delete all cached files past the most recent 5
+        keep_count = 5
         for camera in grouped_recordings.keys():
-            if len(grouped_recordings[camera]) > 2:
-                logger.warning(
-                    "Proactively cleaning cache. Your recordings disk may be too slow."
-                )
+            if len(grouped_recordings[camera]) > keep_count:
                 sorted_recordings = sorted(
                     grouped_recordings[camera], key=lambda i: i["start_time"]
                 )
-                to_remove = sorted_recordings[:-2]
+                to_remove = sorted_recordings[:-keep_count]
                 for f in to_remove:
                     Path(f["cache_path"]).unlink(missing_ok=True)
-                grouped_recordings[camera] = sorted_recordings[-2:]
+                    self.end_time_cache.pop(f["cache_path"], None)
+                grouped_recordings[camera] = sorted_recordings[-keep_count:]
 
         for camera, recordings in grouped_recordings.items():
             # get all events with the end time after the start of the oldest cache file
```
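For reference, the trim relies on Python's negative-slice semantics: after sorting by `start_time`, `[:-keep_count]` is everything except the newest `keep_count` entries and `[-keep_count:]` is exactly those newest entries. Quick illustration:

```python
keep_count = 5
segments = [1, 2, 3, 4, 5, 6, 7, 8]  # pretend start times, oldest first

print(segments[:-keep_count])  # [1, 2, 3] -> oldest, removed from cache
print(segments[-keep_count:])  # [4, 5, 6, 7, 8] -> newest five are kept
```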
```diff
@@ -109,7 +110,7 @@ class RecordingMaintainer(threading.Thread):
                 .where(
                     Event.camera == camera,
                     (Event.end_time == None)
-                    | (Event.end_time >= recordings[0]["start_time"]),
+                    | (Event.end_time >= recordings[0]["start_time"].timestamp()),
                     Event.has_clip,
                 )
                 .order_by(Event.start_time)
```
```diff
@@ -124,26 +125,31 @@ class RecordingMaintainer(threading.Thread):
                     or not self.config.cameras[camera].record.enabled
                 ):
                     Path(cache_path).unlink(missing_ok=True)
+                    self.end_time_cache.pop(cache_path, None)
                     continue
 
-                ffprobe_cmd = [
-                    "ffprobe",
-                    "-v",
-                    "error",
-                    "-show_entries",
-                    "format=duration",
-                    "-of",
-                    "default=noprint_wrappers=1:nokey=1",
-                    f"{cache_path}",
-                ]
-                p = sp.run(ffprobe_cmd, capture_output=True)
-                if p.returncode == 0:
-                    duration = float(p.stdout.decode().strip())
-                    end_time = start_time + datetime.timedelta(seconds=duration)
+                if cache_path in self.end_time_cache:
+                    end_time, duration = self.end_time_cache[cache_path]
                 else:
-                    logger.warning(f"Discarding a corrupt recording segment: {f}")
-                    Path(cache_path).unlink(missing_ok=True)
-                    continue
+                    ffprobe_cmd = [
+                        "ffprobe",
+                        "-v",
+                        "error",
+                        "-show_entries",
+                        "format=duration",
+                        "-of",
+                        "default=noprint_wrappers=1:nokey=1",
+                        f"{cache_path}",
+                    ]
+                    p = sp.run(ffprobe_cmd, capture_output=True)
+                    if p.returncode == 0:
+                        duration = float(p.stdout.decode().strip())
+                        end_time = start_time + datetime.timedelta(seconds=duration)
+                        self.end_time_cache[cache_path] = (end_time, duration)
+                    else:
+                        logger.warning(f"Discarding a corrupt recording segment: {f}")
+                        Path(cache_path).unlink(missing_ok=True)
+                        continue
 
                 # if cached file's start_time is earlier than the retain_days for the camera
                 if start_time <= (
```
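The point of `end_time_cache` is that `move_files` runs every few seconds and would otherwise re-spawn ffprobe for segments still waiting to be moved. A standalone sketch of the same memoization, assuming `ffprobe` is on the PATH (function name is illustrative):

```python
import subprocess as sp
from typing import Dict, Optional

_duration_cache: Dict[str, float] = {}


def probe_duration(path: str) -> Optional[float]:
    """Return a media file's duration in seconds, probing each path only once."""
    if path in _duration_cache:
        return _duration_cache[path]
    p = sp.run(
        [
            "ffprobe", "-v", "error",
            "-show_entries", "format=duration",
            "-of", "default=noprint_wrappers=1:nokey=1",
            path,
        ],
        capture_output=True,
    )
    if p.returncode != 0:
        return None  # corrupt or unreadable segment; caller should discard it
    duration = float(p.stdout.decode().strip())
    _duration_cache[path] = duration
    return duration
```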
```diff
@@ -158,14 +164,17 @@ class RecordingMaintainer(threading.Thread):
                 overlaps = False
                 for event in events:
                     # if the event starts in the future, stop checking events
-                    # and let this recording segment expire
+                    # and remove this segment
                     if event.start_time > end_time.timestamp():
                         overlaps = False
                         break
 
                     # if the event is in progress or ends after the recording starts, keep it
                     # and stop looking at events
-                    if event.end_time is None or event.end_time >= start_time:
+                    if (
+                        event.end_time is None
+                        or event.end_time >= start_time.timestamp()
+                    ):
                         overlaps = True
                         break
 
```
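Both changed comparisons now go through `.timestamp()`: the segment's `start_time`/`end_time` are `datetime` objects while event rows store epoch floats, so mixing them directly would raise a `TypeError`. A sketch of the resulting keep/drop test for one segment (function name is illustrative):

```python
import datetime
from typing import Optional


def segment_overlaps_event(
    seg_start: datetime.datetime,
    seg_end: datetime.datetime,
    event_start: float,
    event_end: Optional[float],
) -> bool:
    """Segment times are datetimes; event times are epoch floats."""
    if event_start > seg_end.timestamp():
        return False  # event begins after the segment ends
    # in-progress events (end_time None) always count as overlapping
    return event_end is None or event_end >= seg_start.timestamp()


now = datetime.datetime.now()
print(segment_overlaps_event(now, now + datetime.timedelta(seconds=10),
                             now.timestamp() + 2, None))  # True
```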
```diff
@@ -218,6 +227,9 @@ class RecordingMaintainer(threading.Thread):
                     Path(cache_path).unlink(missing_ok=True)
                     logger.error(e)
 
+                # clear end_time cache
+                self.end_time_cache.pop(cache_path, None)
+
     def run(self):
         # Check for new files every 5 seconds
         wait_time = 5
```
```diff
@@ -230,7 +242,14 @@ class RecordingMaintainer(threading.Thread):
                     "Error occurred when attempting to maintain recording cache"
                 )
                 logger.error(e)
-            wait_time = max(0, 5 - (datetime.datetime.now().timestamp() - run_start))
+            duration = datetime.datetime.now().timestamp() - run_start
+            wait_time = max(0, 5 - duration)
+            if wait_time == 0 and not self.first_pass:
+                logger.warning(
+                    "Cache is taking longer than 5 seconds to clear. Your recordings disk may be too slow."
+                )
+            if self.first_pass:
+                self.first_pass = False
 
         logger.info(f"Exiting recording maintenance...")
 
```
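The reworked loop keeps a fixed five-second cadence (sleep only for whatever remains of the interval after the work) and uses `first_pass` to avoid warning about the startup backlog. A minimal sketch of that pacing pattern:

```python
import time

INTERVAL = 5.0
first_pass = True

for _ in range(3):  # stand-in for "loop until stop_event is set"
    run_start = time.time()
    time.sleep(0.5)  # placeholder for the real cache-maintenance work
    elapsed = time.time() - run_start
    wait_time = max(0.0, INTERVAL - elapsed)
    if wait_time == 0 and not first_pass:
        print("maintenance exceeded the interval; disk may be too slow")
    first_pass = False
    time.sleep(wait_time)
```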
```diff
@@ -389,11 +408,40 @@ class RecordingCleanup(threading.Thread):
 
         for f in files_to_check:
             p = Path(f)
-            if p.stat().st_mtime < delete_before.get(p.parent.name, default_expire):
-                p.unlink(missing_ok=True)
+            try:
+                if p.stat().st_mtime < delete_before.get(p.parent.name, default_expire):
+                    p.unlink(missing_ok=True)
+            except FileNotFoundError:
+                logger.warning(f"Attempted to expire missing file: {f}")
 
         logger.debug("End expire files (legacy).")
 
+    def sync_recordings(self):
+        logger.debug("Start sync recordings.")
+
+        # get all recordings in the db
+        recordings: Recordings = Recordings.select()
+
+        # get all recordings files on disk
+        process = sp.run(
+            ["find", RECORD_DIR, "-type", "f"],
+            capture_output=True,
+            text=True,
+        )
+        files_on_disk = process.stdout.splitlines()
+
+        recordings_to_delete = []
+        for recording in recordings.objects().iterator():
+            if not recording.path in files_on_disk:
+                recordings_to_delete.append(recording.id)
+
+        logger.debug(
+            f"Deleting {len(recordings_to_delete)} recordings with missing files"
+        )
+        Recordings.delete().where(Recordings.id << recordings_to_delete).execute()
+
+        logger.debug("End sync recordings.")
+
     def run(self):
         # Expire recordings every minute, clean directories every hour.
         for counter in itertools.cycle(range(60)):
```
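A side note on `sync_recordings`: `recording.path in files_on_disk` scans a Python list, so the sync is O(rows × files). The same reconciliation with a set gives O(1) membership tests; a hedged sketch, where `os.walk` stands in for the `find` subprocess and the record directory is an assumed default:

```python
import os
from typing import Iterable, List, Set

RECORD_DIR = "/media/frigate/recordings"  # assumption: default record directory


def list_files(root: str) -> Set[str]:
    """All files under root, equivalent to `find root -type f`."""
    return {
        os.path.join(dirpath, name)
        for dirpath, _dirs, names in os.walk(root)
        for name in names
    }


def orphaned_paths(db_paths: Iterable[str], on_disk: Set[str]) -> List[str]:
    # set membership is O(1), so this stays fast with many recordings
    return [path for path in db_paths if path not in on_disk]
```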
```diff
@@ -407,3 +455,4 @@ class RecordingCleanup(threading.Thread):
             if counter == 0:
                 self.expire_files()
                 remove_empty_directories(RECORD_DIR)
+                self.sync_recordings()
```
```diff
@@ -75,25 +75,7 @@ def filtered(obj, objects_to_track, object_filters):
 
 
 def create_tensor_input(frame, model_shape, region):
-    # TODO: is it faster to just convert grayscale to RGB? or repeat dimensions with numpy?
-    height = frame.shape[0] // 3 * 2
-    width = frame.shape[1]
-
-    # get the crop box if the region extends beyond the frame
-    crop_x1 = max(0, region[0])
-    crop_y1 = max(0, region[1])
-    crop_x2 = min(width, region[2])
-    crop_y2 = min(height, region[3])
-
-    size = region[3] - region[1]
-    cropped_frame = np.zeros((size, size), np.uint8)
-
-    cropped_frame[
-        0 : crop_y2 - crop_y1,
-        0 : crop_x2 - crop_x1,
-    ] = frame[crop_y1:crop_y2, crop_x1:crop_x2]
-
-    cropped_frame = np.repeat(np.expand_dims(cropped_frame, -1), 3, 2)
+    cropped_frame = yuv_region_2_rgb(frame, region)
 
     # Resize to 300x300 if needed
     if cropped_frame.shape != (model_shape[0], model_shape[1], 3):
```
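What changed: the deleted code cropped only the Y (luma) plane and repeated it into three identical channels, so the detector saw grayscale; `yuv_region_2_rgb` restores full-color input. A simplified sketch of what such a helper does, assuming OpenCV and an I420 (YUV 4:2:0) planar frame; the real helper may crop the planes before converting for speed:

```python
import cv2
import numpy as np


def yuv_region_to_rgb(frame: np.ndarray, region) -> np.ndarray:
    """Convert one region of an I420 frame to RGB.

    `frame` is the planar YUV buffer (3/2 * height rows of width columns);
    `region` is (x1, y1, x2, y2) in luma coordinates. Simplified: converts
    the whole frame, then crops, and assumes the region lies inside the frame.
    """
    x1, y1, x2, y2 = region
    rgb = cv2.cvtColor(frame, cv2.COLOR_YUV2RGB_I420)
    return rgb[y1:y2, x1:x2]
```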
```diff
@@ -199,7 +199,7 @@ export default function Event({ eventId, close, scrollRef }) {
             <img
               src={
                 data.has_snapshot
-                  ? `${apiHost}/clips/${data.camera}-${eventId}.jpg`
+                  ? `${apiHost}/api/events/${eventId}/snapshot.jpg`
                   : `data:image/jpeg;base64,${data.thumbnail}`
               }
               alt={`${data.label} at ${(data.top_score * 100).toFixed(1)}% confidence`}
```
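On the frontend change: the snapshot is now served by the events API rather than read from the clips directory. A hedged example of fetching the same endpoint outside the UI, assuming the `requests` package (host and event id are placeholders):

```python
import requests

API_HOST = "http://frigate.local:5000"  # placeholder Frigate address
EVENT_ID = "1640995200.123456-abcd12"   # placeholder event id

resp = requests.get(f"{API_HOST}/api/events/{EVENT_ID}/snapshot.jpg", timeout=10)
resp.raise_for_status()
with open("snapshot.jpg", "wb") as f:
    f.write(resp.content)
```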