From 3879fde06d5f0b9402528efe141902f8d6434e3f Mon Sep 17 00:00:00 2001
From: Nicolas Mowen
Date: Mon, 14 Oct 2024 16:11:43 -0600
Subject: [PATCH] Don't allow unlimited unprocessed segments to stay in cache
 (#14341)

* Don't allow unlimited unprocessed frames to stay in cache

* Formatting
---
 frigate/record/maintainer.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/frigate/record/maintainer.py b/frigate/record/maintainer.py
index f43d1424f..314ff3646 100644
--- a/frigate/record/maintainer.py
+++ b/frigate/record/maintainer.py
@@ -142,6 +142,8 @@ class RecordingMaintainer(threading.Thread):
                     )
                 )
             )
+
+            # see if the recording mover is too slow and segments need to be deleted
             if processed_segment_count > keep_count:
                 logger.warning(
                     f"Unable to keep up with recording segments in cache for {camera}. Keeping the {keep_count} most recent segments out of {processed_segment_count} and discarding the rest..."
@@ -153,6 +155,21 @@ class RecordingMaintainer(threading.Thread):
                     self.end_time_cache.pop(cache_path, None)
                 grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]
 
+            # see if detection has failed and unprocessed segments need to be deleted
+            unprocessed_segment_count = (
+                len(grouped_recordings[camera]) - processed_segment_count
+            )
+            if unprocessed_segment_count > keep_count:
+                logger.warning(
+                    f"Too many unprocessed recording segments in cache for {camera}. This likely indicates an issue with the detect stream, keeping the {keep_count} most recent segments out of {unprocessed_segment_count} and discarding the rest..."
+                )
+                to_remove = grouped_recordings[camera][:-keep_count]
+                for rec in to_remove:
+                    cache_path = rec["cache_path"]
+                    Path(cache_path).unlink(missing_ok=True)
+                    self.end_time_cache.pop(cache_path, None)
+                grouped_recordings[camera] = grouped_recordings[camera][-keep_count:]
+
         tasks = []
         for camera, recordings in grouped_recordings.items():
             # clear out all the object recording info for old frames