Add initial implementation of history view in new webui framework (#8895)

* Add support for review grid

* Cleanup reloading on focus

* Adjust timeline api to include metadata and before

* Be more efficient about getting info

* Adjust to new data format

* Cleanup types

* Cleanup text

* Transition to history

* Cleanup

* Remove old web implementations

* Cleanup
Nicolas Mowen authored on 2023-12-12 19:48:52 -07:00; committed by Blake Blackshear
parent c1f14e2d87, commit 4524d9440c
13 changed files with 932 additions and 33 deletions

frigate/events/maintainer.py

@@ -45,6 +45,12 @@ def should_update_state(prev_event: Event, current_event: Event) -> bool:
if prev_event["attributes"] != current_event["attributes"]:
return True
if prev_event["sub_label"] != current_event["sub_label"]:
return True
if len(prev_event["current_zones"]) < len(current_event["current_zones"]):
return True
return False

frigate/http.py

@@ -611,6 +611,78 @@ def timeline():
return jsonify([t for t in timeline])
@bp.route("/timeline/hourly")
def hourly_timeline():
"""Get hourly summary for timeline."""
camera = request.args.get("camera", "all")
before = request.args.get("before", type=float)
    limit = request.args.get("limit", 200, type=int)
tz_name = request.args.get("timezone", default="utc", type=str)
_, minute_modifier, _ = get_tz_modifiers(tz_name)
clauses = []
if camera != "all":
clauses.append((Timeline.camera == camera))
if before:
clauses.append((Timeline.timestamp < before))
if len(clauses) == 0:
clauses.append((True))
timeline = (
Timeline.select(
Timeline.camera,
Timeline.timestamp,
Timeline.data,
Timeline.class_type,
Timeline.source_id,
Timeline.source,
)
.where(reduce(operator.and_, clauses))
.order_by(Timeline.timestamp.desc())
.limit(limit)
.dicts()
.iterator()
)
count = 0
start = 0
end = 0
hours: dict[str, list[dict[str, any]]] = {}
for t in timeline:
if count == 0:
start = t["timestamp"]
else:
end = t["timestamp"]
count += 1
hour = (
datetime.fromtimestamp(t["timestamp"]).replace(
minute=0, second=0, microsecond=0
)
+ timedelta(
minutes=int(minute_modifier.split(" ")[0]),
)
).timestamp()
if hour not in hours:
hours[hour] = [t]
else:
hours[hour].insert(0, t)
return jsonify(
{
"start": start,
"end": end,
"count": count,
"hours": hours,
}
)
@bp.route("/<camera_name>/<label>/best.jpg")
@bp.route("/<camera_name>/<label>/thumbnail.jpg")
def label_thumbnail(camera_name, label):
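For reference, a hedged example of querying the new endpoint; the path and parameters come from the route above, while the host, port, and camera name are assumptions for a typical Frigate install:

import requests

# hypothetical local instance; Frigate's HTTP API is served under /api
resp = requests.get(
    "http://localhost:5000/api/timeline/hourly",
    params={"camera": "front_door", "timezone": "America/Denver", "limit": 50},
)
data = resp.json()

# "hours" maps each hour-start timestamp to that hour's timeline entries
# (oldest first within the hour); "start"/"end" bound the returned window,
# and "before" can be passed on a follow-up request to page further back
for hour, entries in data["hours"].items():
    print(hour, len(entries))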
@@ -1863,17 +1935,27 @@ def vod_hour(year_month, day, hour, camera_name, tz_name):
@bp.route("/preview/<camera_name>/start/<float:start_ts>/end/<float:end_ts>")
def preview_ts(camera_name, start_ts, end_ts):
"""Get all mp4 previews relevant for time period."""
if camera_name != "all":
camera_clause = Previews.camera == camera_name
else:
camera_clause = True
previews = (
Previews.select(
Previews.camera,
Previews.path,
Previews.duration,
Previews.start_time,
Previews.end_time,
)
.where(
Previews.start_time.between(start_ts, end_ts)
| Previews.end_time.between(start_ts, end_ts)
| ((start_ts > Previews.start_time) & (end_ts < Previews.end_time))
)
.where(camera_clause)
.order_by(Previews.start_time.asc())
.dicts()
.iterator()
)
@@ -1883,15 +1965,15 @@ def preview_ts(camera_name, start_ts, end_ts):
for preview in previews:
clips.append(
{
"src": preview.path.replace("/media/frigate", ""),
"camera": preview["camera"],
"src": preview["path"].replace("/media/frigate", ""),
"type": "video/mp4",
"start": preview.start_time,
"end": preview.end_time,
"start": preview["start_time"],
"end": preview["end_time"],
}
)
if not clips:
logger.error("No previews found for the requested time range")
return make_response(
jsonify(
{
@@ -1919,6 +2001,40 @@ def preview_hour(year_month, day, hour, camera_name, tz_name):
return preview_ts(camera_name, start_ts, end_ts)
@bp.route("/preview/<camera_name>/<frame_time>/thumbnail.jpg")
def preview_thumbnail(camera_name, frame_time):
"""Get a thumbnail from the cached preview jpgs."""
preview_dir = os.path.join(CACHE_DIR, "preview_frames")
file_start = f"preview_{camera_name}"
file_check = f"{file_start}-{frame_time}.jpg"
selected_preview = None
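        # cached preview frames are named preview_<camera>-<frame_time>.jpg, so a
        # lexicographic filename comparison is used to pick a frame from before
        # the requested time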
for file in os.listdir(preview_dir):
if file.startswith(file_start):
if file < file_check:
selected_preview = file
break
if selected_preview is None:
return make_response(
jsonify(
{
"success": False,
"message": "Could not find valid preview jpg.",
}
),
404,
)
with open(os.path.join(preview_dir, selected_preview), "rb") as image_file:
jpg_bytes = image_file.read()
response = make_response(jpg_bytes)
response.headers["Content-Type"] = "image/jpeg"
response.headers["Cache-Control"] = "private, max-age=31536000"
return response
@bp.route("/vod/event/<id>")
def vod_event(id):
try:
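For reference, a hedged sketch of consuming the updated preview endpoint; the URL shape follows the route above, and the rest (host, timestamps, the endpoint returning the clips list as JSON) is assumed:

import requests

start_ts, end_ts = 1702400000.0, 1702403600.0  # illustrative epoch range

# "all" is now accepted because the camera filter became a conditional clause
resp = requests.get(
    f"http://localhost:5000/api/preview/all/start/{start_ts}/end/{end_ts}"
)

# each clip now carries its camera alongside the relative src path
for clip in resp.json():
    print(clip["camera"], clip["src"], clip["start"], clip["end"])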

frigate/timeline.py

@@ -29,6 +29,7 @@ class TimelineProcessor(threading.Thread):
self.config = config
self.queue = queue
self.stop_event = stop_event
self.pre_event_cache: dict[str, list[dict[str, any]]] = {}
def run(self) -> None:
while not self.stop_event.is_set():
@@ -48,14 +49,39 @@ class TimelineProcessor(threading.Thread):
camera, event_type, prev_event_data, event_data
)
def insert_or_save(
self,
entry: dict[str, any],
prev_event_data: dict[any, any],
event_data: dict[any, any],
) -> None:
"""Insert into db or cache."""
id = entry[Timeline.source_id]
if not event_data["has_clip"] and not event_data["has_snapshot"]:
            # the related event has not been saved yet, so cache the entry
            if id in self.pre_event_cache:
self.pre_event_cache[id].append(entry)
else:
self.pre_event_cache[id] = [entry]
else:
            # the event is saved; flush any cached entries to the db, then insert this one
            if id in self.pre_event_cache:
for e in self.pre_event_cache[id]:
Timeline.insert(e).execute()
self.pre_event_cache.pop(id)
Timeline.insert(entry).execute()
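The effect is a small write-behind cache keyed by event id: entries for an event that has neither a clip nor a snapshot are held in memory, and the first entry that arrives after the event is saved flushes the backlog. A standalone sketch of the same pattern, with illustrative names rather than Frigate's:

from collections import defaultdict

pre_event_cache: dict[str, list[dict]] = defaultdict(list)
db: list[dict] = []  # stand-in for the Timeline table

def insert_or_cache(entry: dict, event_saved: bool) -> None:
    event_id = entry["source_id"]
    if not event_saved:
        # the event may never be saved, so hold its entries in memory
        pre_event_cache[event_id].append(entry)
        return
    # the event is saved: flush any held entries, then write the new one
    db.extend(pre_event_cache.pop(event_id, []))
    db.append(entry)

insert_or_cache({"source_id": "abc", "class_type": "visible"}, event_saved=False)
insert_or_cache({"source_id": "abc", "class_type": "gone"}, event_saved=True)
assert len(db) == 2  # both entries land in the db once the event is saved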
def handle_object_detection(
self,
camera: str,
event_type: str,
prev_event_data: dict[any, any],
event_data: dict[any, any],
) -> bool:
"""Handle object detection."""
save = False
camera_config = self.config.cameras[camera]
timeline_entry = {
@@ -70,6 +96,7 @@ class TimelineProcessor(threading.Thread):
event_data["box"],
),
"label": event_data["label"],
"sub_label": event_data.get("sub_label"),
"region": to_relative_box(
camera_config.detect.width,
camera_config.detect.height,
@@ -80,41 +107,36 @@ class TimelineProcessor(threading.Thread):
}
if event_type == "start":
timeline_entry[Timeline.class_type] = "visible"
save = True
elif event_type == "update":
            # object has entered a new zone
if (
len(prev_event_data["current_zones"]) < len(event_data["current_zones"])
and not event_data["stationary"]
):
timeline_entry[Timeline.class_type] = "entered_zone"
timeline_entry[Timeline.data]["zones"] = event_data["current_zones"]
save = True
elif prev_event_data["stationary"] != event_data["stationary"]:
timeline_entry[Timeline.class_type] = (
"stationary" if event_data["stationary"] else "active"
)
save = True
elif prev_event_data["attributes"] == {} and event_data["attributes"] != {}:
timeline_entry[Timeline.class_type] = "attribute"
timeline_entry[Timeline.data]["attribute"] = list(
event_data["attributes"].keys()
)[0]
save = True
elif not prev_event_data.get("sub_label") and event_data.get("sub_label"):
sub_label = event_data["sub_label"][0]
if sub_label not in ALL_ATTRIBUTE_LABELS:
timeline_entry[Timeline.class_type] = "sub_label"
timeline_entry[Timeline.data]["sub_label"] = sub_label
save = True
elif event_type == "end":
if event_data["has_clip"] or event_data["has_snapshot"]:
timeline_entry[Timeline.class_type] = "gone"
Timeline.insert(timeline_entry).execute()
else:
# if event was not saved then the timeline entries should be deleted
Timeline.delete().where(
Timeline.source_id == event_data["id"]
).execute()
timeline_entry[Timeline.class_type] = "gone"
save = True
if save:
self.insert_or_save(timeline_entry, prev_event_data, event_data)