forked from Github/frigate
Improve graph using pandas (#9234)
* Ensure viewport is always full screen
* Protect against hour with no cards and ensure data is consistent
* Reduce grouped up image refreshes
* Include current hour and fix scrubbing bugginess
* Scroll initially selected timeline into view
* Expand timeline class type
* Use poster image for preview on video player instead of using separate image view
* Fix available streaming modes
* Increase timing for grouping timeline items
* Fix audio activity listener
* Fix player not switching views correctly
* Use player time to convert to timeline time
* Update sub labels for previous timeline items
* Show mini timeline bar for non-selected items
* Rewrite desktop timeline to use separate dynamic video player component
* Extend improvements to mobile as well
* Improve time formatting
* Fix scroll
* Fix no preview case
* Mobile fixes
* Audio toggle fixes
* More fixes for mobile
* Improve scaling of graph motion activity
* Add keyboard shortcut hook and support shortcuts for playback page
* Fix sizing of dialog
* Improve height scaling of dialog
* Simplify and fix layout system for timeline
* Fix timeline items not working
* Implement basic Frigate+ submitting from timeline
committed by Blake Blackshear
parent 9c4b69191b
commit af3f6dadcb
@@ -16,6 +16,7 @@ from urllib.parse import unquote
import cv2
import numpy as np
+import pandas as pd
import pytz
import requests
from flask import (

@@ -390,6 +391,17 @@ def set_sub_label(id):
    new_sub_label = json.get("subLabel")
    new_score = json.get("subLabelScore")

+    if new_sub_label is None:
+        return make_response(
+            jsonify(
+                {
+                    "success": False,
+                    "message": "A sub label must be supplied",
+                }
+            ),
+            400,
+        )
+
    if new_sub_label and len(new_sub_label) > 100:
        return make_response(
            jsonify(

@@ -415,6 +427,7 @@ def set_sub_label(id):
    )

    if not event.end_time:
        # update tracked object
        tracked_obj: TrackedObject = (
            current_app.detected_frames_processor.camera_states[
                event.camera

@@ -424,6 +437,11 @@ def set_sub_label(id):
        if tracked_obj:
            tracked_obj.obj_data["sub_label"] = (new_sub_label, new_score)

+    # update timeline items
+    Timeline.update(
+        data=Timeline.data.update({"sub_label": (new_sub_label, new_score)})
+    ).where(Timeline.source_id == id).execute()
+
    event.sub_label = new_sub_label

    if new_score:

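For context, a minimal client-side sketch of how the stricter set_sub_label validation behaves, assuming Frigate's usual /api/events/<id>/sub_label route and default port 5000; the event id and score below are made up:

import requests

API = "http://127.0.0.1:5000/api/events"
event_id = "1718393117.8-abc123"  # hypothetical event id

# missing subLabel -> the new guard returns 400 with "A sub label must be supplied"
resp = requests.post(f"{API}/{event_id}/sub_label", json={})
print(resp.status_code, resp.json().get("message"))

# valid request: the sub label (and optional score) is also mirrored into Timeline rows
resp = requests.post(
    f"{API}/{event_id}/sub_label",
    json={"subLabel": "john", "subLabelScore": 0.82},
)
print(resp.status_code)
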
@@ -739,41 +757,59 @@ def hourly_timeline_activity(camera_name: str):

    # set initial start so data is representative of full hour
    hours[int(key.timestamp())].append(
-        {
-            "date": key.timestamp(),
-            "count": 0,
-            "type": "motion",
-        }
+        [
+            key.timestamp(),
+            0,
+            False,
+        ]
    )

    for recording in all_recordings:
        if recording.start_time > check:
            hours[int(key.timestamp())].append(
-                {
-                    "date": (key + timedelta(hours=1)).timestamp(),
-                    "count": 0,
-                    "type": "motion",
-                }
+                [
+                    (key + timedelta(minutes=59, seconds=59)).timestamp(),
+                    0,
+                    False,
+                ]
            )
            key = key + timedelta(hours=1)
            check = (key + timedelta(hours=1)).timestamp()
            hours[int(key.timestamp())].append(
-                {
-                    "date": key.timestamp(),
-                    "count": 0,
-                    "type": "motion",
-                }
+                [
+                    key.timestamp(),
+                    0,
+                    False,
+                ]
            )

-        data_type = "motion" if recording.objects == 0 else "objects"
+        data_type = recording.objects > 0
+        count = recording.motion + recording.objects
        hours[int(key.timestamp())].append(
-            {
-                "date": recording.start_time + (recording.duration / 2),
-                "count": recording.motion,
-                "type": data_type,
-            }
+            [
+                recording.start_time + (recording.duration / 2),
+                0 if count == 0 else np.log2(count),
+                data_type,
+            ]
        )

+    # resample data using pandas to get activity on minute to minute basis
+    for key, data in hours.items():
+        df = pd.DataFrame(data, columns=["date", "count", "hasObjects"])
+
+        # set date as datetime index
+        df["date"] = pd.to_datetime(df["date"], unit="s")
+        df.set_index(["date"], inplace=True)
+
+        # normalize data
+        df = df.resample("T").mean().fillna(0)
+
+        # change types for output
+        df.index = df.index.astype(int) // (10**9)
+        df["count"] = df["count"].astype(int)
+        df["hasObjects"] = df["hasObjects"].astype(bool)
+        hours[key] = df.reset_index().to_dict("records")

    return jsonify(hours)

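Since the endpoint now returns [date, count, hasObjects] rows resampled to one-minute buckets, here is a minimal standalone sketch of that pandas pipeline with made-up timestamps, showing how sparse per-segment samples become a dense minute-by-minute series:

import numpy as np
import pandas as pd

# one row per recording segment: [epoch seconds, log2-scaled activity, any objects?]
data = [
    [1704067200.0, 0, False],            # hour start sentinel (sample values)
    [1704067230.5, np.log2(12), True],   # segment midpoint, 12 motion + object boxes
    [1704067290.5, np.log2(3), False],
    [1704070799.0, 0, False],            # hour end sentinel
]

df = pd.DataFrame(data, columns=["date", "count", "hasObjects"])
df["date"] = pd.to_datetime(df["date"], unit="s")
df.set_index(["date"], inplace=True)

# mean-resample to one row per minute; minutes with no segments become 0
df = df.resample("T").mean().fillna(0)

# back to epoch seconds and plain Python types for the JSON response
df.index = df.index.astype(int) // (10**9)
df["count"] = df["count"].astype(int)
df["hasObjects"] = df["hasObjects"].astype(bool)
print(df.reset_index().to_dict("records")[:3])
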
@@ -1840,6 +1876,7 @@ def recordings(camera_name):
            Recordings.segment_size,
            Recordings.motion,
            Recordings.objects,
+            Recordings.duration,
        )
        .where(
            Recordings.camera == camera_name,

@@ -7,7 +7,6 @@ from multiprocessing import Queue
from multiprocessing.synchronize import Event as MpEvent

from frigate.config import FrigateConfig
-from frigate.const import ALL_ATTRIBUTE_LABELS
from frigate.events.maintainer import EventTypeEnum
from frigate.models import Timeline
from frigate.util.builtin import to_relative_box

@@ -85,12 +84,13 @@ class TimelineProcessor(threading.Thread):
        """Handle object detection."""
        save = False
        camera_config = self.config.cameras[camera]
+        event_id = event_data["id"]

        timeline_entry = {
            Timeline.timestamp: event_data["frame_time"],
            Timeline.camera: camera,
            Timeline.source: "tracked_object",
-            Timeline.source_id: event_data["id"],
+            Timeline.source_id: event_id,
            Timeline.data: {
                "box": to_relative_box(
                    camera_config.detect.width,

@@ -107,6 +107,16 @@ class TimelineProcessor(threading.Thread):
                "attribute": "",
            },
        }

+        # update sub labels for existing entries that haven't been added yet
+        if (
+            prev_event_data != None
+            and prev_event_data["sub_label"] != event_data["sub_label"]
+            and event_id in self.pre_event_cache.keys()
+        ):
+            for e in self.pre_event_cache[event_id]:
+                e[Timeline.data]["sub_label"] = event_data["sub_label"]
+
        if event_type == "start":
            timeline_entry[Timeline.class_type] = "visible"
            save = True

@@ -129,13 +139,6 @@ class TimelineProcessor(threading.Thread):
                    event_data["attributes"].keys()
                )[0]
                save = True
-            elif not prev_event_data.get("sub_label") and event_data.get("sub_label"):
-                sub_label = event_data["sub_label"][0]
-
-                if sub_label not in ALL_ATTRIBUTE_LABELS:
-                    timeline_entry[Timeline.class_type] = "sub_label"
-                    timeline_entry[Timeline.data]["sub_label"] = sub_label
-                    save = True
        elif event_type == "end":
            timeline_entry[Timeline.class_type] = "gone"
            save = True

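To make the new pre_event_cache back-fill concrete, a toy dict-based illustration (not the real TimelineProcessor or Timeline model) of applying a late-arriving sub label to cached entries that have not been written to the database yet:

# Hypothetical stand-in structures; Frigate keys cached timeline rows by event id.
pre_event_cache = {
    "1718393117.8-abc123": [
        {"class_type": "visible", "data": {"label": "person", "sub_label": None}},
        {"class_type": "entered_zone", "data": {"label": "person", "sub_label": None}},
    ]
}

def backfill_sub_label(cache: dict, event_id: str, sub_label: str) -> None:
    """Mirror a changed sub label into every cached entry for the event."""
    for entry in cache.get(event_id, []):
        entry["data"]["sub_label"] = sub_label

backfill_sub_label(pre_event_cache, "1718393117.8-abc123", "john")
print(pre_event_cache["1718393117.8-abc123"][0]["data"]["sub_label"])  # -> john
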
@@ -378,7 +378,7 @@ def auto_detect_hwaccel() -> str:
    try:
        cuda = False
        vaapi = False
-        resp = requests.get("http://192.168.50.106:1984/api/ffmpeg/hardware", timeout=3)
+        resp = requests.get("http://127.0.0.1:1984/api/ffmpeg/hardware", timeout=3)

        if resp.status_code == 200:
            data: dict[str, list[dict[str, str]]] = resp.json()
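The hwaccel probe above asks go2rtc which ffmpeg hardware it can see; a rough sketch of that flow, assuming a response shaped like {"sources": [{"name": "cuda ..."}, ...]} and the preset names shown (both are assumptions, the real payload and presets may differ):

import requests

def probe_hwaccel(base_url: str = "http://127.0.0.1:1984") -> str:
    """Best-effort hardware acceleration detection via go2rtc."""
    cuda = vaapi = False
    try:
        resp = requests.get(f"{base_url}/api/ffmpeg/hardware", timeout=3)
        if resp.status_code == 200:
            data: dict[str, list[dict[str, str]]] = resp.json()
            names = " ".join(s.get("name", "") for s in data.get("sources", []))
            cuda = "cuda" in names
            vaapi = "vaapi" in names
    except requests.RequestException:
        pass  # go2rtc unreachable: fall back to software decoding
    if cuda:
        return "preset-nvidia-h264"  # assumed preset name
    if vaapi:
        return "preset-vaapi"  # assumed preset name
    return ""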