forked from Github/frigate
Compare commits
12 Commits
dependabot
...
v0.15.0-be
| Author | SHA1 | Date |
|---|---|---|
| | 6b12a45a95 | |
| | 0b9c4c18dd | |
| | d0cc8cb64b | |
| | bb86e71e65 | |
| | 8aa6297308 | |
| | d3b631a952 | |
| | 47d495fc01 | |
| | 32322b23b2 | |
| | c0ba98e26f | |
| | a5a7cd3107 | |
| | a729408599 | |
| | 4dddc53735 |
@@ -231,28 +231,11 @@ docker run -d \

### Setup Decoder

The decoder you need to pass in the `hwaccel_args` will depend on the input video.

A list of supported codecs (you can use `ffmpeg -decoders | grep cuvid` in the container to get the ones your card supports):

```
V..... h263_cuvid           Nvidia CUVID H263 decoder (codec h263)
V..... h264_cuvid           Nvidia CUVID H264 decoder (codec h264)
V..... hevc_cuvid           Nvidia CUVID HEVC decoder (codec hevc)
V..... mjpeg_cuvid          Nvidia CUVID MJPEG decoder (codec mjpeg)
V..... mpeg1_cuvid          Nvidia CUVID MPEG1VIDEO decoder (codec mpeg1video)
V..... mpeg2_cuvid          Nvidia CUVID MPEG2VIDEO decoder (codec mpeg2video)
V..... mpeg4_cuvid          Nvidia CUVID MPEG4 decoder (codec mpeg4)
V..... vc1_cuvid            Nvidia CUVID VC1 decoder (codec vc1)
V..... vp8_cuvid            Nvidia CUVID VP8 decoder (codec vp8)
V..... vp9_cuvid            Nvidia CUVID VP9 decoder (codec vp9)
```
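To check the same thing from a script rather than an interactive shell, a minimal sketch (assuming `ffmpeg` is on the container's `PATH`) is:

```python
import subprocess

# List the CUVID decoders this ffmpeg build exposes, mirroring
# `ffmpeg -decoders | grep cuvid` from the shell command above.
out = subprocess.run(
    ["ffmpeg", "-hide_banner", "-decoders"],
    capture_output=True,
    text=True,
    check=True,
).stdout
print("\n".join(line for line in out.splitlines() if "cuvid" in line))
```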
For example, for H264 video, you'll select `preset-nvidia-h264`.
Using `preset-nvidia`, ffmpeg will automatically select the necessary profile for the incoming video and will log an error if the profile is not supported by your GPU.

```yaml
ffmpeg:
  hwaccel_args: preset-nvidia-h264
  hwaccel_args: preset-nvidia
```

If everything is working correctly, you should see a significant improvement in performance.
@@ -132,6 +132,28 @@ cameras:
      - detect
```

## Handling Complex Passwords

go2rtc expects URL-encoded passwords in the config; [urlencoder.org](https://urlencoder.org) can be used for this purpose.

For example:

```yaml
go2rtc:
  streams:
    my_camera: rtsp://username:$@foo%@192.168.1.100
```

becomes

```yaml
go2rtc:
  streams:
    my_camera: rtsp://username:$%40foo%25@192.168.1.100
```

See [this comment](https://github.com/AlexxIT/go2rtc/issues/1217#issuecomment-2242296489) for more information.
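If you'd rather generate the encoded form locally than paste a password into a website, a small Python sketch that reproduces the example above (keeping `$` literal, since it is a valid sub-delimiter in the userinfo part of a URL) is:

```python
from urllib.parse import quote

password = "$@foo%"
# Percent-encode everything except "$": "@" becomes %40 and "%" becomes %25.
encoded = quote(password, safe="$")
print(encoded)  # -> $%40foo%25
print(f"rtsp://username:{encoded}@192.168.1.100")
```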
## Advanced Restream Configurations

The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.9.2#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:

docs/static/frigate-api.yaml (vendored, 1199 lines changed)
File diff suppressed because it is too large
@@ -17,8 +17,8 @@ from fastapi.responses import JSONResponse, PlainTextResponse
from markupsafe import escape
from peewee import operator

from frigate.api.defs.app_body import AppConfigSetBody
from frigate.api.defs.app_query_parameters import AppTimelineHourlyQueryParameters
from frigate.api.defs.query.app_query_parameters import AppTimelineHourlyQueryParameters
from frigate.api.defs.request.app_body import AppConfigSetBody
from frigate.api.defs.tags import Tags
from frigate.config import FrigateConfig
from frigate.const import CONFIG_DIR

@@ -18,7 +18,7 @@ from joserfc import jwt
from peewee import DoesNotExist
from slowapi import Limiter

from frigate.api.defs.app_body import (
from frigate.api.defs.request.app_body import (
    AppPostLoginBody,
    AppPostUsersBody,
    AppPutPasswordBody,

@@ -85,7 +85,12 @@ def get_remote_addr(request: Request):
        return str(ip)

    # if there wasn't anything in the route, just return the default
    return request.remote_addr or "127.0.0.1"
    remote_addr = None

    if hasattr(request, "remote_addr"):
        remote_addr = request.remote_addr

    return remote_addr or "127.0.0.1"


def get_jwt_secret() -> str:
@@ -324,7 +329,7 @@ def login(request: Request, body: AppPostLoginBody):
    try:
        db_user: User = User.get_by_id(user)
    except DoesNotExist:
        return JSONResponse(content={"message": "Login failed"}, status_code=400)
        return JSONResponse(content={"message": "Login failed"}, status_code=401)

    password_hash = db_user.password_hash
    if verify_password(password, password_hash):

@@ -335,7 +340,7 @@ def login(request: Request, body: AppPostLoginBody):
            response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
        )
        return response
    return JSONResponse(content={"message": "Login failed"}, status_code=400)
    return JSONResponse(content={"message": "Login failed"}, status_code=401)


@router.get("/users")

@@ -3,7 +3,7 @@ from typing import Union
from pydantic import BaseModel
from pydantic.json_schema import SkipJsonSchema

from frigate.review.maintainer import SeverityEnum
from frigate.review.types import SeverityEnum


class ReviewQueryParams(BaseModel):

frigate/api/defs/response/event_response.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from typing import Any, Optional

from pydantic import BaseModel, ConfigDict


class EventResponse(BaseModel):
    id: str
    label: str
    sub_label: Optional[str]
    camera: str
    start_time: float
    end_time: Optional[float]
    false_positive: Optional[bool]
    zones: list[str]
    thumbnail: str
    has_clip: bool
    has_snapshot: bool
    retain_indefinitely: bool
    plus_id: Optional[str]
    model_hash: Optional[str]
    detector_type: Optional[str]
    model_type: Optional[str]
    data: dict[str, Any]

    model_config = ConfigDict(protected_namespaces=())


class EventCreateResponse(BaseModel):
    success: bool
    message: str
    event_id: str


class EventMultiDeleteResponse(BaseModel):
    success: bool
    deleted_events: list[str]
    not_found_events: list[str]


class EventUploadPlusResponse(BaseModel):
    success: bool
    plus_id: str
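As a quick illustration of what the annotated endpoints now advertise, a payload like the following (all values hypothetical) validates against `EventResponse`:

```python
from frigate.api.defs.response.event_response import EventResponse

# Hypothetical event payload; field names and types follow the model above.
event = EventResponse(
    id="1700000000.123456-abcd12",
    label="person",
    sub_label=None,
    camera="front_door",
    start_time=1700000000.1,
    end_time=1700000010.5,
    false_positive=None,
    zones=["porch"],
    thumbnail="<base64 jpeg>",
    has_clip=True,
    has_snapshot=True,
    retain_indefinitely=False,
    plus_id=None,
    model_hash=None,
    detector_type=None,
    model_type=None,
    data={"type": "object", "score": 0.87, "top_score": 0.91},
)
print(event.model_dump()["label"])  # -> person
```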
@@ -3,7 +3,7 @@ from typing import Dict

from pydantic import BaseModel, Json

from frigate.review.maintainer import SeverityEnum
from frigate.review.types import SeverityEnum


class ReviewSegmentResponse(BaseModel):

@@ -14,7 +14,16 @@ from fastapi.responses import JSONResponse
from peewee import JOIN, DoesNotExist, fn, operator
from playhouse.shortcuts import model_to_dict

from frigate.api.defs.events_body import (
from frigate.api.defs.query.events_query_parameters import (
    DEFAULT_TIME_RANGE,
    EventsQueryParams,
    EventsSearchQueryParams,
    EventsSummaryQueryParams,
)
from frigate.api.defs.query.regenerate_query_parameters import (
    RegenerateQueryParameters,
)
from frigate.api.defs.request.events_body import (
    EventsCreateBody,
    EventsDeleteBody,
    EventsDescriptionBody,

@@ -22,19 +31,15 @@ from frigate.api.defs.events_body import (
    EventsSubLabelBody,
    SubmitPlusBody,
)
from frigate.api.defs.events_query_parameters import (
    DEFAULT_TIME_RANGE,
    EventsQueryParams,
    EventsSearchQueryParams,
    EventsSummaryQueryParams,
)
from frigate.api.defs.regenerate_query_parameters import (
    RegenerateQueryParameters,
from frigate.api.defs.response.event_response import (
    EventCreateResponse,
    EventMultiDeleteResponse,
    EventResponse,
    EventUploadPlusResponse,
)
from frigate.api.defs.response.generic_response import GenericResponse
from frigate.api.defs.tags import Tags
from frigate.const import (
    CLIPS_DIR,
)
from frigate.const import CLIPS_DIR
from frigate.embeddings import EmbeddingsContext
from frigate.events.external import ExternalEventProcessor
from frigate.models import Event, ReviewSegment, Timeline

@@ -46,7 +51,7 @@ logger = logging.getLogger(__name__)
router = APIRouter(tags=[Tags.events])


@router.get("/events")
@router.get("/events", response_model=list[EventResponse])
def events(params: EventsQueryParams = Depends()):
    camera = params.camera
    cameras = params.cameras

@@ -248,6 +253,8 @@ def events(params: EventsQueryParams = Depends()):
            order_by = Event.start_time.asc()
        elif sort == "date_desc":
            order_by = Event.start_time.desc()
        else:
            order_by = Event.start_time.desc()
    else:
        order_by = Event.start_time.desc()

@@ -263,7 +270,7 @@ def events(params: EventsQueryParams = Depends()):
    return JSONResponse(content=list(events))


@router.get("/events/explore")
@router.get("/events/explore", response_model=list[EventResponse])
def events_explore(limit: int = 10):
    # get distinct labels for all events
    distinct_labels = Event.select(Event.label).distinct().order_by(Event.label)

@@ -308,7 +315,8 @@ def events_explore(limit: int = 10):
            "data": {
                k: v
                for k, v in event.data.items()
                if k in ["type", "score", "top_score", "description"]
                if k
                in ["type", "score", "top_score", "description", "sub_label_score"]
            },
            "event_count": label_counts[event.label],
        }

@@ -324,7 +332,7 @@ def events_explore(limit: int = 10):
    return JSONResponse(content=processed_events)


@router.get("/event_ids")
@router.get("/event_ids", response_model=list[EventResponse])
def event_ids(ids: str):
    ids = ids.split(",")

@@ -582,19 +590,17 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())

        processed_events.append(processed_event)

    # Sort by search distance if search_results are available, otherwise by start_time as default
    if search_results:
    if (sort is None or sort == "relevance") and search_results:
        processed_events.sort(key=lambda x: x.get("search_distance", float("inf")))
    elif min_score is not None and max_score is not None and sort == "score_asc":
        processed_events.sort(key=lambda x: x["score"])
    elif min_score is not None and max_score is not None and sort == "score_desc":
        processed_events.sort(key=lambda x: x["score"], reverse=True)
    elif sort == "date_asc":
        processed_events.sort(key=lambda x: x["start_time"])
    else:
        if sort == "score_asc":
            processed_events.sort(key=lambda x: x["score"])
        elif sort == "score_desc":
            processed_events.sort(key=lambda x: x["score"], reverse=True)
        elif sort == "date_asc":
            processed_events.sort(key=lambda x: x["start_time"])
        else:
            # "date_desc" default
            processed_events.sort(key=lambda x: x["start_time"], reverse=True)
        # "date_desc" default
        processed_events.sort(key=lambda x: x["start_time"], reverse=True)

    # Limit the number of events returned
    processed_events = processed_events[:limit]
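Taken on its own, the rewritten branch above amounts to this fallback order (a standalone sketch, not the endpoint itself): relevance ordering applies only when semantic search results exist, explicit score and date sorts are honored otherwise, and anything else defaults to newest-first.

```python
def sort_processed_events(processed_events, sort=None, search_results=None):
    # Standalone sketch of the fallback order in events_search above.
    if (sort is None or sort == "relevance") and search_results:
        processed_events.sort(key=lambda x: x.get("search_distance", float("inf")))
    elif sort == "score_asc":
        processed_events.sort(key=lambda x: x["score"])
    elif sort == "score_desc":
        processed_events.sort(key=lambda x: x["score"], reverse=True)
    elif sort == "date_asc":
        processed_events.sort(key=lambda x: x["start_time"])
    else:
        # "date_desc" default
        processed_events.sort(key=lambda x: x["start_time"], reverse=True)
    return processed_events
```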
@@ -647,7 +653,7 @@ def events_summary(params: EventsSummaryQueryParams = Depends()):
    return JSONResponse(content=[e for e in groups.dicts()])


@router.get("/events/{event_id}")
@router.get("/events/{event_id}", response_model=EventResponse)
def event(event_id: str):
    try:
        return model_to_dict(Event.get(Event.id == event_id))

@@ -655,7 +661,7 @@ def event(event_id: str):
        return JSONResponse(content="Event not found", status_code=404)


@router.post("/events/{event_id}/retain")
@router.post("/events/{event_id}/retain", response_model=GenericResponse)
def set_retain(event_id: str):
    try:
        event = Event.get(Event.id == event_id)

@@ -674,7 +680,7 @@ def set_retain(event_id: str):
    )


@router.post("/events/{event_id}/plus")
@router.post("/events/{event_id}/plus", response_model=EventUploadPlusResponse)
def send_to_plus(request: Request, event_id: str, body: SubmitPlusBody = None):
    if not request.app.frigate_config.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"

@@ -786,7 +792,7 @@ def send_to_plus(request: Request, event_id: str, body: SubmitPlusBody = None):
    )


@router.put("/events/{event_id}/false_positive")
@router.put("/events/{event_id}/false_positive", response_model=EventUploadPlusResponse)
def false_positive(request: Request, event_id: str):
    if not request.app.frigate_config.plus_api.is_active():
        message = "PLUS_API_KEY environment variable is not set"

@@ -875,7 +881,7 @@ def false_positive(request: Request, event_id: str):
    )


@router.delete("/events/{event_id}/retain")
@router.delete("/events/{event_id}/retain", response_model=GenericResponse)
def delete_retain(event_id: str):
    try:
        event = Event.get(Event.id == event_id)

@@ -894,7 +900,7 @@ def delete_retain(event_id: str):
    )


@router.post("/events/{event_id}/sub_label")
@router.post("/events/{event_id}/sub_label", response_model=GenericResponse)
def set_sub_label(
    request: Request,
    event_id: str,

@@ -946,7 +952,7 @@ def set_sub_label(
    )


@router.post("/events/{event_id}/description")
@router.post("/events/{event_id}/description", response_model=GenericResponse)
def set_description(
    request: Request,
    event_id: str,

@@ -993,7 +999,7 @@ def set_description(
    )


@router.put("/events/{event_id}/description/regenerate")
@router.put("/events/{event_id}/description/regenerate", response_model=GenericResponse)
def regenerate_description(
    request: Request, event_id: str, params: RegenerateQueryParameters = Depends()
):

@@ -1064,14 +1070,14 @@ def delete_single_event(event_id: str, request: Request) -> dict:
    return {"success": True, "message": f"Event {event_id} deleted"}


@router.delete("/events/{event_id}")
@router.delete("/events/{event_id}", response_model=GenericResponse)
def delete_event(request: Request, event_id: str):
    result = delete_single_event(event_id, request)
    status_code = 200 if result["success"] else 404
    return JSONResponse(content=result, status_code=status_code)


@router.delete("/events/")
@router.delete("/events/", response_model=EventMultiDeleteResponse)
def delete_events(request: Request, body: EventsDeleteBody):
    if not body.event_ids:
        return JSONResponse(

@@ -1097,7 +1103,7 @@ def delete_events(request: Request, body: EventsDeleteBody):
    return JSONResponse(content=response, status_code=200)


@router.post("/events/{camera_name}/{label}/create")
@router.post("/events/{camera_name}/{label}/create", response_model=EventCreateResponse)
def create_event(
    request: Request,
    camera_name: str,

@@ -1153,7 +1159,7 @@ def create_event(
    )


@router.put("/events/{event_id}/end")
@router.put("/events/{event_id}/end", response_model=GenericResponse)
def end_event(request: Request, event_id: str, body: EventsEndBody):
    try:
        end_time = body.end_time or datetime.datetime.now().timestamp()

@@ -9,6 +9,7 @@ import psutil
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse
from peewee import DoesNotExist
from playhouse.shortcuts import model_to_dict

from frigate.api.defs.request.export_recordings_body import ExportRecordingsBody
from frigate.api.defs.tags import Tags

@@ -207,3 +208,14 @@ def export_delete(event_id: str):
        ),
        status_code=200,
    )


@router.get("/exports/{export_id}")
def get_export(export_id: str):
    try:
        return JSONResponse(content=model_to_dict(Export.get(Export.id == export_id)))
    except DoesNotExist:
        return JSONResponse(
            content={"success": False, "message": "Export not found"},
            status_code=404,
        )

@@ -87,7 +87,11 @@ def create_fastapi_app(
    logger.info("FastAPI started")

    # Rate limiter (used for login endpoint)
    auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit or "")
    if frigate_config.auth.failed_login_rate_limit is None:
        limiter.enabled = False
    else:
        auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit)

    app.state.limiter = limiter
    app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
    app.add_middleware(SlowAPIMiddleware)
@@ -20,7 +20,7 @@ from pathvalidate import sanitize_filename
from peewee import DoesNotExist, fn
from tzlocal import get_localzone_name

from frigate.api.defs.media_query_parameters import (
from frigate.api.defs.query.media_query_parameters import (
    Extension,
    MediaEventsSnapshotQueryParams,
    MediaLatestFrameQueryParams,

@@ -12,20 +12,21 @@ from fastapi.responses import JSONResponse
from peewee import Case, DoesNotExist, fn, operator
from playhouse.shortcuts import model_to_dict

from frigate.api.defs.generic_response import GenericResponse
from frigate.api.defs.review_body import ReviewModifyMultipleBody
from frigate.api.defs.review_query_parameters import (
from frigate.api.defs.query.review_query_parameters import (
    ReviewActivityMotionQueryParams,
    ReviewQueryParams,
    ReviewSummaryQueryParams,
)
from frigate.api.defs.review_responses import (
from frigate.api.defs.request.review_body import ReviewModifyMultipleBody
from frigate.api.defs.response.generic_response import GenericResponse
from frigate.api.defs.response.review_response import (
    ReviewActivityMotionResponse,
    ReviewSegmentResponse,
    ReviewSummaryResponse,
)
from frigate.api.defs.tags import Tags
from frigate.models import Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.util.builtin import get_tz_modifiers

logger = logging.getLogger(__name__)

@@ -161,7 +162,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "alert"),
                        (ReviewSegment.severity == SeverityEnum.alert),
                        ReviewSegment.has_been_reviewed,
                    )
                ],

@@ -173,7 +174,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "detection"),
                        (ReviewSegment.severity == SeverityEnum.detection),
                        ReviewSegment.has_been_reviewed,
                    )
                ],

@@ -185,7 +186,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "alert"),
                        (ReviewSegment.severity == SeverityEnum.alert),
                        1,
                    )
                ],

@@ -197,7 +198,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "detection"),
                        (ReviewSegment.severity == SeverityEnum.detection),
                        1,
                    )
                ],

@@ -230,6 +231,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
        label_clause = reduce(operator.or_, label_clauses)
        clauses.append((label_clause))

    day_in_seconds = 60 * 60 * 24
    last_month = (
        ReviewSegment.select(
            fn.strftime(

@@ -246,7 +248,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "alert"),
                        (ReviewSegment.severity == SeverityEnum.alert),
                        ReviewSegment.has_been_reviewed,
                    )
                ],

@@ -258,7 +260,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "detection"),
                        (ReviewSegment.severity == SeverityEnum.detection),
                        ReviewSegment.has_been_reviewed,
                    )
                ],

@@ -270,7 +272,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "alert"),
                        (ReviewSegment.severity == SeverityEnum.alert),
                        1,
                    )
                ],

@@ -282,7 +284,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                None,
                [
                    (
                        (ReviewSegment.severity == "detection"),
                        (ReviewSegment.severity == SeverityEnum.detection),
                        1,
                    )
                ],

@@ -292,7 +294,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
        )
        .where(reduce(operator.and_, clauses))
        .group_by(
            (ReviewSegment.start_time + seconds_offset).cast("int") / (3600 * 24),
            (ReviewSegment.start_time + seconds_offset).cast("int") / day_in_seconds,
        )
        .order_by(ReviewSegment.start_time.desc())
    )
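The new `day_in_seconds` constant just names the existing magic number; the bucketing arithmetic is unchanged. As a standalone worked sketch (the UTC-5 offset is an assumed example):

```python
day_in_seconds = 60 * 60 * 24  # 86400, the constant introduced above

seconds_offset = -5 * 3600    # assumed example offset (UTC-5)
start_time = 1_732_400_000.0  # an arbitrary epoch timestamp
# Shift into local time, then integer-divide by the day length: review
# segments from the same local calendar day land in the same bucket,
# matching the group_by expression above.
bucket = int(start_time + seconds_offset) // day_in_seconds
print(bucket)
```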
@@ -362,7 +364,7 @@ def delete_reviews(body: ReviewModifyMultipleBody):
    ReviewSegment.delete().where(ReviewSegment.id << list_of_ids).execute()

    return JSONResponse(
        content=({"success": True, "message": "Delete reviews"}), status_code=200
        content=({"success": True, "message": "Deleted review items."}), status_code=200
    )


@@ -4,7 +4,6 @@ import datetime
import logging
import os
import threading
from enum import Enum
from multiprocessing.synchronize import Event as MpEvent
from pathlib import Path

@@ -16,11 +15,6 @@ from frigate.models import Event, Timeline
logger = logging.getLogger(__name__)


class EventCleanupType(str, Enum):
    clips = "clips"
    snapshots = "snapshots"


CHUNK_SIZE = 50


@@ -67,19 +61,11 @@ class EventCleanup(threading.Thread):

        return self.camera_labels[camera]["labels"]

    def expire(self, media_type: EventCleanupType) -> list[str]:
    def expire_snapshots(self) -> list[str]:
        ## Expire events from unlisted cameras based on the global config
        if media_type == EventCleanupType.clips:
            expire_days = max(
                self.config.record.alerts.retain.days,
                self.config.record.detections.retain.days,
            )
            file_extension = None  # mp4 clips are no longer stored in /clips
            update_params = {"has_clip": False}
        else:
            retain_config = self.config.snapshots.retain
            file_extension = "jpg"
            update_params = {"has_snapshot": False}
        retain_config = self.config.snapshots.retain
        file_extension = "jpg"
        update_params = {"has_snapshot": False}

        distinct_labels = self.get_removed_camera_labels()

@@ -87,10 +73,7 @@ class EventCleanup(threading.Thread):
        # loop over object types in db
        for event in distinct_labels:
            # get expiration time for this label
            if media_type == EventCleanupType.snapshots:
                expire_days = retain_config.objects.get(
                    event.label, retain_config.default
                )
            expire_days = retain_config.objects.get(event.label, retain_config.default)

            expire_after = (
                datetime.datetime.now() - datetime.timedelta(days=expire_days)

@@ -162,13 +145,7 @@ class EventCleanup(threading.Thread):

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            if media_type == EventCleanupType.clips:
                expire_days = max(
                    camera.record.alerts.retain.days,
                    camera.record.detections.retain.days,
                )
            else:
                retain_config = camera.snapshots.retain
            retain_config = camera.snapshots.retain

            # get distinct objects in database for this camera
            distinct_labels = self.get_camera_labels(name)

@@ -176,10 +153,9 @@ class EventCleanup(threading.Thread):
            # loop over object types in db
            for event in distinct_labels:
                # get expiration time for this label
                if media_type == EventCleanupType.snapshots:
                    expire_days = retain_config.objects.get(
                        event.label, retain_config.default
                    )
                expire_days = retain_config.objects.get(
                    event.label, retain_config.default
                )

                expire_after = (
                    datetime.datetime.now() - datetime.timedelta(days=expire_days)

@@ -206,19 +182,143 @@ class EventCleanup(threading.Thread):
                for event in expired_events:
                    events_to_update.append(event.id)

                    if media_type == EventCleanupType.snapshots:
                        try:
                            media_name = f"{event.camera}-{event.id}"
                            media_path = Path(
                                f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                            )
                            media_path.unlink(missing_ok=True)
                            media_path = Path(
                                f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                            )
                            media_path.unlink(missing_ok=True)
                        except OSError as e:
                            logger.warning(f"Unable to delete event images: {e}")
                    try:
                        media_name = f"{event.camera}-{event.id}"
                        media_path = Path(
                            f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
                        )
                        media_path.unlink(missing_ok=True)
                        media_path = Path(
                            f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                        )
                        media_path.unlink(missing_ok=True)
                    except OSError as e:
                        logger.warning(f"Unable to delete event images: {e}")

            # update the clips attribute for the db entry
            for i in range(0, len(events_to_update), CHUNK_SIZE):
                batch = events_to_update[i : i + CHUNK_SIZE]
                logger.debug(f"Updating {update_params} for {len(batch)} events")
                Event.update(update_params).where(Event.id << batch).execute()

        return events_to_update

    def expire_clips(self) -> list[str]:
        ## Expire events from unlisted cameras based on the global config
        expire_days = max(
            self.config.record.alerts.retain.days,
            self.config.record.detections.retain.days,
        )
        file_extension = None  # mp4 clips are no longer stored in /clips
        update_params = {"has_clip": False}

        # get expiration time for this label

        expire_after = (
            datetime.datetime.now() - datetime.timedelta(days=expire_days)
        ).timestamp()
        # grab all events after specific time
        expired_events: list[Event] = (
            Event.select(
                Event.id,
                Event.camera,
            )
            .where(
                Event.camera.not_in(self.camera_keys),
                Event.start_time < expire_after,
                Event.retain_indefinitely == False,
            )
            .namedtuples()
            .iterator()
        )
        logger.debug(f"{len(list(expired_events))} events can be expired")
        # delete the media from disk
        for expired in expired_events:
            media_name = f"{expired.camera}-{expired.id}"
            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}")

            try:
                media_path.unlink(missing_ok=True)
                if file_extension == "jpg":
                    media_path = Path(
                        f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
                    )
                    media_path.unlink(missing_ok=True)
            except OSError as e:
                logger.warning(f"Unable to delete event images: {e}")

        # update the clips attribute for the db entry
        query = Event.select(Event.id).where(
            Event.camera.not_in(self.camera_keys),
            Event.start_time < expire_after,
            Event.retain_indefinitely == False,
        )

        events_to_update = []

        for batch in query.iterator():
            events_to_update.extend([event.id for event in batch])
            if len(events_to_update) >= CHUNK_SIZE:
                logger.debug(
                    f"Updating {update_params} for {len(events_to_update)} events"
                )
                Event.update(update_params).where(
                    Event.id << events_to_update
                ).execute()
                events_to_update = []

        # Update any remaining events
        if events_to_update:
            logger.debug(
                f"Updating clips/snapshots attribute for {len(events_to_update)} events"
            )
            Event.update(update_params).where(Event.id << events_to_update).execute()

        events_to_update = []
        now = datetime.datetime.now()

        ## Expire events from cameras based on the camera config
        for name, camera in self.config.cameras.items():
            expire_days = max(
                camera.record.alerts.retain.days,
                camera.record.detections.retain.days,
            )
            alert_expire_date = (
                now - datetime.timedelta(days=camera.record.alerts.retain.days)
            ).timestamp()
            detection_expire_date = (
                now - datetime.timedelta(days=camera.record.detections.retain.days)
            ).timestamp()
            # grab all events after specific time
            expired_events = (
                Event.select(
                    Event.id,
                    Event.camera,
                )
                .where(
                    Event.camera == name,
                    Event.retain_indefinitely == False,
                    (
                        (
                            (Event.data["max_severity"] != "detection")
                            | (Event.data["max_severity"].is_null())
                        )
                        & (Event.end_time < alert_expire_date)
                    )
                    | (
                        (Event.data["max_severity"] == "detection")
                        & (Event.end_time < detection_expire_date)
                    ),
                )
                .namedtuples()
                .iterator()
            )

            # delete the grabbed clips from disk
            # only snapshots are stored in /clips
            # so no need to delete mp4 files
            for event in expired_events:
                events_to_update.append(event.id)

            # update the clips attribute for the db entry
            for i in range(0, len(events_to_update), CHUNK_SIZE):

@@ -230,8 +330,9 @@ class EventCleanup(threading.Thread):

    def run(self) -> None:
        # only expire events every 5 minutes
        while not self.stop_event.wait(300):
            events_with_expired_clips = self.expire(EventCleanupType.clips)
        while not self.stop_event.wait(1):
            events_with_expired_clips = self.expire_clips()
            return

            # delete timeline entries for events that have expired recordings
            # delete up to 100,000 at a time

@@ -242,7 +343,7 @@ class EventCleanup(threading.Thread):
                Timeline.source_id << deleted_events_list[i : i + max_deletes]
            ).execute()

            self.expire(EventCleanupType.snapshots)
            self.expire_snapshots()

            # drop events from db where has_clip and has_snapshot are false
            events = (

@@ -210,6 +210,7 @@ class EventProcessor(threading.Thread):
                "top_score": event_data["top_score"],
                "attributes": attributes,
                "type": "object",
                "max_severity": event_data.get("max_severity"),
            },
        }


@@ -702,30 +702,7 @@ class TrackedObjectProcessor(threading.Thread):
            return False

        # If the object is not considered an alert or detection
        review_config = self.config.cameras[camera].review
        if not (
            (
                obj.obj_data["label"] in review_config.alerts.labels
                and (
                    not review_config.alerts.required_zones
                    or set(obj.entered_zones) & set(review_config.alerts.required_zones)
                )
            )
            or (
                (
                    not review_config.detections.labels
                    or obj.obj_data["label"] in review_config.detections.labels
                )
                and (
                    not review_config.detections.required_zones
                    or set(obj.entered_zones)
                    & set(review_config.detections.required_zones)
                )
            )
        ):
            logger.debug(
                f"Not creating clip for {obj.obj_data['id']} because it did not qualify as an alert or detection"
            )
        if obj.max_severity is None:
            return False

        return True

@@ -7,7 +7,6 @@ import random
import string
import sys
import threading
from enum import Enum
from multiprocessing.synchronize import Event as MpEvent
from pathlib import Path
from typing import Optional

@@ -27,6 +26,7 @@ from frigate.const import (
from frigate.events.external import ManualEventState
from frigate.models import ReviewSegment
from frigate.object_processing import TrackedObject
from frigate.review.types import SeverityEnum
from frigate.util.image import SharedMemoryFrameManager, calculate_16_9_crop

logger = logging.getLogger(__name__)

@@ -39,11 +39,6 @@ THRESHOLD_ALERT_ACTIVITY = 120
THRESHOLD_DETECTION_ACTIVITY = 30


class SeverityEnum(str, Enum):
    alert = "alert"
    detection = "detection"


class PendingReviewSegment:
    def __init__(
        self,
frigate/review/types.py (new file, 6 lines)

@@ -0,0 +1,6 @@
from enum import Enum


class SeverityEnum(str, Enum):
    alert = "alert"
    detection = "detection"
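Because the enum subclasses `str`, relocating it changes nothing behaviorally: the `ReviewSegment.severity == SeverityEnum.alert` clauses above compare exactly as raw strings did. A standalone sketch:

```python
from enum import Enum


# Mirrors frigate/review/types.py above so the sketch runs on its own.
class SeverityEnum(str, Enum):
    alert = "alert"
    detection = "detection"


assert SeverityEnum.alert == "alert"  # str subclass: equal to its raw value
assert SeverityEnum("detection") is SeverityEnum.detection  # round-trips from stored strings
```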
@@ -9,8 +9,8 @@ from playhouse.sqliteq import SqliteQueueDatabase

from frigate.api.fastapi_app import create_fastapi_app
from frigate.config import FrigateConfig
from frigate.models import Event, ReviewSegment
from frigate.review.maintainer import SeverityEnum
from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS


@@ -146,17 +146,35 @@ class BaseTestHttp(unittest.TestCase):
    def insert_mock_review_segment(
        self,
        id: str,
        start_time: datetime.datetime = datetime.datetime.now().timestamp(),
        end_time: datetime.datetime = datetime.datetime.now().timestamp() + 20,
        start_time: float = datetime.datetime.now().timestamp(),
        end_time: float = datetime.datetime.now().timestamp() + 20,
        severity: SeverityEnum = SeverityEnum.alert,
        has_been_reviewed: bool = False,
    ) -> Event:
        """Inserts a basic event model with a given id."""
        """Inserts a review segment model with a given id."""
        return ReviewSegment.insert(
            id=id,
            camera="front_door",
            start_time=start_time,
            end_time=end_time,
            has_been_reviewed=False,
            severity=SeverityEnum.alert,
            has_been_reviewed=has_been_reviewed,
            severity=severity,
            thumb_path=False,
            data={},
        ).execute()

    def insert_mock_recording(
        self,
        id: str,
        start_time: float = datetime.datetime.now().timestamp(),
        end_time: float = datetime.datetime.now().timestamp() + 20,
    ) -> Event:
        """Inserts a recording model with a given id."""
        return Recordings.insert(
            id=id,
            path=id,
            camera="front_door",
            start_time=start_time,
            end_time=end_time,
            duration=end_time - start_time,
        ).execute()

@@ -1,76 +1,89 @@
import datetime
from datetime import datetime, timedelta

from fastapi.testclient import TestClient

from frigate.models import Event, ReviewSegment
from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.http_api.base_http_test import BaseTestHttp


class TestHttpReview(BaseTestHttp):
    def setUp(self):
        super().setUp([Event, ReviewSegment])
        super().setUp([Event, Recordings, ReviewSegment])
        self.app = super().create_app()

    def _get_reviews(self, ids: list[str]):
        return list(
            ReviewSegment.select(ReviewSegment.id)
            .where(ReviewSegment.id.in_(ids))
            .execute()
        )

    def _get_recordings(self, ids: list[str]):
        return list(
            Recordings.select(Recordings.id).where(Recordings.id.in_(ids)).execute()
        )

    ####################################################################################################################
    ################################### GET /review Endpoint ########################################################
    ####################################################################################################################

    # Does not return any data point since the end time (before parameter) is not passed and the review segment end_time is 2 seconds from now
    def test_get_review_no_filters_no_matches(self):
        app = super().create_app()
        now = datetime.datetime.now().timestamp()
        now = datetime.now().timestamp()

        with TestClient(app) as client:
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random", now, now + 2)
            reviews_response = client.get("/review")
            assert reviews_response.status_code == 200
            reviews_in_response = reviews_response.json()
            assert len(reviews_in_response) == 0
            response = client.get("/review")
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 0

    def test_get_review_no_filters(self):
        app = super().create_app()
        now = datetime.datetime.now().timestamp()
        now = datetime.now().timestamp()

        with TestClient(app) as client:
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random", now - 2, now - 1)
            reviews_response = client.get("/review")
            assert reviews_response.status_code == 200
            reviews_in_response = reviews_response.json()
            assert len(reviews_in_response) == 1
            response = client.get("/review")
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 1

    def test_get_review_with_time_filter_no_matches(self):
        app = super().create_app()
        now = datetime.datetime.now().timestamp()
        now = datetime.now().timestamp()

        with TestClient(app) as client:
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id, now, now + 2)
            params = {
                "after": now,
                "before": now + 3,
            }
            reviews_response = client.get("/review", params=params)
            assert reviews_response.status_code == 200
            reviews_in_response = reviews_response.json()
            assert len(reviews_in_response) == 0
            response = client.get("/review", params=params)
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 0

    def test_get_review_with_time_filter(self):
        app = super().create_app()
        now = datetime.datetime.now().timestamp()
        now = datetime.now().timestamp()

        with TestClient(app) as client:
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id, now, now + 2)
            params = {
                "after": now - 1,
                "before": now + 3,
            }
            reviews_response = client.get("/review", params=params)
            assert reviews_response.status_code == 200
            reviews_in_response = reviews_response.json()
            assert len(reviews_in_response) == 1
            assert reviews_in_response[0]["id"] == id
            response = client.get("/review", params=params)
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 1
            assert response_json[0]["id"] == id

    def test_get_review_with_limit_filter(self):
        app = super().create_app()
        now = datetime.datetime.now().timestamp()
        now = datetime.now().timestamp()

        with TestClient(app) as client:
        with TestClient(self.app) as client:
            id = "123456.random"
            id2 = "654321.random"
            super().insert_mock_review_segment(id, now, now + 2)

@@ -80,17 +93,49 @@ class TestHttpReview(BaseTestHttp):
                "after": now,
                "before": now + 3,
            }
            reviews_response = client.get("/review", params=params)
            assert reviews_response.status_code == 200
            reviews_in_response = reviews_response.json()
            assert len(reviews_in_response) == 1
            assert reviews_in_response[0]["id"] == id2
            response = client.get("/review", params=params)
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 1
            assert response_json[0]["id"] == id2
    def test_get_review_with_severity_filters(self):
        now = datetime.now().timestamp()

        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id, now, now + 2, SeverityEnum.detection)
            params = {
                "severity": "detection",
                "after": now - 1,
                "before": now + 3,
            }
            response = client.get("/review", params=params)
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 1
            assert response_json[0]["id"] == id

    def test_get_review_with_severity_filters_no_matches(self):
        now = datetime.now().timestamp()

        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id, now, now + 2, SeverityEnum.detection)
            params = {
                "severity": "alert",
                "after": now - 1,
                "before": now + 3,
            }
            response = client.get("/review", params=params)
            assert response.status_code == 200
            response_json = response.json()
            assert len(response_json) == 0
def test_get_review_with_all_filters(self):
|
||||
app = super().create_app()
|
||||
now = datetime.datetime.now().timestamp()
|
||||
now = datetime.now().timestamp()
|
||||
|
||||
with TestClient(app) as client:
|
||||
with TestClient(self.app) as client:
|
||||
id = "123456.random"
|
||||
super().insert_mock_review_segment(id, now, now + 2)
|
||||
params = {
|
||||
@@ -103,8 +148,424 @@ class TestHttpReview(BaseTestHttp):
|
||||
"after": now - 1,
|
||||
"before": now + 3,
|
||||
}
|
||||
reviews_response = client.get("/review", params=params)
|
||||
assert reviews_response.status_code == 200
|
||||
reviews_in_response = reviews_response.json()
|
||||
assert len(reviews_in_response) == 1
|
||||
assert reviews_in_response[0]["id"] == id
|
||||
response = client.get("/review", params=params)
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
assert len(response_json) == 1
|
||||
assert response_json[0]["id"] == id
|
||||
|
||||
####################################################################################################################
|
||||
################################### GET /review/summary Endpoint #################################################
|
||||
####################################################################################################################
|
||||
def test_get_review_summary_all_filters(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
params = {
|
||||
"cameras": "front_door",
|
||||
"labels": "all",
|
||||
"zones": "all",
|
||||
"timezone": "utc",
|
||||
}
|
||||
response = client.get("/review/summary", params=params)
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-24'
|
||||
today_formatted = datetime.today().strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
today_formatted: {
|
||||
"day": today_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
def test_get_review_summary_no_filters(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
response = client.get("/review/summary")
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-24'
|
||||
today_formatted = datetime.today().strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
today_formatted: {
|
||||
"day": today_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
def test_get_review_summary_multiple_days(self):
|
||||
now = datetime.now()
|
||||
five_days_ago = datetime.today() - timedelta(days=5)
|
||||
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment(
|
||||
"123456.random", now.timestamp() - 2, now.timestamp() - 1
|
||||
)
|
||||
super().insert_mock_review_segment(
|
||||
"654321.random",
|
||||
five_days_ago.timestamp(),
|
||||
five_days_ago.timestamp() + 1,
|
||||
)
|
||||
response = client.get("/review/summary")
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-24'
|
||||
today_formatted = now.strftime("%Y-%m-%d")
|
||||
# e.g. '2024-11-19'
|
||||
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
today_formatted: {
|
||||
"day": today_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
five_days_ago_formatted: {
|
||||
"day": five_days_ago_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
def test_get_review_summary_multiple_days_edge_cases(self):
|
||||
now = datetime.now()
|
||||
five_days_ago = datetime.today() - timedelta(days=5)
|
||||
twenty_days_ago = datetime.today() - timedelta(days=20)
|
||||
one_month_ago = datetime.today() - timedelta(days=30)
|
||||
one_month_ago_ts = one_month_ago.timestamp()
|
||||
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random", now.timestamp())
|
||||
super().insert_mock_review_segment(
|
||||
"123457.random", five_days_ago.timestamp()
|
||||
)
|
||||
super().insert_mock_review_segment(
|
||||
"123458.random",
|
||||
twenty_days_ago.timestamp(),
|
||||
None,
|
||||
SeverityEnum.detection,
|
||||
)
|
||||
# One month ago plus 5 seconds fits within the condition (review.start_time > month_ago). Assuming that the endpoint does not take more than 5 seconds to be invoked
|
||||
super().insert_mock_review_segment(
|
||||
"123459.random",
|
||||
one_month_ago_ts + 5,
|
||||
None,
|
||||
SeverityEnum.detection,
|
||||
)
|
||||
# This won't appear in the output since it's not within last month start_time clause (review.start_time > month_ago)
|
||||
super().insert_mock_review_segment("123450.random", one_month_ago_ts)
|
||||
response = client.get("/review/summary")
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-24'
|
||||
today_formatted = now.strftime("%Y-%m-%d")
|
||||
# e.g. '2024-11-19'
|
||||
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
|
||||
# e.g. '2024-11-04'
|
||||
twenty_days_ago_formatted = twenty_days_ago.strftime("%Y-%m-%d")
|
||||
# e.g. '2024-10-24'
|
||||
one_month_ago_formatted = one_month_ago.strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
today_formatted: {
|
||||
"day": today_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
five_days_ago_formatted: {
|
||||
"day": five_days_ago_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
twenty_days_ago_formatted: {
|
||||
"day": twenty_days_ago_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 0,
|
||||
"total_detection": 1,
|
||||
},
|
||||
one_month_ago_formatted: {
|
||||
"day": one_month_ago_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 0,
|
||||
"total_detection": 1,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
def test_get_review_summary_multiple_in_same_day(self):
|
||||
now = datetime.now()
|
||||
five_days_ago = datetime.today() - timedelta(days=5)
|
||||
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random", now.timestamp())
|
||||
five_days_ago_ts = five_days_ago.timestamp()
|
||||
for i in range(20):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_alert",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.alert,
|
||||
)
|
||||
for i in range(15):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_detection",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.detection,
|
||||
)
|
||||
response = client.get("/review/summary")
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-24'
|
||||
today_formatted = now.strftime("%Y-%m-%d")
|
||||
# e.g. '2024-11-19'
|
||||
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
today_formatted: {
|
||||
"day": today_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 1,
|
||||
"total_detection": 0,
|
||||
},
|
||||
five_days_ago_formatted: {
|
||||
"day": five_days_ago_formatted,
|
||||
"reviewed_alert": 0,
|
||||
"reviewed_detection": 0,
|
||||
"total_alert": 20,
|
||||
"total_detection": 15,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
def test_get_review_summary_multiple_in_same_day_with_reviewed(self):
|
||||
five_days_ago = datetime.today() - timedelta(days=5)
|
||||
|
||||
with TestClient(self.app) as client:
|
||||
five_days_ago_ts = five_days_ago.timestamp()
|
||||
for i in range(10):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_alert_not_reviewed",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.alert,
|
||||
False,
|
||||
)
|
||||
for i in range(10):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_alert_reviewed",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.alert,
|
||||
True,
|
||||
)
|
||||
for i in range(10):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_detection_not_reviewed",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.detection,
|
||||
False,
|
||||
)
|
||||
for i in range(5):
|
||||
super().insert_mock_review_segment(
|
||||
f"123456_{i}.random_detection_reviewed",
|
||||
five_days_ago_ts,
|
||||
five_days_ago_ts,
|
||||
SeverityEnum.detection,
|
||||
True,
|
||||
)
|
||||
response = client.get("/review/summary")
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
# e.g. '2024-11-19'
|
||||
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
|
||||
expected_response = {
|
||||
"last24Hours": {
|
||||
"reviewed_alert": None,
|
||||
"reviewed_detection": None,
|
||||
"total_alert": None,
|
||||
"total_detection": None,
|
||||
},
|
||||
five_days_ago_formatted: {
|
||||
"day": five_days_ago_formatted,
|
||||
"reviewed_alert": 10,
|
||||
"reviewed_detection": 5,
|
||||
"total_alert": 20,
|
||||
"total_detection": 15,
|
||||
},
|
||||
}
|
||||
self.assertEqual(response_json, expected_response)
|
||||
|
||||
####################################################################################################################
|
||||
################################### POST reviews/viewed Endpoint ################################################
|
||||
####################################################################################################################
|
||||
def test_post_reviews_viewed_no_body(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
response = client.post("/reviews/viewed")
|
||||
# Missing ids
|
||||
assert response.status_code == 422
|
||||
|
||||
def test_post_reviews_viewed_no_body_ids(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
body = {"ids": [""]}
|
||||
response = client.post("/reviews/viewed", json=body)
|
||||
# Missing ids
|
||||
assert response.status_code == 422
|
||||
|
||||
def test_post_reviews_viewed_non_existent_id(self):
|
||||
with TestClient(self.app) as client:
|
||||
id = "123456.random"
|
||||
super().insert_mock_review_segment(id)
|
||||
body = {"ids": ["1"]}
|
||||
response = client.post("/reviews/viewed", json=body)
|
||||
assert response.status_code == 200
|
||||
response = response.json()
|
||||
assert response["success"] == True
|
||||
assert response["message"] == "Reviewed multiple items"
|
||||
# Verify that in DB the review segment was not changed
|
||||
review_segment_in_db = (
|
||||
ReviewSegment.select(ReviewSegment.has_been_reviewed)
|
||||
.where(ReviewSegment.id == id)
|
||||
.get()
|
||||
)
|
||||
assert review_segment_in_db.has_been_reviewed == False
|
||||
|
||||
def test_post_reviews_viewed(self):
|
||||
with TestClient(self.app) as client:
|
||||
id = "123456.random"
|
||||
super().insert_mock_review_segment(id)
|
||||
body = {"ids": [id]}
|
||||
response = client.post("/reviews/viewed", json=body)
|
||||
assert response.status_code == 200
|
||||
response = response.json()
|
||||
assert response["success"] == True
|
||||
assert response["message"] == "Reviewed multiple items"
|
||||
# Verify that in DB the review segment was changed
|
||||
review_segment_in_db = (
|
||||
ReviewSegment.select(ReviewSegment.has_been_reviewed)
|
||||
.where(ReviewSegment.id == id)
|
||||
.get()
|
||||
)
|
||||
assert review_segment_in_db.has_been_reviewed == True
|
||||
|
||||
####################################################################################################################
|
||||
################################### POST reviews/delete Endpoint ################################################
|
||||
####################################################################################################################
|
||||
def test_post_reviews_delete_no_body(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
response = client.post("/reviews/delete")
|
||||
# Missing ids
|
||||
assert response.status_code == 422
|
||||
|
||||
def test_post_reviews_delete_no_body_ids(self):
|
||||
with TestClient(self.app) as client:
|
||||
super().insert_mock_review_segment("123456.random")
|
||||
body = {"ids": [""]}
|
||||
response = client.post("/reviews/delete", json=body)
|
||||
# Missing ids
|
||||
assert response.status_code == 422
|
||||
|
||||
def test_post_reviews_delete_non_existent_id(self):
|
||||
with TestClient(self.app) as client:
|
||||
id = "123456.random"
|
||||
super().insert_mock_review_segment(id)
|
||||
body = {"ids": ["1"]}
|
||||
response = client.post("/reviews/delete", json=body)
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
assert response_json["success"] == True
|
||||
assert response_json["message"] == "Deleted review items."
|
||||
# Verify that in DB the review segment was not deleted
|
||||
review_ids_in_db_after = self._get_reviews([id])
|
||||
assert len(review_ids_in_db_after) == 1
|
||||
assert review_ids_in_db_after[0].id == id
|
||||
|
||||
def test_post_reviews_delete(self):
|
||||
with TestClient(self.app) as client:
|
||||
id = "123456.random"
|
||||
super().insert_mock_review_segment(id)
|
||||
body = {"ids": [id]}
|
||||
response = client.post("/reviews/delete", json=body)
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
assert response_json["success"] == True
|
||||
assert response_json["message"] == "Deleted review items."
|
||||
# Verify that in DB the review segment was deleted
|
||||
review_ids_in_db_after = self._get_reviews([id])
|
||||
assert len(review_ids_in_db_after) == 0
|
||||
|
||||
def test_post_reviews_delete_many(self):
|
||||
with TestClient(self.app) as client:
|
||||
ids = ["123456.random", "654321.random"]
|
||||
for id in ids:
|
||||
super().insert_mock_review_segment(id)
|
||||
super().insert_mock_recording(id)
|
||||
|
||||
review_ids_in_db_before = self._get_reviews(ids)
|
||||
recordings_ids_in_db_before = self._get_recordings(ids)
|
||||
assert len(review_ids_in_db_before) == 2
|
||||
assert len(recordings_ids_in_db_before) == 2
|
||||
|
||||
body = {"ids": ids}
|
||||
response = client.post("/reviews/delete", json=body)
|
||||
assert response.status_code == 200
|
||||
response_json = response.json()
|
||||
assert response_json["success"] == True
|
||||
assert response_json["message"] == "Deleted review items."
|
||||
|
||||
# Verify that in DB all review segments and recordings that were passed were deleted
|
||||
review_ids_in_db_after = self._get_reviews(ids)
|
||||
recording_ids_in_db_after = self._get_recordings(ids)
|
||||
assert len(review_ids_in_db_after) == 0
|
||||
assert len(recording_ids_in_db_after) == 0
|
||||
|
||||
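The tests above pin down the contract of the two endpoints: both accept a JSON body of the form `{"ids": [...]}`, reject a missing or empty list with 422, and return 200 with a success payload even when an id does not exist. A minimal sketch of driving them directly; the base URL (and whether your deployment serves the API under an `/api` prefix) is an assumption for illustration, not part of this diff:

```python
import requests

# Hypothetical deployment URL; adjust host/port and any /api prefix for your setup.
BASE = "http://localhost:5000"
ids = ["123456.random", "654321.random"]

# Mark review items as viewed; an empty ids list would be rejected with 422.
resp = requests.post(f"{BASE}/reviews/viewed", json={"ids": ids})
assert resp.status_code == 200 and resp.json()["success"]

# Delete the same items; per the tests, associated recordings are deleted too.
resp = requests.post(f"{BASE}/reviews/delete", json={"ids": ids})
assert resp.status_code == 200 and resp.json()["message"] == "Deleted review items."
```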
@@ -168,7 +168,7 @@ class TestHttp(unittest.TestCase):
 
         assert event
         assert event["id"] == id
-        assert event == model_to_dict(Event.get(Event.id == id))
+        assert event["id"] == model_to_dict(Event.get(Event.id == id))["id"]
 
     def test_get_bad_event(self):
         app = create_fastapi_app(
@@ -13,6 +13,7 @@ from frigate.config import (
     CameraConfig,
     ModelConfig,
 )
+from frigate.review.types import SeverityEnum
 from frigate.util.image import (
     area,
     calculate_region,
@@ -59,6 +60,27 @@ class TrackedObject:
         self.pending_loitering = False
         self.previous = self.to_dict()
 
+    @property
+    def max_severity(self) -> Optional[str]:
+        review_config = self.camera_config.review
+
+        if self.obj_data["label"] in review_config.alerts.labels and (
+            not review_config.alerts.required_zones
+            or set(self.entered_zones) & set(review_config.alerts.required_zones)
+        ):
+            return SeverityEnum.alert
+
+        if (
+            not review_config.detections.labels
+            or self.obj_data["label"] in review_config.detections.labels
+        ) and (
+            not review_config.detections.required_zones
+            or set(self.entered_zones) & set(review_config.detections.required_zones)
+        ):
+            return SeverityEnum.detection
+
+        return None
+
     def _is_false_positive(self):
         # once a true positive, always a true positive
         if not self.false_positive:
@@ -232,6 +254,7 @@ class TrackedObject:
             "attributes": self.attributes,
             "current_attributes": self.obj_data["attributes"],
             "pending_loitering": self.pending_loitering,
+            "max_severity": self.max_severity,
         }
 
         if include_thumbnail:
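Read as rules, `max_severity` says: an object is an alert only if its label is explicitly listed under `alerts`, while an empty `detections.labels` list means any label can qualify as a detection; in both cases a configured `required_zones` list additionally demands overlap with the zones the object entered. A standalone sketch of the same decision logic, with a deliberately simplified config shape (plain sets rather than Frigate's config objects):

```python
from typing import Optional


def classify_severity(
    label: str,
    entered_zones: set[str],
    alert_labels: set[str],
    alert_zones: set[str],
    detection_labels: set[str],
    detection_zones: set[str],
) -> Optional[str]:
    # Alert: the label must be explicitly listed; if zones are required,
    # the object must have entered at least one of them.
    if label in alert_labels and (not alert_zones or entered_zones & alert_zones):
        return "alert"
    # Detection: an empty label list means any label qualifies.
    if (not detection_labels or label in detection_labels) and (
        not detection_zones or entered_zones & detection_zones
    ):
        return "detection"
    return None


# A person that never entered the required alert zone falls through to detection.
assert classify_severity("person", set(), {"person"}, {"yard"}, set(), set()) == "detection"
```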
@@ -219,19 +219,35 @@ def draw_box_with_label(
     text_width = size[0][0]
     text_height = size[0][1]
     line_height = text_height + size[1]
+    # get frame height
+    frame_height = frame.shape[0]
     # set the text start position
     if position == "ul":
         text_offset_x = x_min
-        text_offset_y = 0 if y_min < line_height else y_min - (line_height + 8)
+        text_offset_y = max(0, y_min - (line_height + 8))
     elif position == "ur":
-        text_offset_x = x_max - (text_width + 8)
-        text_offset_y = 0 if y_min < line_height else y_min - (line_height + 8)
+        text_offset_x = max(0, x_max - (text_width + 8))
+        text_offset_y = max(0, y_min - (line_height + 8))
     elif position == "bl":
         text_offset_x = x_min
-        text_offset_y = y_max
+        text_offset_y = min(frame_height - line_height, y_max)
     elif position == "br":
-        text_offset_x = x_max - (text_width + 8)
-        text_offset_y = y_max
+        text_offset_x = max(0, x_max - (text_width + 8))
+        text_offset_y = min(frame_height - line_height, y_max)
+    # Adjust position if it overlaps with the box or goes out of frame
+    if position in {"ul", "ur"}:
+        if text_offset_y < y_min + thickness:  # Label overlaps with the box
+            if y_min - (line_height + 8) < 0 and y_max + line_height <= frame_height:
+                # Not enough space above, and there is space below
+                text_offset_y = y_max
+            elif y_min - (line_height + 8) >= 0:
+                # Enough space above, keep the label at the top
+                text_offset_y = max(0, y_min - (line_height + 8))
+    elif position in {"bl", "br"}:
+        if text_offset_y + line_height > frame_height:
+            # If there's not enough space below, try above the box
+            text_offset_y = max(0, y_min - (line_height + 8))
+
     # make the coords of the box with a small padding of two pixels
     textbox_coords = (
         (text_offset_x, text_offset_y),
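The effect of the new clamping is easiest to see with numbers: for a box near the top of the frame, the old `ul` branch pinned the label to y = 0, where it could still overlap the box, while the new code clamps first and then falls back to drawing below the box when there is room. A quick worked check with illustrative values (the pixel numbers are made up for the example):

```python
# Illustrative values: a 28 px label, a box near the top of a 480 px frame.
line_height = 28
frame_height = 480
y_min, y_max, thickness = 10, 120, 2

text_offset_y = max(0, y_min - (line_height + 8))  # -> 0, clamped to the frame
if text_offset_y < y_min + thickness:  # 0 < 12: label would overlap the box
    if y_min - (line_height + 8) < 0 and y_max + line_height <= frame_height:
        text_offset_y = y_max  # room below the box, so draw the label there

assert text_offset_y == 120
```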
@@ -29,8 +29,11 @@ export function ApiProvider({ children, options }: ApiProviderType) {
           error.response &&
           [401, 302, 307].includes(error.response.status)
         ) {
-          window.location.href =
-            error.response.headers.get("location") ?? "login";
+          // redirect to the login page if not already there
+          const loginPage = error.response.headers.get("location") ?? "login";
+          if (window.location.href !== loginPage) {
+            window.location.href = loginPage;
+          }
         }
       },
       ...options,
@@ -63,7 +63,7 @@ export function UserAuthForm({ className, ...props }: UserAuthFormProps) {
         toast.error("Exceeded rate limit. Try again later.", {
           position: "top-center",
         });
-      } else if (err.response?.status === 400) {
+      } else if (err.response?.status === 401) {
         toast.error("Login failed", {
           position: "top-center",
         });
@@ -15,13 +15,15 @@ import {
   SearchFilter,
   SearchFilters,
   SearchSource,
+  SearchSortType,
 } from "@/types/search";
 import { DateRange } from "react-day-picker";
 import { cn } from "@/lib/utils";
-import { MdLabel } from "react-icons/md";
+import { MdLabel, MdSort } from "react-icons/md";
 import PlatformAwareDialog from "../overlay/dialog/PlatformAwareDialog";
 import SearchFilterDialog from "../overlay/dialog/SearchFilterDialog";
 import { CalendarRangeFilterButton } from "./CalendarFilterButton";
+import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
 
 type SearchFilterGroupProps = {
   className: string;
@@ -107,6 +109,25 @@ export default function SearchFilterGroup({
     [config, allLabels, allZones],
   );
 
+  const availableSortTypes = useMemo(() => {
+    const sortTypes = ["date_asc", "date_desc"];
+    if (filter?.min_score || filter?.max_score) {
+      sortTypes.push("score_desc", "score_asc");
+    }
+    if (filter?.event_id || filter?.query) {
+      sortTypes.push("relevance");
+    }
+    return sortTypes as SearchSortType[];
+  }, [filter]);
+
+  const defaultSortType = useMemo<SearchSortType>(() => {
+    if (filter?.query || filter?.event_id) {
+      return "relevance";
+    } else {
+      return "date_desc";
+    }
+  }, [filter]);
+
   const groups = useMemo(() => {
     if (!config) {
       return [];
@@ -179,6 +200,16 @@ export default function SearchFilterGroup({
         filterValues={filterValues}
         onUpdateFilter={onUpdateFilter}
       />
+      {filters.includes("sort") && Object.keys(filter ?? {}).length > 0 && (
+        <SortTypeButton
+          availableSortTypes={availableSortTypes ?? []}
+          defaultSortType={defaultSortType}
+          selectedSortType={filter?.sort}
+          updateSortType={(newSort) => {
+            onUpdateFilter({ ...filter, sort: newSort });
+          }}
+        />
+      )}
     </div>
   );
 }
@@ -362,3 +393,176 @@ export function GeneralFilterContent({
     </>
   );
 }
+
+type SortTypeButtonProps = {
+  availableSortTypes: SearchSortType[];
+  defaultSortType: SearchSortType;
+  selectedSortType: SearchSortType | undefined;
+  updateSortType: (sortType: SearchSortType | undefined) => void;
+};
+function SortTypeButton({
+  availableSortTypes,
+  defaultSortType,
+  selectedSortType,
+  updateSortType,
+}: SortTypeButtonProps) {
+  const [open, setOpen] = useState(false);
+  const [currentSortType, setCurrentSortType] = useState<
+    SearchSortType | undefined
+  >(selectedSortType as SearchSortType);
+
+  // ui
+
+  useEffect(() => {
+    setCurrentSortType(selectedSortType);
+    // only refresh when state changes
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [selectedSortType]);
+
+  const trigger = (
+    <Button
+      size="sm"
+      variant={
+        selectedSortType != defaultSortType && selectedSortType != undefined
+          ? "select"
+          : "default"
+      }
+      className="flex items-center gap-2 capitalize"
+      aria-label="Labels"
+    >
+      <MdSort
+        className={`${selectedSortType != defaultSortType && selectedSortType != undefined ? "text-selected-foreground" : "text-secondary-foreground"}`}
+      />
+      <div
+        className={`${selectedSortType != defaultSortType && selectedSortType != undefined ? "text-selected-foreground" : "text-primary"}`}
+      >
+        Sort
+      </div>
+    </Button>
+  );
+  const content = (
+    <SortTypeContent
+      availableSortTypes={availableSortTypes ?? []}
+      defaultSortType={defaultSortType}
+      selectedSortType={selectedSortType}
+      currentSortType={currentSortType}
+      setCurrentSortType={setCurrentSortType}
+      updateSortType={updateSortType}
+      onClose={() => setOpen(false)}
+    />
+  );
+
+  return (
+    <PlatformAwareDialog
+      trigger={trigger}
+      content={content}
+      contentClassName={
+        isDesktop
+          ? "scrollbar-container h-auto max-h-[80dvh] overflow-y-auto"
+          : "max-h-[75dvh] overflow-hidden p-4"
+      }
+      open={open}
+      onOpenChange={(open) => {
+        if (!open) {
+          setCurrentSortType(selectedSortType);
+        }
+
+        setOpen(open);
+      }}
+    />
+  );
+}
+
+type SortTypeContentProps = {
+  availableSortTypes: SearchSortType[];
+  defaultSortType: SearchSortType;
+  selectedSortType: SearchSortType | undefined;
+  currentSortType: SearchSortType | undefined;
+  updateSortType: (sort_type: SearchSortType | undefined) => void;
+  setCurrentSortType: (sort_type: SearchSortType | undefined) => void;
+  onClose: () => void;
+};
+export function SortTypeContent({
+  availableSortTypes,
+  defaultSortType,
+  selectedSortType,
+  currentSortType,
+  updateSortType,
+  setCurrentSortType,
+  onClose,
+}: SortTypeContentProps) {
+  const sortLabels = {
+    date_asc: "Date (Ascending)",
+    date_desc: "Date (Descending)",
+    score_asc: "Object Score (Ascending)",
+    score_desc: "Object Score (Descending)",
+    relevance: "Relevance",
+  };
+
+  return (
+    <>
+      <div className="overflow-x-hidden">
+        <div className="my-2.5 flex flex-col gap-2.5">
+          <RadioGroup
+            value={
+              Array.isArray(currentSortType)
+                ? currentSortType?.[0]
+                : (currentSortType ?? defaultSortType)
+            }
+            defaultValue={defaultSortType}
+            onValueChange={(value) =>
+              setCurrentSortType(value as SearchSortType)
+            }
+            className="w-full space-y-1"
+          >
+            {availableSortTypes.map((value) => (
+              <div className="flex flex-row gap-2">
+                <RadioGroupItem
+                  key={value}
+                  value={value}
+                  id={`sort-${value}`}
+                  className={
+                    value == (currentSortType ?? defaultSortType)
+                      ? "bg-selected from-selected/50 to-selected/90 text-selected"
+                      : "bg-secondary from-secondary/50 to-secondary/90 text-secondary"
+                  }
+                />
+                <Label
+                  htmlFor={`sort-${value}`}
+                  className="flex cursor-pointer items-center space-x-2"
+                >
+                  <span>{sortLabels[value]}</span>
+                </Label>
+              </div>
+            ))}
+          </RadioGroup>
+        </div>
+      </div>
+      <DropdownMenuSeparator />
+      <div className="flex items-center justify-evenly p-2">
+        <Button
+          aria-label="Apply"
+          variant="select"
+          onClick={() => {
+            if (selectedSortType != currentSortType) {
+              updateSortType(currentSortType);
+            }
+
+            onClose();
+          }}
+        >
+          Apply
+        </Button>
+        <Button
+          aria-label="Reset"
+          onClick={() => {
+            setCurrentSortType(undefined);
+            updateSortType(undefined);
+          }}
+        >
+          Reset
+        </Button>
+      </div>
+    </>
+  );
+}
@@ -18,6 +18,7 @@ import {
   FilterType,
   SavedSearchQuery,
   SearchFilter,
+  SearchSortType,
   SearchSource,
 } from "@/types/search";
 import useSuggestions from "@/hooks/use-suggestions";
@@ -323,6 +324,9 @@ export default function InputWithTags({
       case "event_id":
         newFilters.event_id = value;
         break;
+      case "sort":
+        newFilters.sort = value as SearchSortType;
+        break;
       default:
         // Handle array types (cameras, labels, subLabels, zones)
         if (!newFilters[type]) newFilters[type] = [];
@@ -175,7 +175,7 @@ export default function SearchFilterDialog({
       time_range: undefined,
       zones: undefined,
       sub_labels: undefined,
-      search_type: ["thumbnail", "description"],
+      search_type: undefined,
       min_score: undefined,
       max_score: undefined,
       has_snapshot: undefined,
@@ -15,7 +15,10 @@ export function useOverlayState<S>(
     (value: S, replace: boolean = false) => {
       const newLocationState = { ...currentLocationState };
       newLocationState[key] = value;
-      navigate(location.pathname, { state: newLocationState, replace });
+      navigate(location.pathname + location.search, {
+        state: newLocationState,
+        replace,
+      });
     },
     // we know that these deps are correct
     // eslint-disable-next-line react-hooks/exhaustive-deps
@@ -116,6 +116,7 @@ export default function Explore() {
       is_submitted: searchSearchParams["is_submitted"],
       has_clip: searchSearchParams["has_clip"],
       event_id: searchSearchParams["event_id"],
+      sort: searchSearchParams["sort"],
       limit:
         Object.keys(searchSearchParams).length == 0 ? API_LIMIT : undefined,
       timezone,
@@ -148,6 +149,7 @@ export default function Explore() {
       is_submitted: searchSearchParams["is_submitted"],
       has_clip: searchSearchParams["has_clip"],
       event_id: searchSearchParams["event_id"],
+      sort: searchSearchParams["sort"],
       timezone,
       include_thumbnails: 0,
     },
@@ -165,12 +167,17 @@ export default function Explore() {
 
     const [url, params] = searchQuery;
 
     // If it's not the first page, use the last item's start_time as the 'before' parameter
+    const isAscending = params.sort?.includes("date_asc");
+
     if (pageIndex > 0 && previousPageData) {
       const lastDate = previousPageData[previousPageData.length - 1].start_time;
       return [
         url,
-        { ...params, before: lastDate.toString(), limit: API_LIMIT },
+        {
+          ...params,
+          [isAscending ? "after" : "before"]: lastDate.toString(),
+          limit: API_LIMIT,
+        },
       ];
     }
 
@@ -6,6 +6,7 @@ const SEARCH_FILTERS = [
   "zone",
   "sub",
   "source",
+  "sort",
 ] as const;
 export type SearchFilters = (typeof SEARCH_FILTERS)[number];
 export const DEFAULT_SEARCH_FILTERS: SearchFilters[] = [
@@ -16,10 +17,18 @@ export const DEFAULT_SEARCH_FILTERS: SearchFilters[] = [
   "zone",
   "sub",
   "source",
+  "sort",
 ];
 
 export type SearchSource = "similarity" | "thumbnail" | "description";
 
+export type SearchSortType =
+  | "date_asc"
+  | "date_desc"
+  | "score_asc"
+  | "score_desc"
+  | "relevance";
+
 export type SearchResult = {
   id: string;
   camera: string;
@@ -65,6 +74,7 @@ export type SearchFilter = {
   time_range?: string;
   search_type?: SearchSource[];
   event_id?: string;
+  sort?: SearchSortType;
 };
 
 export const DEFAULT_TIME_RANGE_AFTER = "00:00";
@@ -86,6 +96,7 @@ export type SearchQueryParams = {
   query?: string;
   page?: number;
   time_range?: string;
+  sort?: SearchSortType;
 };
 
 export type SearchQuery = [string, SearchQueryParams] | null;
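These `SearchSortType` values are forwarded as the `sort` query parameter when the Explore page fetches events, which is also what drives the `after`/`before` cursor choice in the pagination hunk above. A hedged sketch of issuing such a query by hand; the base URL and the exact path prefix are assumptions for illustration:

```python
import requests

# Hypothetical deployment URL; the /api prefix may differ by setup.
BASE = "http://localhost:5000/api"

# Oldest-first results; a UI paginating this order would pass `after`
# (the last item's start_time) rather than `before` for the next page.
resp = requests.get(f"{BASE}/events", params={"sort": "date_asc", "limit": 25})
events = resp.json()
```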