Compare commits


12 Commits

Author SHA1 Message Date
Blake Blackshear
6b12a45a95 return 401 for login failures (#15432)
* return 401 for login failures

* only setup the rate limiter when configured
2024-12-10 06:42:55 -07:00
Nicolas Mowen
0b9c4c18dd Refactor event cleanup to consider review severity (#15415)
* Keep track of objects max review severity

* Refactor cleanup to split snapshots and clips

* Cleanup events based on review severity

* Cleanup review imports

* Don't catch detections
2024-12-09 08:25:45 -07:00
Nicolas Mowen
d0cc8cb64b API response cleanup (#15389)
* API response cleanup

* Remove extra field definition
2024-12-06 20:07:43 -06:00
Nicolas Mowen
bb86e71e65 fix auth remote addr access (#15378) 2024-12-06 10:25:43 -06:00
Josh Hawkins
8aa6297308 Ensure label does not overlap with box or go out of frame (#15376) 2024-12-06 08:32:16 -07:00
Nicolas Mowen
d3b631a952 Api improvements (#15327)
* Organize api files

* Add more API definitions for events

* Add export select by ID

* Typing fixes

* Update openapi spec

* Change type

* Fix test

* Fix message

* Fix tests
2024-12-06 08:04:02 -06:00
Nicolas Mowen
47d495fc01 Make note of go2rtc encoded URLs (#15348)
* Make note of go2rtc encoded URLs

* clarify
2024-12-04 16:54:57 -06:00
Nicolas Mowen
32322b23b2 Update nvidia docs to reflect preset (#15347) 2024-12-04 15:43:10 -07:00
Josh Hawkins
c0ba98e26f Explore sorting (#15342)
* backend

* add type and params

* radio group in ui

* ensure search_type is cleared on reset
2024-12-04 08:54:10 -07:00
Rui Alves
a5a7cd3107 Added more unit tests for the review controller (#15162) 2024-12-04 06:52:08 -06:00
Josh Hawkins
a729408599 preserve search query in overlay state hook (#15334) 2024-12-04 06:14:53 -06:00
Josh Hawkins
4dddc53735 move label placement when overlapping small boxes (#15310) 2024-12-02 13:07:12 -06:00
40 changed files with 1858 additions and 723 deletions

View File

@@ -231,28 +231,11 @@ docker run -d \
 ### Setup Decoder

-A list of supported codecs (you can use `ffmpeg -decoders | grep cuvid` in the container to get the ones your card supports)
-
-```
-V..... h263_cuvid Nvidia CUVID H263 decoder (codec h263)
-V..... h264_cuvid Nvidia CUVID H264 decoder (codec h264)
-V..... hevc_cuvid Nvidia CUVID HEVC decoder (codec hevc)
-V..... mjpeg_cuvid Nvidia CUVID MJPEG decoder (codec mjpeg)
-V..... mpeg1_cuvid Nvidia CUVID MPEG1VIDEO decoder (codec mpeg1video)
-V..... mpeg2_cuvid Nvidia CUVID MPEG2VIDEO decoder (codec mpeg2video)
-V..... mpeg4_cuvid Nvidia CUVID MPEG4 decoder (codec mpeg4)
-V..... vc1_cuvid Nvidia CUVID VC1 decoder (codec vc1)
-V..... vp8_cuvid Nvidia CUVID VP8 decoder (codec vp8)
-V..... vp9_cuvid Nvidia CUVID VP9 decoder (codec vp9)
-```
-
-For example, for H264 video, you'll select `preset-nvidia-h264`.
+The decoder you need to pass in the `hwaccel_args` will depend on the input video. Using `preset-nvidia` ffmpeg will automatically select the necessary profile for the incoming video, and will log an error if the profile is not supported by your GPU.

 ```yaml
 ffmpeg:
-  hwaccel_args: preset-nvidia-h264
+  hwaccel_args: preset-nvidia
 ```

 If everything is working correctly, you should see a significant improvement in performance.

View File

@@ -132,6 +132,28 @@ cameras:
     - detect
 ```

+## Handling Complex Passwords
+
+go2rtc expects URL-encoded passwords in the config, [urlencoder.org](https://urlencoder.org) can be used for this purpose.
+
+For example:
+
+```yaml
+go2rtc:
+  streams:
+    my_camera: rtsp://username:$@foo%@192.168.1.100
+```
+
+becomes
+
+```yaml
+go2rtc:
+  streams:
+    my_camera: rtsp://username:$%40foo%25@192.168.1.100
+```
+
+See [this comment](https://github.com/AlexxIT/go2rtc/issues/1217#issuecomment-2242296489) for more information.
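The same encoding can be scripted instead of using the website; a minimal sketch using only Python's standard library (the helper name and sample credentials are illustrative, not part of go2rtc):

```python
from urllib.parse import quote

def encode_go2rtc_password(username: str, password: str, host: str) -> str:
    # quote() with safe="" percent-encodes every reserved character in the
    # password, e.g. "$@foo%" -> "%24%40foo%25"
    return f"rtsp://{username}:{quote(password, safe='')}@{host}"

print(encode_go2rtc_password("username", "$@foo%", "192.168.1.100"))
# rtsp://username:%24%40foo%25@192.168.1.100
```

Note that `quote()` also encodes `$` (as `%24`) where the docs example leaves it bare; both forms decode to the same password.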
 ## Advanced Restream Configurations

 The [exec](https://github.com/AlexxIT/go2rtc/tree/v1.9.2#source-exec) source in go2rtc can be used for custom ffmpeg commands. An example is below:

File diff suppressed because it is too large

View File

@@ -17,8 +17,8 @@ from fastapi.responses import JSONResponse, PlainTextResponse
 from markupsafe import escape
 from peewee import operator

-from frigate.api.defs.app_body import AppConfigSetBody
-from frigate.api.defs.app_query_parameters import AppTimelineHourlyQueryParameters
+from frigate.api.defs.query.app_query_parameters import AppTimelineHourlyQueryParameters
+from frigate.api.defs.request.app_body import AppConfigSetBody
 from frigate.api.defs.tags import Tags
 from frigate.config import FrigateConfig
 from frigate.const import CONFIG_DIR

View File

@@ -18,7 +18,7 @@ from joserfc import jwt
 from peewee import DoesNotExist
 from slowapi import Limiter

-from frigate.api.defs.app_body import (
+from frigate.api.defs.request.app_body import (
     AppPostLoginBody,
     AppPostUsersBody,
     AppPutPasswordBody,
@@ -85,7 +85,12 @@ def get_remote_addr(request: Request):
             return str(ip)

     # if there wasn't anything in the route, just return the default
-    return request.remote_addr or "127.0.0.1"
+    remote_addr = None
+
+    if hasattr(request, "remote_addr"):
+        remote_addr = request.remote_addr
+
+    return remote_addr or "127.0.0.1"


 def get_jwt_secret() -> str:
@@ -324,7 +329,7 @@ def login(request: Request, body: AppPostLoginBody):
     try:
         db_user: User = User.get_by_id(user)
     except DoesNotExist:
-        return JSONResponse(content={"message": "Login failed"}, status_code=400)
+        return JSONResponse(content={"message": "Login failed"}, status_code=401)

     password_hash = db_user.password_hash

     if verify_password(password, password_hash):
@@ -335,7 +340,7 @@ def login(request: Request, body: AppPostLoginBody):
             response, JWT_COOKIE_NAME, encoded_jwt, expiration, JWT_COOKIE_SECURE
         )
         return response
-    return JSONResponse(content={"message": "Login failed"}, status_code=400)
+    return JSONResponse(content={"message": "Login failed"}, status_code=401)


 @router.get("/users")

View File

@@ -3,7 +3,7 @@ from typing import Union
 from pydantic import BaseModel
 from pydantic.json_schema import SkipJsonSchema

-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum


 class ReviewQueryParams(BaseModel):

View File

@@ -0,0 +1,42 @@
+from typing import Any, Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class EventResponse(BaseModel):
+    id: str
+    label: str
+    sub_label: Optional[str]
+    camera: str
+    start_time: float
+    end_time: Optional[float]
+    false_positive: Optional[bool]
+    zones: list[str]
+    thumbnail: str
+    has_clip: bool
+    has_snapshot: bool
+    retain_indefinitely: bool
+    plus_id: Optional[str]
+    model_hash: Optional[str]
+    detector_type: Optional[str]
+    model_type: Optional[str]
+    data: dict[str, Any]
+
+    model_config = ConfigDict(protected_namespaces=())
+
+
+class EventCreateResponse(BaseModel):
+    success: bool
+    message: str
+    event_id: str
+
+
+class EventMultiDeleteResponse(BaseModel):
+    success: bool
+    deleted_events: list[str]
+    not_found_events: list[str]
+
+
+class EventUploadPlusResponse(BaseModel):
+    success: bool
+    plus_id: str
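These response models pair with the `response_model=` annotations added to the events API below; a minimal self-contained sketch of the pattern (standalone example, names are illustrative, not Frigate code):

```python
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class EventCreateResponse(BaseModel):
    success: bool
    message: str
    event_id: str

# response_model drives the generated OpenAPI spec and filters/validates
# the returned dict against the declared fields.
@app.post("/events/{camera_name}/{label}/create", response_model=EventCreateResponse)
def create_event(camera_name: str, label: str):
    return {"success": True, "message": f"{label} event created", "event_id": "abc123"}
```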

View File

@@ -3,7 +3,7 @@ from typing import Dict
 from pydantic import BaseModel, Json

-from frigate.review.maintainer import SeverityEnum
+from frigate.review.types import SeverityEnum


 class ReviewSegmentResponse(BaseModel):

View File

@@ -14,7 +14,16 @@ from fastapi.responses import JSONResponse
 from peewee import JOIN, DoesNotExist, fn, operator
 from playhouse.shortcuts import model_to_dict

-from frigate.api.defs.events_body import (
+from frigate.api.defs.query.events_query_parameters import (
+    DEFAULT_TIME_RANGE,
+    EventsQueryParams,
+    EventsSearchQueryParams,
+    EventsSummaryQueryParams,
+)
+from frigate.api.defs.query.regenerate_query_parameters import (
+    RegenerateQueryParameters,
+)
+from frigate.api.defs.request.events_body import (
     EventsCreateBody,
     EventsDeleteBody,
     EventsDescriptionBody,
@@ -22,19 +31,15 @@ from frigate.api.defs.events_body import (
     EventsSubLabelBody,
     SubmitPlusBody,
 )
-from frigate.api.defs.events_query_parameters import (
-    DEFAULT_TIME_RANGE,
-    EventsQueryParams,
-    EventsSearchQueryParams,
-    EventsSummaryQueryParams,
-)
-from frigate.api.defs.regenerate_query_parameters import (
-    RegenerateQueryParameters,
-)
+from frigate.api.defs.response.event_response import (
+    EventCreateResponse,
+    EventMultiDeleteResponse,
+    EventResponse,
+    EventUploadPlusResponse,
+)
+from frigate.api.defs.response.generic_response import GenericResponse
 from frigate.api.defs.tags import Tags
-from frigate.const import (
-    CLIPS_DIR,
-)
+from frigate.const import CLIPS_DIR
 from frigate.embeddings import EmbeddingsContext
 from frigate.events.external import ExternalEventProcessor
 from frigate.models import Event, ReviewSegment, Timeline
@@ -46,7 +51,7 @@ logger = logging.getLogger(__name__)
 router = APIRouter(tags=[Tags.events])

-@router.get("/events")
+@router.get("/events", response_model=list[EventResponse])
 def events(params: EventsQueryParams = Depends()):
     camera = params.camera
     cameras = params.cameras
@@ -248,6 +253,8 @@ def events(params: EventsQueryParams = Depends()):
             order_by = Event.start_time.asc()
         elif sort == "date_desc":
             order_by = Event.start_time.desc()
+        else:
+            order_by = Event.start_time.desc()
     else:
         order_by = Event.start_time.desc()
@@ -263,7 +270,7 @@ def events(params: EventsQueryParams = Depends()):
     return JSONResponse(content=list(events))

-@router.get("/events/explore")
+@router.get("/events/explore", response_model=list[EventResponse])
 def events_explore(limit: int = 10):
     # get distinct labels for all events
     distinct_labels = Event.select(Event.label).distinct().order_by(Event.label)
@@ -308,7 +315,8 @@ def events_explore(limit: int = 10):
             "data": {
                 k: v
                 for k, v in event.data.items()
-                if k in ["type", "score", "top_score", "description"]
+                if k
+                in ["type", "score", "top_score", "description", "sub_label_score"]
             },
             "event_count": label_counts[event.label],
         }
@@ -324,7 +332,7 @@ def events_explore(limit: int = 10):
     return JSONResponse(content=processed_events)

-@router.get("/event_ids")
+@router.get("/event_ids", response_model=list[EventResponse])
 def event_ids(ids: str):
     ids = ids.split(",")
@@ -582,19 +590,17 @@ def events_search(request: Request, params: EventsSearchQueryParams = Depends())
         processed_events.append(processed_event)

-    # Sort by search distance if search_results are available, otherwise by start_time as default
-    if search_results:
+    if (sort is None or sort == "relevance") and search_results:
         processed_events.sort(key=lambda x: x.get("search_distance", float("inf")))
+    elif min_score is not None and max_score is not None and sort == "score_asc":
+        processed_events.sort(key=lambda x: x["score"])
+    elif min_score is not None and max_score is not None and sort == "score_desc":
+        processed_events.sort(key=lambda x: x["score"], reverse=True)
+    elif sort == "date_asc":
+        processed_events.sort(key=lambda x: x["start_time"])
     else:
-        if sort == "score_asc":
-            processed_events.sort(key=lambda x: x["score"])
-        elif sort == "score_desc":
-            processed_events.sort(key=lambda x: x["score"], reverse=True)
-        elif sort == "date_asc":
-            processed_events.sort(key=lambda x: x["start_time"])
-        else:
-            # "date_desc" default
-            processed_events.sort(key=lambda x: x["start_time"], reverse=True)
+        # "date_desc" default
+        processed_events.sort(key=lambda x: x["start_time"], reverse=True)

     # Limit the number of events returned
     processed_events = processed_events[:limit]
@@ -647,7 +653,7 @@ def events_summary(params: EventsSummaryQueryParams = Depends()):
     return JSONResponse(content=[e for e in groups.dicts()])

-@router.get("/events/{event_id}")
+@router.get("/events/{event_id}", response_model=EventResponse)
 def event(event_id: str):
     try:
         return model_to_dict(Event.get(Event.id == event_id))
@@ -655,7 +661,7 @@ def event(event_id: str):
         return JSONResponse(content="Event not found", status_code=404)

-@router.post("/events/{event_id}/retain")
+@router.post("/events/{event_id}/retain", response_model=GenericResponse)
 def set_retain(event_id: str):
     try:
         event = Event.get(Event.id == event_id)
@@ -674,7 +680,7 @@ def set_retain(event_id: str):
     )

-@router.post("/events/{event_id}/plus")
+@router.post("/events/{event_id}/plus", response_model=EventUploadPlusResponse)
 def send_to_plus(request: Request, event_id: str, body: SubmitPlusBody = None):
     if not request.app.frigate_config.plus_api.is_active():
         message = "PLUS_API_KEY environment variable is not set"
@@ -786,7 +792,7 @@ def send_to_plus(request: Request, event_id: str, body: SubmitPlusBody = None):
     )

-@router.put("/events/{event_id}/false_positive")
+@router.put("/events/{event_id}/false_positive", response_model=EventUploadPlusResponse)
 def false_positive(request: Request, event_id: str):
     if not request.app.frigate_config.plus_api.is_active():
         message = "PLUS_API_KEY environment variable is not set"
@@ -875,7 +881,7 @@ def false_positive(request: Request, event_id: str):
     )

-@router.delete("/events/{event_id}/retain")
+@router.delete("/events/{event_id}/retain", response_model=GenericResponse)
 def delete_retain(event_id: str):
     try:
         event = Event.get(Event.id == event_id)
@@ -894,7 +900,7 @@ def delete_retain(event_id: str):
     )

-@router.post("/events/{event_id}/sub_label")
+@router.post("/events/{event_id}/sub_label", response_model=GenericResponse)
 def set_sub_label(
     request: Request,
     event_id: str,
@@ -946,7 +952,7 @@ def set_sub_label(
     )

-@router.post("/events/{event_id}/description")
+@router.post("/events/{event_id}/description", response_model=GenericResponse)
 def set_description(
     request: Request,
     event_id: str,
@@ -993,7 +999,7 @@ def set_description(
     )

-@router.put("/events/{event_id}/description/regenerate")
+@router.put("/events/{event_id}/description/regenerate", response_model=GenericResponse)
 def regenerate_description(
     request: Request, event_id: str, params: RegenerateQueryParameters = Depends()
 ):
@@ -1064,14 +1070,14 @@ def delete_single_event(event_id: str, request: Request) -> dict:
     return {"success": True, "message": f"Event {event_id} deleted"}

-@router.delete("/events/{event_id}")
+@router.delete("/events/{event_id}", response_model=GenericResponse)
 def delete_event(request: Request, event_id: str):
     result = delete_single_event(event_id, request)
     status_code = 200 if result["success"] else 404
     return JSONResponse(content=result, status_code=status_code)

-@router.delete("/events/")
+@router.delete("/events/", response_model=EventMultiDeleteResponse)
 def delete_events(request: Request, body: EventsDeleteBody):
     if not body.event_ids:
         return JSONResponse(
@@ -1097,7 +1103,7 @@ def delete_events(request: Request, body: EventsDeleteBody):
     return JSONResponse(content=response, status_code=200)

-@router.post("/events/{camera_name}/{label}/create")
+@router.post("/events/{camera_name}/{label}/create", response_model=EventCreateResponse)
 def create_event(
     request: Request,
     camera_name: str,
@@ -1153,7 +1159,7 @@ def create_event(
     )

-@router.put("/events/{event_id}/end")
+@router.put("/events/{event_id}/end", response_model=GenericResponse)
 def end_event(request: Request, event_id: str, body: EventsEndBody):
     try:
         end_time = body.end_time or datetime.datetime.now().timestamp()

View File

@@ -9,6 +9,7 @@ import psutil
 from fastapi import APIRouter, Request
 from fastapi.responses import JSONResponse
 from peewee import DoesNotExist
+from playhouse.shortcuts import model_to_dict

 from frigate.api.defs.request.export_recordings_body import ExportRecordingsBody
 from frigate.api.defs.tags import Tags
@@ -207,3 +208,14 @@ def export_delete(event_id: str):
         ),
         status_code=200,
     )
+
+
+@router.get("/exports/{export_id}")
+def get_export(export_id: str):
+    try:
+        return JSONResponse(content=model_to_dict(Export.get(Export.id == export_id)))
+    except DoesNotExist:
+        return JSONResponse(
+            content={"success": False, "message": "Export not found"},
+            status_code=404,
+        )
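Consuming the new endpoint; a hypothetical call assuming the API is mounted under `/api` on port 5000 and an export id copied from the exports list:

```python
import requests

resp = requests.get("http://127.0.0.1:5000/api/exports/some_export_id")
if resp.status_code == 404:
    print(resp.json())  # {'success': False, 'message': 'Export not found'}
else:
    print(resp.json())  # the export row serialized by model_to_dict()
```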

View File

@@ -87,7 +87,11 @@ def create_fastapi_app(
logger.info("FastAPI started") logger.info("FastAPI started")
# Rate limiter (used for login endpoint) # Rate limiter (used for login endpoint)
auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit or "") if frigate_config.auth.failed_login_rate_limit is None:
limiter.enabled = False
else:
auth.rateLimiter.set_limit(frigate_config.auth.failed_login_rate_limit)
app.state.limiter = limiter app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler) app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)
app.add_middleware(SlowAPIMiddleware) app.add_middleware(SlowAPIMiddleware)
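A minimal sketch of the slowapi wiring this hunk relies on, with `failed_login_rate_limit` standing in for `frigate_config.auth.failed_login_rate_limit` (the limit string syntax is slowapi's):

```python
from slowapi import Limiter
from slowapi.util import get_remote_address

# Mirrors the conditional above: with no configured limit the limiter is
# disabled entirely instead of being handed an empty limit string.
failed_login_rate_limit = None  # e.g. "1/second;5/minute" when configured

limiter = Limiter(key_func=get_remote_address)

if failed_login_rate_limit is None:
    limiter.enabled = False
```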

View File

@@ -20,7 +20,7 @@ from pathvalidate import sanitize_filename
 from peewee import DoesNotExist, fn
 from tzlocal import get_localzone_name

-from frigate.api.defs.media_query_parameters import (
+from frigate.api.defs.query.media_query_parameters import (
     Extension,
     MediaEventsSnapshotQueryParams,
     MediaLatestFrameQueryParams,

View File

@@ -12,20 +12,21 @@ from fastapi.responses import JSONResponse
 from peewee import Case, DoesNotExist, fn, operator
 from playhouse.shortcuts import model_to_dict

-from frigate.api.defs.generic_response import GenericResponse
-from frigate.api.defs.review_body import ReviewModifyMultipleBody
-from frigate.api.defs.review_query_parameters import (
+from frigate.api.defs.query.review_query_parameters import (
     ReviewActivityMotionQueryParams,
     ReviewQueryParams,
     ReviewSummaryQueryParams,
 )
-from frigate.api.defs.review_responses import (
+from frigate.api.defs.request.review_body import ReviewModifyMultipleBody
+from frigate.api.defs.response.generic_response import GenericResponse
+from frigate.api.defs.response.review_response import (
     ReviewActivityMotionResponse,
     ReviewSegmentResponse,
     ReviewSummaryResponse,
 )
 from frigate.api.defs.tags import Tags
 from frigate.models import Recordings, ReviewSegment
+from frigate.review.types import SeverityEnum
 from frigate.util.builtin import get_tz_modifiers

 logger = logging.getLogger(__name__)
@@ -161,7 +162,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "alert"),
+                            (ReviewSegment.severity == SeverityEnum.alert),
                             ReviewSegment.has_been_reviewed,
                         )
                     ],
@@ -173,7 +174,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "detection"),
+                            (ReviewSegment.severity == SeverityEnum.detection),
                             ReviewSegment.has_been_reviewed,
                         )
                     ],
@@ -185,7 +186,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "alert"),
+                            (ReviewSegment.severity == SeverityEnum.alert),
                             1,
                         )
                     ],
@@ -197,7 +198,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "detection"),
+                            (ReviewSegment.severity == SeverityEnum.detection),
                             1,
                         )
                     ],
@@ -230,6 +231,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
         label_clause = reduce(operator.or_, label_clauses)
         clauses.append((label_clause))

+    day_in_seconds = 60 * 60 * 24
     last_month = (
         ReviewSegment.select(
             fn.strftime(
@@ -246,7 +248,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "alert"),
+                            (ReviewSegment.severity == SeverityEnum.alert),
                             ReviewSegment.has_been_reviewed,
                         )
                     ],
@@ -258,7 +260,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "detection"),
+                            (ReviewSegment.severity == SeverityEnum.detection),
                             ReviewSegment.has_been_reviewed,
                         )
                     ],
@@ -270,7 +272,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "alert"),
+                            (ReviewSegment.severity == SeverityEnum.alert),
                             1,
                         )
                     ],
@@ -282,7 +284,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
                     None,
                     [
                         (
-                            (ReviewSegment.severity == "detection"),
+                            (ReviewSegment.severity == SeverityEnum.detection),
                             1,
                         )
                     ],
@@ -292,7 +294,7 @@ def review_summary(params: ReviewSummaryQueryParams = Depends()):
         )
         .where(reduce(operator.and_, clauses))
         .group_by(
-            (ReviewSegment.start_time + seconds_offset).cast("int") / (3600 * 24),
+            (ReviewSegment.start_time + seconds_offset).cast("int") / day_in_seconds,
         )
         .order_by(ReviewSegment.start_time.desc())
     )
@@ -362,7 +364,7 @@ def delete_reviews(body: ReviewModifyMultipleBody):
     ReviewSegment.delete().where(ReviewSegment.id << list_of_ids).execute()

     return JSONResponse(
-        content=({"success": True, "message": "Delete reviews"}), status_code=200
+        content=({"success": True, "message": "Deleted review items."}), status_code=200
     )
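The `day_in_seconds` constant only names the former `3600 * 24` magic number; the bucketing arithmetic is unchanged. A standalone check of how the grouping expression assigns rows to local-calendar days (sample timestamps and offset assumed; in the real query `get_tz_modifiers()` supplies the offset):

```python
day_in_seconds = 60 * 60 * 24

# Two timestamps 8 hours apart on the same local day land in the same bucket:
# integer division of the tz-adjusted epoch by 86400 yields a day index.
seconds_offset = -7 * 3600  # e.g. UTC-7
for ts in (1733500800.0, 1733529600.0):
    print(int(ts + seconds_offset) // day_in_seconds)  # same value twice
```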

View File

@@ -4,7 +4,6 @@ import datetime
 import logging
 import os
 import threading
-from enum import Enum
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
@@ -16,11 +15,6 @@ from frigate.models import Event, Timeline
 logger = logging.getLogger(__name__)

-class EventCleanupType(str, Enum):
-    clips = "clips"
-    snapshots = "snapshots"

 CHUNK_SIZE = 50
@@ -67,19 +61,11 @@ class EventCleanup(threading.Thread):
         return self.camera_labels[camera]["labels"]

-    def expire(self, media_type: EventCleanupType) -> list[str]:
+    def expire_snapshots(self) -> list[str]:
         ## Expire events from unlisted cameras based on the global config
-        if media_type == EventCleanupType.clips:
-            expire_days = max(
-                self.config.record.alerts.retain.days,
-                self.config.record.detections.retain.days,
-            )
-            file_extension = None  # mp4 clips are no longer stored in /clips
-            update_params = {"has_clip": False}
-        else:
-            retain_config = self.config.snapshots.retain
-            file_extension = "jpg"
-            update_params = {"has_snapshot": False}
+        retain_config = self.config.snapshots.retain
+        file_extension = "jpg"
+        update_params = {"has_snapshot": False}

         distinct_labels = self.get_removed_camera_labels()
@@ -87,10 +73,7 @@
         # loop over object types in db
         for event in distinct_labels:
             # get expiration time for this label
-            if media_type == EventCleanupType.snapshots:
-                expire_days = retain_config.objects.get(
-                    event.label, retain_config.default
-                )
+            expire_days = retain_config.objects.get(event.label, retain_config.default)

             expire_after = (
                 datetime.datetime.now() - datetime.timedelta(days=expire_days)
@@ -162,13 +145,7 @@
         ## Expire events from cameras based on the camera config
         for name, camera in self.config.cameras.items():
-            if media_type == EventCleanupType.clips:
-                expire_days = max(
-                    camera.record.alerts.retain.days,
-                    camera.record.detections.retain.days,
-                )
-            else:
-                retain_config = camera.snapshots.retain
+            retain_config = camera.snapshots.retain

             # get distinct objects in database for this camera
             distinct_labels = self.get_camera_labels(name)
@@ -176,10 +153,9 @@
             # loop over object types in db
             for event in distinct_labels:
                 # get expiration time for this label
-                if media_type == EventCleanupType.snapshots:
-                    expire_days = retain_config.objects.get(
-                        event.label, retain_config.default
-                    )
+                expire_days = retain_config.objects.get(
+                    event.label, retain_config.default
+                )

                 expire_after = (
                     datetime.datetime.now() - datetime.timedelta(days=expire_days)
@@ -206,19 +182,143 @@
             for event in expired_events:
                 events_to_update.append(event.id)

-                if media_type == EventCleanupType.snapshots:
-                    try:
-                        media_name = f"{event.camera}-{event.id}"
-                        media_path = Path(
-                            f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
-                        )
-                        media_path.unlink(missing_ok=True)
-                        media_path = Path(
-                            f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
-                        )
-                        media_path.unlink(missing_ok=True)
-                    except OSError as e:
-                        logger.warning(f"Unable to delete event images: {e}")
+                try:
+                    media_name = f"{event.camera}-{event.id}"
+                    media_path = Path(
+                        f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}"
+                    )
+                    media_path.unlink(missing_ok=True)
+                    media_path = Path(
+                        f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
+                    )
+                    media_path.unlink(missing_ok=True)
+                except OSError as e:
+                    logger.warning(f"Unable to delete event images: {e}")
+
+        # update the clips attribute for the db entry
+        for i in range(0, len(events_to_update), CHUNK_SIZE):
+            batch = events_to_update[i : i + CHUNK_SIZE]
+            logger.debug(f"Updating {update_params} for {len(batch)} events")
+            Event.update(update_params).where(Event.id << batch).execute()
+
+        return events_to_update
+
+    def expire_clips(self) -> list[str]:
+        ## Expire events from unlisted cameras based on the global config
+        expire_days = max(
+            self.config.record.alerts.retain.days,
+            self.config.record.detections.retain.days,
+        )
+        file_extension = None  # mp4 clips are no longer stored in /clips
+        update_params = {"has_clip": False}
+
+        # get expiration time for this label
+        expire_after = (
+            datetime.datetime.now() - datetime.timedelta(days=expire_days)
+        ).timestamp()
+        # grab all events after specific time
+        expired_events: list[Event] = (
+            Event.select(
+                Event.id,
+                Event.camera,
+            )
+            .where(
+                Event.camera.not_in(self.camera_keys),
+                Event.start_time < expire_after,
+                Event.retain_indefinitely == False,
+            )
+            .namedtuples()
+            .iterator()
+        )
+        logger.debug(f"{len(list(expired_events))} events can be expired")
+        # delete the media from disk
+        for expired in expired_events:
+            media_name = f"{expired.camera}-{expired.id}"
+            media_path = Path(f"{os.path.join(CLIPS_DIR, media_name)}.{file_extension}")
+
+            try:
+                media_path.unlink(missing_ok=True)
+
+                if file_extension == "jpg":
+                    media_path = Path(
+                        f"{os.path.join(CLIPS_DIR, media_name)}-clean.png"
+                    )
+                    media_path.unlink(missing_ok=True)
+            except OSError as e:
+                logger.warning(f"Unable to delete event images: {e}")
+
+        # update the clips attribute for the db entry
+        query = Event.select(Event.id).where(
+            Event.camera.not_in(self.camera_keys),
+            Event.start_time < expire_after,
+            Event.retain_indefinitely == False,
+        )
+
+        events_to_update = []
+
+        for batch in query.iterator():
+            events_to_update.extend([event.id for event in batch])
+            if len(events_to_update) >= CHUNK_SIZE:
+                logger.debug(
+                    f"Updating {update_params} for {len(events_to_update)} events"
+                )
+                Event.update(update_params).where(
+                    Event.id << events_to_update
+                ).execute()
+                events_to_update = []
+
+        # Update any remaining events
+        if events_to_update:
+            logger.debug(
+                f"Updating clips/snapshots attribute for {len(events_to_update)} events"
+            )
+            Event.update(update_params).where(Event.id << events_to_update).execute()
+            events_to_update = []
+
+        now = datetime.datetime.now()
+        ## Expire events from cameras based on the camera config
+        for name, camera in self.config.cameras.items():
+            expire_days = max(
+                camera.record.alerts.retain.days,
+                camera.record.detections.retain.days,
+            )
+            alert_expire_date = (
+                now - datetime.timedelta(days=camera.record.alerts.retain.days)
+            ).timestamp()
+            detection_expire_date = (
+                now - datetime.timedelta(days=camera.record.detections.retain.days)
+            ).timestamp()
+            # grab all events after specific time
+            expired_events = (
+                Event.select(
+                    Event.id,
+                    Event.camera,
+                )
+                .where(
+                    Event.camera == name,
+                    Event.retain_indefinitely == False,
+                    (
+                        (
+                            (Event.data["max_severity"] != "detection")
+                            | (Event.data["max_severity"].is_null())
+                        )
+                        & (Event.end_time < alert_expire_date)
+                    )
+                    | (
+                        (Event.data["max_severity"] == "detection")
+                        & (Event.end_time < detection_expire_date)
+                    ),
+                )
+                .namedtuples()
+                .iterator()
+            )
+
+            # delete the grabbed clips from disk
+            # only snapshots are stored in /clips
+            # so no need to delete mp4 files
+            for event in expired_events:
+                events_to_update.append(event.id)

         # update the clips attribute for the db entry
         for i in range(0, len(events_to_update), CHUNK_SIZE):
@@ -230,8 +330,9 @@
     def run(self) -> None:
         # only expire events every 5 minutes
-        while not self.stop_event.wait(300):
-            events_with_expired_clips = self.expire(EventCleanupType.clips)
+        while not self.stop_event.wait(1):
+            events_with_expired_clips = self.expire_clips()
+            return

             # delete timeline entries for events that have expired recordings
             # delete up to 100,000 at a time
@@ -242,7 +343,7 @@
                     Timeline.source_id << deleted_events_list[i : i + max_deletes]
                 ).execute()

-            self.expire(EventCleanupType.snapshots)
+            self.expire_snapshots()

             # drop events from db where has_clip and has_snapshot are false
             events = (
View File

@@ -210,6 +210,7 @@ class EventProcessor(threading.Thread):
"top_score": event_data["top_score"], "top_score": event_data["top_score"],
"attributes": attributes, "attributes": attributes,
"type": "object", "type": "object",
"max_severity": event_data.get("max_severity"),
}, },
} }

View File

@@ -702,30 +702,7 @@ class TrackedObjectProcessor(threading.Thread):
             return False

         # If the object is not considered an alert or detection
-        review_config = self.config.cameras[camera].review
-        if not (
-            (
-                obj.obj_data["label"] in review_config.alerts.labels
-                and (
-                    not review_config.alerts.required_zones
-                    or set(obj.entered_zones) & set(review_config.alerts.required_zones)
-                )
-            )
-            or (
-                (
-                    not review_config.detections.labels
-                    or obj.obj_data["label"] in review_config.detections.labels
-                )
-                and (
-                    not review_config.detections.required_zones
-                    or set(obj.entered_zones)
-                    & set(review_config.detections.required_zones)
-                )
-            )
-        ):
-            logger.debug(
-                f"Not creating clip for {obj.obj_data['id']} because it did not qualify as an alert or detection"
-            )
+        if obj.max_severity is None:
             return False

         return True

View File

@@ -7,7 +7,6 @@ import random
 import string
 import sys
 import threading
-from enum import Enum
 from multiprocessing.synchronize import Event as MpEvent
 from pathlib import Path
 from typing import Optional
@@ -27,6 +26,7 @@ from frigate.const import (
 from frigate.events.external import ManualEventState
 from frigate.models import ReviewSegment
 from frigate.object_processing import TrackedObject
+from frigate.review.types import SeverityEnum
 from frigate.util.image import SharedMemoryFrameManager, calculate_16_9_crop

 logger = logging.getLogger(__name__)
@@ -39,11 +39,6 @@ THRESHOLD_ALERT_ACTIVITY = 120
 THRESHOLD_DETECTION_ACTIVITY = 30

-class SeverityEnum(str, Enum):
-    alert = "alert"
-    detection = "detection"

 class PendingReviewSegment:
     def __init__(
         self,

frigate/review/types.py (new file)
View File

@@ -0,0 +1,6 @@
+from enum import Enum
+
+
+class SeverityEnum(str, Enum):
+    alert = "alert"
+    detection = "detection"

View File

@@ -9,8 +9,8 @@ from playhouse.sqliteq import SqliteQueueDatabase
 from frigate.api.fastapi_app import create_fastapi_app
 from frigate.config import FrigateConfig
-from frigate.models import Event, ReviewSegment
-from frigate.review.maintainer import SeverityEnum
+from frigate.models import Event, Recordings, ReviewSegment
+from frigate.review.types import SeverityEnum
 from frigate.test.const import TEST_DB, TEST_DB_CLEANUPS
@@ -146,17 +146,35 @@ class BaseTestHttp(unittest.TestCase):
     def insert_mock_review_segment(
         self,
         id: str,
-        start_time: datetime.datetime = datetime.datetime.now().timestamp(),
-        end_time: datetime.datetime = datetime.datetime.now().timestamp() + 20,
+        start_time: float = datetime.datetime.now().timestamp(),
+        end_time: float = datetime.datetime.now().timestamp() + 20,
+        severity: SeverityEnum = SeverityEnum.alert,
+        has_been_reviewed: bool = False,
     ) -> Event:
-        """Inserts a basic event model with a given id."""
+        """Inserts a review segment model with a given id."""
         return ReviewSegment.insert(
             id=id,
             camera="front_door",
             start_time=start_time,
             end_time=end_time,
-            has_been_reviewed=False,
-            severity=SeverityEnum.alert,
+            has_been_reviewed=has_been_reviewed,
+            severity=severity,
             thumb_path=False,
             data={},
         ).execute()
+
+    def insert_mock_recording(
+        self,
+        id: str,
+        start_time: float = datetime.datetime.now().timestamp(),
+        end_time: float = datetime.datetime.now().timestamp() + 20,
+    ) -> Event:
+        """Inserts a recording model with a given id."""
+        return Recordings.insert(
+            id=id,
+            path=id,
+            camera="front_door",
+            start_time=start_time,
+            end_time=end_time,
+            duration=end_time - start_time,
+        ).execute()

View File

@@ -1,76 +1,89 @@
import datetime from datetime import datetime, timedelta
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
from frigate.models import Event, ReviewSegment from frigate.models import Event, Recordings, ReviewSegment
from frigate.review.types import SeverityEnum
from frigate.test.http_api.base_http_test import BaseTestHttp from frigate.test.http_api.base_http_test import BaseTestHttp
class TestHttpReview(BaseTestHttp): class TestHttpReview(BaseTestHttp):
def setUp(self): def setUp(self):
super().setUp([Event, ReviewSegment]) super().setUp([Event, Recordings, ReviewSegment])
self.app = super().create_app()
def _get_reviews(self, ids: list[str]):
return list(
ReviewSegment.select(ReviewSegment.id)
.where(ReviewSegment.id.in_(ids))
.execute()
)
def _get_recordings(self, ids: list[str]):
return list(
Recordings.select(Recordings.id).where(Recordings.id.in_(ids)).execute()
)
####################################################################################################################
################################### GET /review Endpoint ########################################################
####################################################################################################################
# Does not return any data point since the end time (before parameter) is not passed and the review segment end_time is 2 seconds from now # Does not return any data point since the end time (before parameter) is not passed and the review segment end_time is 2 seconds from now
def test_get_review_no_filters_no_matches(self): def test_get_review_no_filters_no_matches(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random", now, now + 2) super().insert_mock_review_segment("123456.random", now, now + 2)
reviews_response = client.get("/review") response = client.get("/review")
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 0 assert len(response_json) == 0
def test_get_review_no_filters(self): def test_get_review_no_filters(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random", now - 2, now - 1) super().insert_mock_review_segment("123456.random", now - 2, now - 1)
reviews_response = client.get("/review") response = client.get("/review")
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 1 assert len(response_json) == 1
def test_get_review_with_time_filter_no_matches(self): def test_get_review_with_time_filter_no_matches(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
id = "123456.random" id = "123456.random"
super().insert_mock_review_segment(id, now, now + 2) super().insert_mock_review_segment(id, now, now + 2)
params = { params = {
"after": now, "after": now,
"before": now + 3, "before": now + 3,
} }
reviews_response = client.get("/review", params=params) response = client.get("/review", params=params)
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 0 assert len(response_json) == 0
def test_get_review_with_time_filter(self): def test_get_review_with_time_filter(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
id = "123456.random" id = "123456.random"
super().insert_mock_review_segment(id, now, now + 2) super().insert_mock_review_segment(id, now, now + 2)
params = { params = {
"after": now - 1, "after": now - 1,
"before": now + 3, "before": now + 3,
} }
reviews_response = client.get("/review", params=params) response = client.get("/review", params=params)
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 1 assert len(response_json) == 1
assert reviews_in_response[0]["id"] == id assert response_json[0]["id"] == id
def test_get_review_with_limit_filter(self): def test_get_review_with_limit_filter(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
id = "123456.random" id = "123456.random"
id2 = "654321.random" id2 = "654321.random"
super().insert_mock_review_segment(id, now, now + 2) super().insert_mock_review_segment(id, now, now + 2)
@@ -80,17 +93,49 @@ class TestHttpReview(BaseTestHttp):
"after": now, "after": now,
"before": now + 3, "before": now + 3,
} }
reviews_response = client.get("/review", params=params) response = client.get("/review", params=params)
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 1 assert len(response_json) == 1
assert reviews_in_response[0]["id"] == id2 assert response_json[0]["id"] == id2
def test_get_review_with_severity_filters_no_matches(self):
now = datetime.now().timestamp()
with TestClient(self.app) as client:
id = "123456.random"
super().insert_mock_review_segment(id, now, now + 2, SeverityEnum.detection)
params = {
"severity": "detection",
"after": now - 1,
"before": now + 3,
}
response = client.get("/review", params=params)
assert response.status_code == 200
response_json = response.json()
assert len(response_json) == 1
assert response_json[0]["id"] == id
def test_get_review_with_severity_filters(self):
now = datetime.now().timestamp()
with TestClient(self.app) as client:
id = "123456.random"
super().insert_mock_review_segment(id, now, now + 2, SeverityEnum.detection)
params = {
"severity": "alert",
"after": now - 1,
"before": now + 3,
}
response = client.get("/review", params=params)
assert response.status_code == 200
response_json = response.json()
assert len(response_json) == 0
def test_get_review_with_all_filters(self): def test_get_review_with_all_filters(self):
app = super().create_app() now = datetime.now().timestamp()
now = datetime.datetime.now().timestamp()
with TestClient(app) as client: with TestClient(self.app) as client:
id = "123456.random" id = "123456.random"
super().insert_mock_review_segment(id, now, now + 2) super().insert_mock_review_segment(id, now, now + 2)
params = { params = {
@@ -103,8 +148,424 @@ class TestHttpReview(BaseTestHttp):
"after": now - 1, "after": now - 1,
"before": now + 3, "before": now + 3,
} }
reviews_response = client.get("/review", params=params) response = client.get("/review", params=params)
assert reviews_response.status_code == 200 assert response.status_code == 200
reviews_in_response = reviews_response.json() response_json = response.json()
assert len(reviews_in_response) == 1 assert len(response_json) == 1
assert reviews_in_response[0]["id"] == id assert response_json[0]["id"] == id
####################################################################################################################
################################### GET /review/summary Endpoint #################################################
####################################################################################################################
def test_get_review_summary_all_filters(self):
with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random")
params = {
"cameras": "front_door",
"labels": "all",
"zones": "all",
"timezone": "utc",
}
response = client.get("/review/summary", params=params)
assert response.status_code == 200
response_json = response.json()
# e.g. '2024-11-24'
today_formatted = datetime.today().strftime("%Y-%m-%d")
expected_response = {
"last24Hours": {
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
today_formatted: {
"day": today_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
}
self.assertEqual(response_json, expected_response)
def test_get_review_summary_no_filters(self):
with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random")
response = client.get("/review/summary")
assert response.status_code == 200
response_json = response.json()
# e.g. '2024-11-24'
today_formatted = datetime.today().strftime("%Y-%m-%d")
expected_response = {
"last24Hours": {
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
today_formatted: {
"day": today_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
}
self.assertEqual(response_json, expected_response)
def test_get_review_summary_multiple_days(self):
now = datetime.now()
five_days_ago = datetime.today() - timedelta(days=5)
with TestClient(self.app) as client:
super().insert_mock_review_segment(
"123456.random", now.timestamp() - 2, now.timestamp() - 1
)
super().insert_mock_review_segment(
"654321.random",
five_days_ago.timestamp(),
five_days_ago.timestamp() + 1,
)
response = client.get("/review/summary")
assert response.status_code == 200
response_json = response.json()
# e.g. '2024-11-24'
today_formatted = now.strftime("%Y-%m-%d")
# e.g. '2024-11-19'
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
expected_response = {
"last24Hours": {
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
today_formatted: {
"day": today_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
five_days_ago_formatted: {
"day": five_days_ago_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
}
self.assertEqual(response_json, expected_response)
def test_get_review_summary_multiple_days_edge_cases(self):
now = datetime.now()
five_days_ago = datetime.today() - timedelta(days=5)
twenty_days_ago = datetime.today() - timedelta(days=20)
one_month_ago = datetime.today() - timedelta(days=30)
one_month_ago_ts = one_month_ago.timestamp()
with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random", now.timestamp())
super().insert_mock_review_segment(
"123457.random", five_days_ago.timestamp()
)
super().insert_mock_review_segment(
"123458.random",
twenty_days_ago.timestamp(),
None,
SeverityEnum.detection,
)
# One month ago plus 5 seconds fits within the condition (review.start_time > month_ago). Assuming that the endpoint does not take more than 5 seconds to be invoked
super().insert_mock_review_segment(
"123459.random",
one_month_ago_ts + 5,
None,
SeverityEnum.detection,
)
# This won't appear in the output since it's not within last month start_time clause (review.start_time > month_ago)
super().insert_mock_review_segment("123450.random", one_month_ago_ts)
response = client.get("/review/summary")
assert response.status_code == 200
response_json = response.json()
# e.g. '2024-11-24'
today_formatted = now.strftime("%Y-%m-%d")
# e.g. '2024-11-19'
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
# e.g. '2024-11-04'
twenty_days_ago_formatted = twenty_days_ago.strftime("%Y-%m-%d")
# e.g. '2024-10-24'
one_month_ago_formatted = one_month_ago.strftime("%Y-%m-%d")
expected_response = {
"last24Hours": {
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
today_formatted: {
"day": today_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
five_days_ago_formatted: {
"day": five_days_ago_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
twenty_days_ago_formatted: {
"day": twenty_days_ago_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 0,
"total_detection": 1,
},
one_month_ago_formatted: {
"day": one_month_ago_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 0,
"total_detection": 1,
},
}
self.assertEqual(response_json, expected_response)
def test_get_review_summary_multiple_in_same_day(self):
now = datetime.now()
five_days_ago = datetime.today() - timedelta(days=5)
with TestClient(self.app) as client:
super().insert_mock_review_segment("123456.random", now.timestamp())
five_days_ago_ts = five_days_ago.timestamp()
for i in range(20):
super().insert_mock_review_segment(
f"123456_{i}.random_alert",
five_days_ago_ts,
five_days_ago_ts,
SeverityEnum.alert,
)
for i in range(15):
super().insert_mock_review_segment(
f"123456_{i}.random_detection",
five_days_ago_ts,
five_days_ago_ts,
SeverityEnum.detection,
)
response = client.get("/review/summary")
assert response.status_code == 200
response_json = response.json()
# e.g. '2024-11-24'
today_formatted = now.strftime("%Y-%m-%d")
# e.g. '2024-11-19'
five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
expected_response = {
"last24Hours": {
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
today_formatted: {
"day": today_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 1,
"total_detection": 0,
},
five_days_ago_formatted: {
"day": five_days_ago_formatted,
"reviewed_alert": 0,
"reviewed_detection": 0,
"total_alert": 20,
"total_detection": 15,
},
}
self.assertEqual(response_json, expected_response)
def test_get_review_summary_multiple_in_same_day_with_reviewed(self):
five_days_ago = datetime.today() - timedelta(days=5)
with TestClient(self.app) as client:
five_days_ago_ts = five_days_ago.timestamp()
for i in range(10):
super().insert_mock_review_segment(
f"123456_{i}.random_alert_not_reviewed",
five_days_ago_ts,
five_days_ago_ts,
SeverityEnum.alert,
False,
)
for i in range(10):
super().insert_mock_review_segment(
f"123456_{i}.random_alert_reviewed",
five_days_ago_ts,
                    five_days_ago_ts,
                    five_days_ago_ts,
                    SeverityEnum.alert,
                    True,
                )
            for i in range(10):
                super().insert_mock_review_segment(
                    f"123456_{i}.random_detection_not_reviewed",
                    five_days_ago_ts,
                    five_days_ago_ts,
                    SeverityEnum.detection,
                    False,
                )
            for i in range(5):
                super().insert_mock_review_segment(
                    f"123456_{i}.random_detection_reviewed",
                    five_days_ago_ts,
                    five_days_ago_ts,
                    SeverityEnum.detection,
                    True,
                )

            response = client.get("/review/summary")
            assert response.status_code == 200
            response_json = response.json()
            # e.g. '2024-11-19'
            five_days_ago_formatted = five_days_ago.strftime("%Y-%m-%d")
            expected_response = {
                "last24Hours": {
                    "reviewed_alert": None,
                    "reviewed_detection": None,
                    "total_alert": None,
                    "total_detection": None,
                },
                five_days_ago_formatted: {
                    "day": five_days_ago_formatted,
                    "reviewed_alert": 10,
                    "reviewed_detection": 5,
                    "total_alert": 20,
                    "total_detection": 15,
                },
            }
            self.assertEqual(response_json, expected_response)

    ####################################################################################################################
    ################################### POST reviews/viewed Endpoint ################################################
    ####################################################################################################################

    def test_post_reviews_viewed_no_body(self):
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random")

            response = client.post("/reviews/viewed")

            # Missing ids
            assert response.status_code == 422

    def test_post_reviews_viewed_no_body_ids(self):
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random")

            body = {"ids": [""]}
            response = client.post("/reviews/viewed", json=body)

            # Missing ids
            assert response.status_code == 422

    def test_post_reviews_viewed_non_existent_id(self):
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id)

            body = {"ids": ["1"]}
            response = client.post("/reviews/viewed", json=body)

            assert response.status_code == 200
            response = response.json()
            assert response["success"] == True
            assert response["message"] == "Reviewed multiple items"

            # Verify that in DB the review segment was not changed
            review_segment_in_db = (
                ReviewSegment.select(ReviewSegment.has_been_reviewed)
                .where(ReviewSegment.id == id)
                .get()
            )
            assert review_segment_in_db.has_been_reviewed == False

    def test_post_reviews_viewed(self):
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id)

            body = {"ids": [id]}
            response = client.post("/reviews/viewed", json=body)

            assert response.status_code == 200
            response = response.json()
            assert response["success"] == True
            assert response["message"] == "Reviewed multiple items"

            # Verify that in DB the review segment was changed
            review_segment_in_db = (
                ReviewSegment.select(ReviewSegment.has_been_reviewed)
                .where(ReviewSegment.id == id)
                .get()
            )
            assert review_segment_in_db.has_been_reviewed == True

    ####################################################################################################################
    ################################### POST reviews/delete Endpoint ################################################
    ####################################################################################################################

    def test_post_reviews_delete_no_body(self):
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random")

            response = client.post("/reviews/delete")

            # Missing ids
            assert response.status_code == 422

    def test_post_reviews_delete_no_body_ids(self):
        with TestClient(self.app) as client:
            super().insert_mock_review_segment("123456.random")

            body = {"ids": [""]}
            response = client.post("/reviews/delete", json=body)

            # Missing ids
            assert response.status_code == 422

    def test_post_reviews_delete_non_existent_id(self):
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id)

            body = {"ids": ["1"]}
            response = client.post("/reviews/delete", json=body)

            assert response.status_code == 200
            response_json = response.json()
            assert response_json["success"] == True
            assert response_json["message"] == "Deleted review items."

            # Verify that in DB the review segment was not deleted
            review_ids_in_db_after = self._get_reviews([id])
            assert len(review_ids_in_db_after) == 1
            assert review_ids_in_db_after[0].id == id

    def test_post_reviews_delete(self):
        with TestClient(self.app) as client:
            id = "123456.random"
            super().insert_mock_review_segment(id)

            body = {"ids": [id]}
            response = client.post("/reviews/delete", json=body)

            assert response.status_code == 200
            response_json = response.json()
            assert response_json["success"] == True
            assert response_json["message"] == "Deleted review items."

            # Verify that in DB the review segment was deleted
            review_ids_in_db_after = self._get_reviews([id])
            assert len(review_ids_in_db_after) == 0

    def test_post_reviews_delete_many(self):
        with TestClient(self.app) as client:
            ids = ["123456.random", "654321.random"]
            for id in ids:
                super().insert_mock_review_segment(id)
                super().insert_mock_recording(id)

            review_ids_in_db_before = self._get_reviews(ids)
            recordings_ids_in_db_before = self._get_recordings(ids)
            assert len(review_ids_in_db_before) == 2
            assert len(recordings_ids_in_db_before) == 2

            body = {"ids": ids}
            response = client.post("/reviews/delete", json=body)

            assert response.status_code == 200
            response_json = response.json()
            assert response_json["success"] == True
            assert response_json["message"] == "Deleted review items."

            # Verify that in DB all review segments and recordings that were passed were deleted
            review_ids_in_db_after = self._get_reviews(ids)
            recording_ids_in_db_after = self._get_recordings(ids)
            assert len(review_ids_in_db_after) == 0
            assert len(recording_ids_in_db_after) == 0
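
For reference, a minimal sketch of how these endpoints can be exercised against a running instance. The base URL and `/api` prefix are assumptions for a typical deployment; the tests above call the routes directly on the app:

```python
import requests

# Hypothetical base URL; adjust host/port for your deployment.
BASE = "http://frigate.local:5000/api"

# Mark review items as viewed; the body must be {"ids": [...]} with
# non-empty ids, otherwise the endpoint returns 422.
resp = requests.post(
    f"{BASE}/reviews/viewed", json={"ids": ["123456.random", "654321.random"]}
)
assert resp.status_code == 200

# Delete review items along with their associated recordings.
resp = requests.post(f"{BASE}/reviews/delete", json={"ids": ["123456.random"]})
print(resp.json())  # {"success": True, "message": "Deleted review items."}
```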

View File

@@ -168,7 +168,7 @@ class TestHttp(unittest.TestCase):
         assert event
         assert event["id"] == id
-        assert event == model_to_dict(Event.get(Event.id == id))
+        assert event["id"] == model_to_dict(Event.get(Event.id == id))["id"]

     def test_get_bad_event(self):
         app = create_fastapi_app(

View File

@@ -13,6 +13,7 @@ from frigate.config import (
     CameraConfig,
     ModelConfig,
 )
+from frigate.review.types import SeverityEnum
 from frigate.util.image import (
     area,
     calculate_region,
@@ -59,6 +60,27 @@ class TrackedObject:
         self.pending_loitering = False
         self.previous = self.to_dict()

+    @property
+    def max_severity(self) -> Optional[str]:
+        review_config = self.camera_config.review
+
+        if self.obj_data["label"] in review_config.alerts.labels and (
+            not review_config.alerts.required_zones
+            or set(self.entered_zones) & set(review_config.alerts.required_zones)
+        ):
+            return SeverityEnum.alert
+
+        if (
+            not review_config.detections.labels
+            or self.obj_data["label"] in review_config.detections.labels
+        ) and (
+            not review_config.detections.required_zones
+            or set(self.entered_zones) & set(review_config.detections.required_zones)
+        ):
+            return SeverityEnum.detection
+
+        return None
+
     def _is_false_positive(self):
         # once a true positive, always a true positive
         if not self.false_positive:
@@ -232,6 +254,7 @@ class TrackedObject:
             "attributes": self.attributes,
             "current_attributes": self.obj_data["attributes"],
             "pending_loitering": self.pending_loitering,
+            "max_severity": self.max_severity,
         }

         if include_thumbnail:
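
The precedence in `max_severity` is worth restating: an object is checked against the alert rules first, and empty label/zone lists on the detection side act as wildcards. A standalone sketch of the same decision, with plain dicts standing in for Frigate's config objects:

```python
def max_severity(label, entered_zones, alerts, detections):
    """Simplified model of TrackedObject.max_severity (not the actual class)."""
    # Alerts take precedence: the label must be listed, and if zones are
    # required, the object must have entered at least one of them.
    if label in alerts["labels"] and (
        not alerts["required_zones"]
        or set(entered_zones) & set(alerts["required_zones"])
    ):
        return "alert"

    # Detections: an empty label list matches any label; zones work the same way.
    if (not detections["labels"] or label in detections["labels"]) and (
        not detections["required_zones"]
        or set(entered_zones) & set(detections["required_zones"])
    ):
        return "detection"

    return None


# Made-up config: a person in the driveway is an alert; anything else that
# matches no alert rule falls through to detection severity.
alerts = {"labels": ["person"], "required_zones": ["driveway"]}
detections = {"labels": [], "required_zones": []}
print(max_severity("person", ["driveway"], alerts, detections))  # alert
print(max_severity("dog", [], alerts, detections))  # detection
```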

View File

@@ -219,19 +219,35 @@ def draw_box_with_label(
     text_width = size[0][0]
     text_height = size[0][1]
     line_height = text_height + size[1]
+    # get frame height
+    frame_height = frame.shape[0]
     # set the text start position
     if position == "ul":
         text_offset_x = x_min
-        text_offset_y = 0 if y_min < line_height else y_min - (line_height + 8)
+        text_offset_y = max(0, y_min - (line_height + 8))
     elif position == "ur":
-        text_offset_x = x_max - (text_width + 8)
-        text_offset_y = 0 if y_min < line_height else y_min - (line_height + 8)
+        text_offset_x = max(0, x_max - (text_width + 8))
+        text_offset_y = max(0, y_min - (line_height + 8))
     elif position == "bl":
         text_offset_x = x_min
-        text_offset_y = y_max
+        text_offset_y = min(frame_height - line_height, y_max)
     elif position == "br":
-        text_offset_x = x_max - (text_width + 8)
-        text_offset_y = y_max
+        text_offset_x = max(0, x_max - (text_width + 8))
+        text_offset_y = min(frame_height - line_height, y_max)
+    # Adjust position if it overlaps with the box or goes out of frame
+    if position in {"ul", "ur"}:
+        if text_offset_y < y_min + thickness:  # Label overlaps with the box
+            if y_min - (line_height + 8) < 0 and y_max + line_height <= frame_height:
+                # Not enough space above, and there is space below
+                text_offset_y = y_max
+            elif y_min - (line_height + 8) >= 0:
+                # Enough space above, keep the label at the top
+                text_offset_y = max(0, y_min - (line_height + 8))
+    elif position in {"bl", "br"}:
+        if text_offset_y + line_height > frame_height:
+            # If there's not enough space below, try above the box
+            text_offset_y = max(0, y_min - (line_height + 8))
     # make the coords of the box with a small padding of two pixels
     textbox_coords = (
         (text_offset_x, text_offset_y),
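
To make the clamping concrete, here is a small self-contained walkthrough of the "ul" branch for a box touching the top edge of the frame (plain integers, no OpenCV; all values are made up for illustration):

```python
# Hypothetical values: a 480px-tall frame and a box near the top edge.
frame_height = 480
y_min, y_max = 10, 120  # box top / bottom
line_height = 24        # label height including baseline
thickness = 2

# "ul" placement now clamps to the frame instead of snapping to y=0.
text_offset_y = max(0, y_min - (line_height + 8))  # -> 0: no room above

# The label overlaps the box top, and there is room below the box,
# so the adjustment step moves it underneath.
if text_offset_y < y_min + thickness:
    if y_min - (line_height + 8) < 0 and y_max + line_height <= frame_height:
        text_offset_y = y_max

print(text_offset_y)  # 120: label drawn below the box instead of on top of it
```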

web/package-lock.json (generated)
View File

@@ -220,12 +220,12 @@
       "dev": true
     },
     "node_modules/@bundled-es-modules/cookie": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.1.tgz",
-      "integrity": "sha512-8o+5fRPLNbjbdGRRmJj3h6Hh1AQJf2dk3qQ/5ZFb+PXkRNiSoMGGUKlsgLfrxneb72axVJyIYji64E2+nNfYyw==",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz",
+      "integrity": "sha512-Or6YHg/kamKHpxULAdSqhGqnWFneIXu1NKvvfBBzKGwpVsYuFIQ5aBPHDnnoR3ghW1nvSkALd+EF9iMtY7Vjxw==",
       "dev": true,
       "dependencies": {
-        "cookie": "^0.7.2"
+        "cookie": "^0.5.0"
       }
     },
     "node_modules/@bundled-es-modules/statuses": {
@@ -4352,9 +4352,9 @@
       "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="
     },
     "node_modules/cookie": {
-      "version": "0.7.2",
-      "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
-      "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz",
+      "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==",
       "dev": true,
       "engines": {
         "node": ">= 0.6"

View File

@@ -29,8 +29,11 @@ export function ApiProvider({ children, options }: ApiProviderType) {
           error.response &&
           [401, 302, 307].includes(error.response.status)
         ) {
-          window.location.href =
-            error.response.headers.get("location") ?? "login";
+          // redirect to the login page if not already there
+          const loginPage = error.response.headers.get("location") ?? "login";
+          if (window.location.href !== loginPage) {
+            window.location.href = loginPage;
+          }
         }
       },
       ...options,
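
The guard exists to stop a reload loop when the app is already sitting on the login page. The same idea isolated in a few lines of Python (hypothetical names, just to show the logic):

```python
def redirect_target(current_url: str, location_header: str | None) -> str | None:
    """Return the URL to navigate to, or None if we are already there."""
    login_page = location_header or "login"
    # Navigating to the page we are already on would retrigger the 401
    # handler and loop forever, so skip the redirect in that case.
    return login_page if current_url != login_page else None


assert redirect_target("http://host/app", "http://host/login") == "http://host/login"
assert redirect_target("http://host/login", "http://host/login") is None
```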

View File

@@ -63,7 +63,7 @@ export function UserAuthForm({ className, ...props }: UserAuthFormProps) {
         toast.error("Exceeded rate limit. Try again later.", {
           position: "top-center",
         });
-      } else if (err.response?.status === 400) {
+      } else if (err.response?.status === 401) {
         toast.error("Login failed", {
           position: "top-center",
         });
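
A quick way to observe the new status code from a client. The endpoint path and payload shape here are assumptions for a typical deployment with authentication enabled; verify them against your instance:

```python
import requests

resp = requests.post(
    "http://frigate.local:5000/api/login",  # hypothetical local instance
    json={"user": "admin", "password": "wrong-password"},
)

# Failed logins now return 401 (previously 400); the UI toast above keys
# off this, and 429 is returned once the rate limiter kicks in.
print(resp.status_code)
```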

View File

@@ -15,13 +15,15 @@ import {
   SearchFilter,
   SearchFilters,
   SearchSource,
+  SearchSortType,
 } from "@/types/search";
 import { DateRange } from "react-day-picker";
 import { cn } from "@/lib/utils";
-import { MdLabel } from "react-icons/md";
+import { MdLabel, MdSort } from "react-icons/md";
 import PlatformAwareDialog from "../overlay/dialog/PlatformAwareDialog";
 import SearchFilterDialog from "../overlay/dialog/SearchFilterDialog";
 import { CalendarRangeFilterButton } from "./CalendarFilterButton";
+import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";

 type SearchFilterGroupProps = {
   className: string;
@@ -107,6 +109,25 @@ export default function SearchFilterGroup({
     [config, allLabels, allZones],
   );

+  const availableSortTypes = useMemo(() => {
+    const sortTypes = ["date_asc", "date_desc"];
+    if (filter?.min_score || filter?.max_score) {
+      sortTypes.push("score_desc", "score_asc");
+    }
+    if (filter?.event_id || filter?.query) {
+      sortTypes.push("relevance");
+    }
+    return sortTypes as SearchSortType[];
+  }, [filter]);
+
+  const defaultSortType = useMemo<SearchSortType>(() => {
+    if (filter?.query || filter?.event_id) {
+      return "relevance";
+    } else {
+      return "date_desc";
+    }
+  }, [filter]);
+
   const groups = useMemo(() => {
     if (!config) {
       return [];
@@ -179,6 +200,16 @@ export default function SearchFilterGroup({
         filterValues={filterValues}
         onUpdateFilter={onUpdateFilter}
       />
+      {filters.includes("sort") && Object.keys(filter ?? {}).length > 0 && (
+        <SortTypeButton
+          availableSortTypes={availableSortTypes ?? []}
+          defaultSortType={defaultSortType}
+          selectedSortType={filter?.sort}
+          updateSortType={(newSort) => {
+            onUpdateFilter({ ...filter, sort: newSort });
+          }}
+        />
+      )}
     </div>
   );
 }
@@ -362,3 +393,176 @@ export function GeneralFilterContent({
     </>
   );
 }
+
+type SortTypeButtonProps = {
+  availableSortTypes: SearchSortType[];
+  defaultSortType: SearchSortType;
+  selectedSortType: SearchSortType | undefined;
+  updateSortType: (sortType: SearchSortType | undefined) => void;
+};
+function SortTypeButton({
+  availableSortTypes,
+  defaultSortType,
+  selectedSortType,
+  updateSortType,
+}: SortTypeButtonProps) {
+  const [open, setOpen] = useState(false);
+  const [currentSortType, setCurrentSortType] = useState<
+    SearchSortType | undefined
+  >(selectedSortType as SearchSortType);
+
+  // ui
+
+  useEffect(() => {
+    setCurrentSortType(selectedSortType);
+    // only refresh when state changes
+    // eslint-disable-next-line react-hooks/exhaustive-deps
+  }, [selectedSortType]);
+
+  const trigger = (
+    <Button
+      size="sm"
+      variant={
+        selectedSortType != defaultSortType && selectedSortType != undefined
+          ? "select"
+          : "default"
+      }
+      className="flex items-center gap-2 capitalize"
+      aria-label="Labels"
+    >
+      <MdSort
+        className={`${selectedSortType != defaultSortType && selectedSortType != undefined ? "text-selected-foreground" : "text-secondary-foreground"}`}
+      />
+      <div
+        className={`${selectedSortType != defaultSortType && selectedSortType != undefined ? "text-selected-foreground" : "text-primary"}`}
+      >
+        Sort
+      </div>
+    </Button>
+  );
+
+  const content = (
+    <SortTypeContent
+      availableSortTypes={availableSortTypes ?? []}
+      defaultSortType={defaultSortType}
+      selectedSortType={selectedSortType}
+      currentSortType={currentSortType}
+      setCurrentSortType={setCurrentSortType}
+      updateSortType={updateSortType}
+      onClose={() => setOpen(false)}
+    />
+  );
+
+  return (
+    <PlatformAwareDialog
+      trigger={trigger}
+      content={content}
+      contentClassName={
+        isDesktop
+          ? "scrollbar-container h-auto max-h-[80dvh] overflow-y-auto"
+          : "max-h-[75dvh] overflow-hidden p-4"
+      }
+      open={open}
+      onOpenChange={(open) => {
+        if (!open) {
+          setCurrentSortType(selectedSortType);
+        }
+        setOpen(open);
+      }}
+    />
+  );
+}
+
+type SortTypeContentProps = {
+  availableSortTypes: SearchSortType[];
+  defaultSortType: SearchSortType;
+  selectedSortType: SearchSortType | undefined;
+  currentSortType: SearchSortType | undefined;
+  updateSortType: (sort_type: SearchSortType | undefined) => void;
+  setCurrentSortType: (sort_type: SearchSortType | undefined) => void;
+  onClose: () => void;
+};
+export function SortTypeContent({
+  availableSortTypes,
+  defaultSortType,
+  selectedSortType,
+  currentSortType,
+  updateSortType,
+  setCurrentSortType,
+  onClose,
+}: SortTypeContentProps) {
+  const sortLabels = {
+    date_asc: "Date (Ascending)",
+    date_desc: "Date (Descending)",
+    score_asc: "Object Score (Ascending)",
+    score_desc: "Object Score (Descending)",
+    relevance: "Relevance",
+  };
+
+  return (
+    <>
+      <div className="overflow-x-hidden">
+        <div className="my-2.5 flex flex-col gap-2.5">
+          <RadioGroup
+            value={
+              Array.isArray(currentSortType)
+                ? currentSortType?.[0]
+                : (currentSortType ?? defaultSortType)
+            }
+            defaultValue={defaultSortType}
+            onValueChange={(value) =>
+              setCurrentSortType(value as SearchSortType)
+            }
+            className="w-full space-y-1"
+          >
+            {availableSortTypes.map((value) => (
+              <div className="flex flex-row gap-2">
+                <RadioGroupItem
+                  key={value}
+                  value={value}
+                  id={`sort-${value}`}
+                  className={
+                    value == (currentSortType ?? defaultSortType)
+                      ? "bg-selected from-selected/50 to-selected/90 text-selected"
+                      : "bg-secondary from-secondary/50 to-secondary/90 text-secondary"
+                  }
+                />
+                <Label
+                  htmlFor={`sort-${value}`}
+                  className="flex cursor-pointer items-center space-x-2"
+                >
+                  <span>{sortLabels[value]}</span>
+                </Label>
+              </div>
+            ))}
+          </RadioGroup>
+        </div>
+      </div>
+      <DropdownMenuSeparator />
+      <div className="flex items-center justify-evenly p-2">
+        <Button
+          aria-label="Apply"
+          variant="select"
+          onClick={() => {
+            if (selectedSortType != currentSortType) {
+              updateSortType(currentSortType);
+            }
+            onClose();
+          }}
+        >
+          Apply
+        </Button>
+        <Button
+          aria-label="Reset"
+          onClick={() => {
+            setCurrentSortType(undefined);
+            updateSortType(undefined);
+          }}
+        >
+          Reset
+        </Button>
+      </div>
+    </>
+  );
+}
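
The rules for which sort options appear are easy to restate outside the component: date sorting is always offered, score sorting only when a score filter is active, and relevance only when there is a query or an event id. A sketch of that logic in Python, with a plain dict standing in for the filter object:

```python
def available_sort_types(search_filter: dict) -> list[str]:
    # Date sorting is always available.
    sort_types = ["date_asc", "date_desc"]
    # Score sorting only makes sense when a score filter is set.
    if search_filter.get("min_score") or search_filter.get("max_score"):
        sort_types += ["score_desc", "score_asc"]
    # Relevance requires a text query or a specific event id.
    if search_filter.get("event_id") or search_filter.get("query"):
        sort_types.append("relevance")
    return sort_types


def default_sort_type(search_filter: dict) -> str:
    if search_filter.get("query") or search_filter.get("event_id"):
        return "relevance"
    return "date_desc"


print(available_sort_types({"min_score": 0.7}))  # date + score options
print(default_sort_type({"query": "person at the door"}))  # relevance
```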

View File

@@ -18,6 +18,7 @@ import {
   FilterType,
   SavedSearchQuery,
   SearchFilter,
+  SearchSortType,
   SearchSource,
 } from "@/types/search";
 import useSuggestions from "@/hooks/use-suggestions";
@@ -323,6 +324,9 @@ export default function InputWithTags({
       case "event_id":
         newFilters.event_id = value;
         break;
+      case "sort":
+        newFilters.sort = value as SearchSortType;
+        break;
       default:
         // Handle array types (cameras, labels, subLabels, zones)
         if (!newFilters[type]) newFilters[type] = [];

View File

@@ -175,7 +175,7 @@ export default function SearchFilterDialog({
       time_range: undefined,
       zones: undefined,
       sub_labels: undefined,
-      search_type: ["thumbnail", "description"],
+      search_type: undefined,
       min_score: undefined,
       max_score: undefined,
       has_snapshot: undefined,

View File

@@ -15,7 +15,10 @@ export function useOverlayState<S>(
     (value: S, replace: boolean = false) => {
       const newLocationState = { ...currentLocationState };
       newLocationState[key] = value;
-      navigate(location.pathname, { state: newLocationState, replace });
+      navigate(location.pathname + location.search, {
+        state: newLocationState,
+        replace,
+      });
     },
     // we know that these deps are correct
     // eslint-disable-next-line react-hooks/exhaustive-deps

View File

@@ -116,6 +116,7 @@ export default function Explore() {
       is_submitted: searchSearchParams["is_submitted"],
       has_clip: searchSearchParams["has_clip"],
       event_id: searchSearchParams["event_id"],
+      sort: searchSearchParams["sort"],
       limit:
         Object.keys(searchSearchParams).length == 0 ? API_LIMIT : undefined,
       timezone,
@@ -148,6 +149,7 @@ export default function Explore() {
      is_submitted: searchSearchParams["is_submitted"],
      has_clip: searchSearchParams["has_clip"],
      event_id: searchSearchParams["event_id"],
+     sort: searchSearchParams["sort"],
      timezone,
      include_thumbnails: 0,
@@ -165,12 +167,17 @@ export default function Explore() {
       const [url, params] = searchQuery;

-      // If it's not the first page, use the last item's start_time as the 'before' parameter
+      const isAscending = params.sort?.includes("date_asc");
+
       if (pageIndex > 0 && previousPageData) {
         const lastDate = previousPageData[previousPageData.length - 1].start_time;
         return [
           url,
-          { ...params, before: lastDate.toString(), limit: API_LIMIT },
+          {
+            ...params,
+            [isAscending ? "after" : "before"]: lastDate.toString(),
+            limit: API_LIMIT,
+          },
         ];
       }
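
The pagination change is the subtle part: when sorting by date ascending, the next page has to start after the last item on the previous page rather than before it. A minimal Python model of the cursor selection (made-up page data, not the actual fetch code):

```python
def next_page_params(params: dict, previous_page: list[dict], api_limit: int = 25) -> dict:
    """Pick the pagination cursor key based on the sort direction."""
    is_ascending = "date_asc" in (params.get("sort") or "")
    last_date = previous_page[-1]["start_time"]
    cursor_key = "after" if is_ascending else "before"
    return {**params, cursor_key: str(last_date), "limit": api_limit}


page = [{"start_time": 1733300000.0}, {"start_time": 1733303600.0}]
print(next_page_params({"sort": "date_asc"}, page))   # cursor: "after"
print(next_page_params({"sort": "date_desc"}, page))  # cursor: "before"
```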

View File

@@ -6,6 +6,7 @@ const SEARCH_FILTERS = [
   "zone",
   "sub",
   "source",
+  "sort",
 ] as const;
 export type SearchFilters = (typeof SEARCH_FILTERS)[number];
 export const DEFAULT_SEARCH_FILTERS: SearchFilters[] = [
@@ -16,10 +17,18 @@ export const DEFAULT_SEARCH_FILTERS: SearchFilters[] = [
   "zone",
   "sub",
   "source",
+  "sort",
 ];

 export type SearchSource = "similarity" | "thumbnail" | "description";

+export type SearchSortType =
+  | "date_asc"
+  | "date_desc"
+  | "score_asc"
+  | "score_desc"
+  | "relevance";
+
 export type SearchResult = {
   id: string;
   camera: string;
@@ -65,6 +74,7 @@ export type SearchFilter = {
   time_range?: string;
   search_type?: SearchSource[];
   event_id?: string;
+  sort?: SearchSortType;
 };

 export const DEFAULT_TIME_RANGE_AFTER = "00:00";
@@ -86,6 +96,7 @@ export type SearchQueryParams = {
   query?: string;
   page?: number;
   time_range?: string;
+  sort?: SearchSortType;
 };

 export type SearchQuery = [string, SearchQueryParams] | null;