forked from Github/frigate
Various fixes (#14786)
* Catch openvino error
* Remove clip deletion
* Update deletion text
* Fix timeline not respecting timezone config
* Tweaks
* More timezone fixes
* Fix
* More timezone fixes
* Fix shm docs
@@ -1042,9 +1042,6 @@ def delete_event(request: Request, event_id: str):
         media.unlink(missing_ok=True)
         media = Path(f"{os.path.join(CLIPS_DIR, media_name)}-clean.png")
         media.unlink(missing_ok=True)
-    if event.has_clip:
-        media = Path(f"{os.path.join(CLIPS_DIR, media_name)}.mp4")
-        media.unlink(missing_ok=True)
 
     event.delete_instance()
     Timeline.delete().where(Timeline.source_id == event_id).execute()
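For readers unfamiliar with peewee, the two database calls kept by this hunk do different things: delete_instance() removes only the row backing that Event instance, while Timeline.delete().where(...).execute() issues a bulk DELETE for every matching timeline row. A minimal self-contained sketch with an in-memory SQLite database and hypothetical Event/Timeline models (illustrative stand-ins, not Frigate's actual schema):

# Minimal peewee sketch of the two deletion calls used above.
# The models here are hypothetical stand-ins, not Frigate's schema.
from peewee import CharField, Model, SqliteDatabase

db = SqliteDatabase(":memory:")


class Event(Model):
    id = CharField(primary_key=True)

    class Meta:
        database = db


class Timeline(Model):
    source_id = CharField()

    class Meta:
        database = db


db.create_tables([Event, Timeline])
Event.create(id="abc123")
Timeline.create(source_id="abc123")

event = Event.get(Event.id == "abc123")
event.delete_instance()  # deletes only this Event row
deleted = Timeline.delete().where(Timeline.source_id == "abc123").execute()
print(deleted)  # number of Timeline rows removed -> 1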
@@ -521,9 +521,9 @@ class FrigateApp:
             f"Calculated total camera size {available_shm} / {cam_total_frame_size} :: {shm_frame_count} frames for each camera in SHM"
         )
 
-        if shm_frame_count < 10:
+        if shm_frame_count < 20:
             logger.warning(
-                f"The current SHM size of {total_shm}MB is too small, recommend increasing it to at least {round(min_req_shm + cam_total_frame_size * 10)}MB."
+                f"The current SHM size of {total_shm}MB is too small, recommend increasing it to at least {round(min_req_shm + cam_total_frame_size * 20)}MB."
             )
 
         return shm_frame_count
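For context on the new numbers: the warning now fires when fewer than 20 frames per camera fit in /dev/shm, and the recommended size scales the total per-camera frame size by 20 instead of 10. A rough standalone sketch of that sizing arithmetic; the YUV420 frame-size estimate, the per-frame overhead, and the 40 MB baseline (the role played by min_req_shm) are assumptions for illustration, not values taken from this diff:

# Rough sketch of the SHM sizing arithmetic behind the warning above.
# Assumptions (not from this diff): detect frames are YUV420 (1.5 bytes per
# pixel) plus a small per-frame overhead, and ~40 MB is reserved as a
# baseline for logs and caches.

def recommended_shm_mb(
    cameras: list[tuple[int, int]],
    frames_per_camera: int = 20,
    baseline_mb: float = 40.0,
) -> int:
    """Estimate the /dev/shm size in MB needed for frames_per_camera frames."""
    cam_total_frame_size = sum(
        (width * height * 1.5 + 270_480) / 1_048_576  # MB per frame, per camera
        for width, height in cameras
    )
    return round(baseline_mb + cam_total_frame_size * frames_per_camera)


# Example: two 1280x720 detect streams -> roughly 40 + 3.15 * 20, about 103 MB.
print(recommended_shm_mb([(1280, 720), (1280, 720)]))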
@@ -1,5 +1,6 @@
 """Model Utils"""
 
+import logging
 import os
 from typing import Any
 
@@ -11,6 +12,8 @@ except ImportError:
     # openvino is not included
     pass
 
+logger = logging.getLogger(__name__)
+
 
 def get_ort_providers(
     force_cpu: bool = False, device: str = "AUTO", requires_fp16: bool = False
@@ -89,19 +92,27 @@ class ONNXModelRunner:
         self.ort: ort.InferenceSession = None
         self.ov: ov.Core = None
         providers, options = get_ort_providers(device == "CPU", device, requires_fp16)
+        self.interpreter = None
 
         if "OpenVINOExecutionProvider" in providers:
-            # use OpenVINO directly
-            self.type = "ov"
-            self.ov = ov.Core()
-            self.ov.set_property(
-                {ov.properties.cache_dir: "/config/model_cache/openvino"}
-            )
-            self.interpreter = self.ov.compile_model(
-                model=model_path, device_name=device
-            )
-        else:
-            # Use ONNXRuntime
+            try:
+                # use OpenVINO directly
+                self.type = "ov"
+                self.ov = ov.Core()
+                self.ov.set_property(
+                    {ov.properties.cache_dir: "/config/model_cache/openvino"}
+                )
+                self.interpreter = self.ov.compile_model(
+                    model=model_path, device_name=device
+                )
+            except Exception as e:
+                logger.warning(
+                    f"OpenVINO failed to build model, using CPU instead: {e}"
+                )
+                self.interpreter = None
+
+        # Use ONNXRuntime
+        if self.interpreter is None:
             self.type = "ort"
             self.ort = ort.InferenceSession(
                 model_path,
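The net effect of this last hunk is that a failed OpenVINO model compile no longer raises out of the constructor: the runner logs a warning and falls through to ONNX Runtime because self.interpreter is still None. A minimal standalone sketch of the same try-preferred-then-fall-back pattern; the function names here are illustrative, not Frigate's API:

# Generic sketch of the fallback pattern introduced above: try the preferred
# backend, and if it raises for any reason, log a warning and build the safe one.
import logging
from typing import Any, Callable

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def build_with_fallback(preferred: Callable[[], Any], fallback: Callable[[], Any]) -> Any:
    interpreter = None
    try:
        interpreter = preferred()  # e.g. ov.Core().compile_model(...)
    except Exception as e:
        logger.warning(f"Preferred backend failed to build model, using fallback: {e}")

    if interpreter is None:  # mirrors the `if self.interpreter is None:` check
        interpreter = fallback()  # e.g. ort.InferenceSession(...)
    return interpreter


def compile_openvino() -> str:
    raise RuntimeError("unsupported device")  # simulate an OpenVINO compile failure


def build_ort_cpu_session() -> str:
    return "ort-cpu-session"


print(build_with_fallback(compile_openvino, build_ort_cpu_session))  # -> ort-cpu-session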