forked from GitHub/Axter-Stash
Added logic to stop FileMonitor immediately
Added command-line options --stop, --restart, --url, and --trace. Moved logging logic to StashPluginHelper.py.
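For reference, the new command-line options can be exercised roughly as follows (a sketch based on the argparse definitions added to filemonitor.py in this commit; the URL is only an example value):

    python filemonitor.py --url http://localhost:9999   # run against a specific Stash endpoint
    python filemonitor.py --trace                        # enable debug trace logging
    python filemonitor.py --stop                         # signal a running FileMonitor task to stop
    python filemonitor.py --restart                      # stop a running FileMonitor task, then start it again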
1  plugins/.gitignore  (vendored)
@@ -32,6 +32,7 @@ __pycache__/
renamefile_settings.cpython-310.pyc
/WindowsSymbolicLinkCleaner
/DeleteMe
/ATestPlugin

## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.

@@ -89,23 +89,54 @@ settings = {
    "zzdebugTracing": False,
    "zzdryRun": False,
}
CanUpdatePluginConfigSettings = False
try:
    plugins_configuration = stash.find_plugins_config()
    CanUpdatePluginConfigSettings = True
except Exception as e:
    logger.exception('Got exception on main handler')
    logger.error('This exception most likely occurred because stashapp-tools needs to be upgraded. To fix this error, run the following command:\npip install --upgrade stashapp-tools')
    pass

if PLUGIN_ID in PLUGINCONFIGURATION:
if PLUGIN_ID in PLUGINCONFIGURATION and (not CanUpdatePluginConfigSettings or 'INITIAL_VALUES_SET1' in PLUGINCONFIGURATION[PLUGIN_ID]):
    settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
# ----------------------------------------------------------------------
debugTracing = settings["zzdebugTracing"]
debugTracing = True


if PLUGIN_ID in PLUGINCONFIGURATION:
    if 'ignoreSymbolicLinks' not in PLUGINCONFIGURATION[PLUGIN_ID]:
        logger.info(f"Debug Tracing (PLUGIN_ID={PLUGIN_ID})................")
        logger.info(f"Debug Tracing (PLUGINCONFIGURATION={PLUGINCONFIGURATION})................")
for item in STASHPATHSCONFIG:
    stashPaths.append(item["path"])

# Extract dry_run setting from settings
DRY_RUN = settings["zzdryRun"]
dry_run_prefix = ''
try:
    plugin_configuration = stash.find_plugins_config()
    logger.info(f"Debug Tracing (plugin_configuration={plugin_configuration})................")
    PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
    pass
logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (stash.stash_version()={stash.stash_version()})************************************************")
if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})")
if debugTracing: logger.info("settings: %s " % (settings,))
if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})")
if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})")
if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID={PLUGIN_ID})")
if debugTracing: logger.info(f"Debug Tracing (PLUGINCONFIGURATION={PLUGINCONFIGURATION})")

if PLUGIN_ID in PLUGINCONFIGURATION:
    if 'INITIAL_VALUES_SET1' not in PLUGINCONFIGURATION[PLUGIN_ID]:
        if debugTracing: logger.info(f"Initializing plugin ({PLUGIN_ID}) settings (PLUGINCONFIGURATION[PLUGIN_ID]={PLUGINCONFIGURATION[PLUGIN_ID]})")
        try:
            plugins_configuration = stash.find_plugins_config()
            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
            stash.configure_plugin(PLUGIN_ID, {"INITIAL_VALUES_SET1": True})
            logger.info('Called stash.configure_plugin(PLUGIN_ID, {"INITIAL_VALUES_SET1": True})')
            plugins_configuration = stash.find_plugins_config()
            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
            stash.configure_plugin(PLUGIN_ID, settings)
            stash.configure_plugin(PLUGIN_ID, {"zmaximumTagKeys": 12})
            logger.info('Called stash.configure_plugin(PLUGIN_ID, settings)')
            plugins_configuration = stash.find_plugins_config()
            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
        except Exception as e:
            logger.exception('Got exception on main handler')
        try:
@@ -119,24 +150,6 @@ if PLUGIN_ID in PLUGINCONFIGURATION:
        # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
        if debugTracing: logger.info("Debug Tracing................")

for item in STASHPATHSCONFIG:
    stashPaths.append(item["path"])

# Extract dry_run setting from settings
DRY_RUN = settings["zzdryRun"]
dry_run_prefix = ''
try:
    PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
    pass
logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})************************************************")
if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
if debugTracing: logger.info("settings: %s " % (settings,))
if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")
if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID={PLUGIN_ID})................")
if debugTracing: logger.info(f"Debug Tracing (PLUGINCONFIGURATION={PLUGINCONFIGURATION})................")

if DRY_RUN:
    logger.info("Dry run mode is enabled.")
    dry_run_prefix = "Would've "
@@ -144,7 +157,76 @@ if debugTracing: logger.info("Debug Tracing................")
# ----------------------------------------------------------------------
# **********************************************************************

def realpath(path):
    """
    get_symbolic_target for win
    """
    try:
        import win32file
        f = win32file.CreateFile(path, win32file.GENERIC_READ,
                                 win32file.FILE_SHARE_READ, None,
                                 win32file.OPEN_EXISTING,
                                 win32file.FILE_FLAG_BACKUP_SEMANTICS, None)
        target = win32file.GetFinalPathNameByHandle(f, 0)
        # the above gives us something like u'\\\\?\\C:\\tmp\\scalarizr\\3.3.0.7978'
        return target.strip('\\\\?\\')
    except ImportError:
        handle = open_dir(path)
        target = get_symbolic_target(handle)
        check_closed(handle)
        return target

def isReparsePoint(path):
    import win32api
    import win32con
    FinalPathname = realpath(path)
    logger.info(f"(path='{path}') (FinalPathname='{FinalPathname}')")
    if FinalPathname != path:
        logger.info(f"Symbolic link '{path}'")
        return True
    if not os.path.isdir(path):
        path = os.path.dirname(path)
    return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT

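A standard-library alternative to the win32api check above could look like the following sketch (an assumption for illustration, not part of this commit; it relies on os.stat() exposing st_file_attributes on Windows, and the helper name is hypothetical):

import os
import stat

def is_reparse_point_stdlib(path):
    # Check the directory itself, or the parent when given a file,
    # mirroring the win32api-based isReparsePoint() above.
    if not os.path.isdir(path):
        path = os.path.dirname(path)
    st = os.stat(path, follow_symlinks=False)
    # st_file_attributes only exists on Windows; default to 0 elsewhere.
    return bool(getattr(st, "st_file_attributes", 0) & stat.FILE_ATTRIBUTE_REPARSE_POINT)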
def mangeDupFiles():
    import platform
    if debugTracing: logger.info(f"Debug Tracing (platform.system()={platform.system()})")
    myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link
    myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point
    myTestPath3 = r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4" # symbolic link
    myTestPath4 = r"E:\Stash\plugins\RenameFile\README.md" # symbolic link
    myTestPath5 = r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md" # symbolic link
    myTestPath6 = r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md" # not reparse point
    logger.info(f"Testing '{myTestPath1}'")
    if isReparsePoint(myTestPath1):
        logger.info(f"isSymLink '{myTestPath1}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath1}'")

    if isReparsePoint(myTestPath2):
        logger.info(f"isSymLink '{myTestPath2}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath2}'")

    if isReparsePoint(myTestPath3):
        logger.info(f"isSymLink '{myTestPath3}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath3}'")

    if isReparsePoint(myTestPath4):
        logger.info(f"isSymLink '{myTestPath4}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath4}'")

    if isReparsePoint(myTestPath5):
        logger.info(f"isSymLink '{myTestPath5}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath5}'")

    if isReparsePoint(myTestPath6):
        logger.info(f"isSymLink '{myTestPath6}'")
    else:
        logger.info(f"Not isSymLink '{myTestPath6}'")
    return

if mangeDupFilesTask:

@@ -24,6 +24,7 @@ This Plugin is under construction!!!

### Requirements
`pip install stashapp-tools`
`pip install --upgrade stashapp-tools`
`pip install pyYAML`

### Installation

@@ -1,4 +1,3 @@
stashapp-tools
stashapp-tools >= 0.2.48
pyYAML
watchdog
requests
241  plugins/FileMonitor/StashPluginHelper.py  (Normal file)
@@ -0,0 +1,241 @@
import stashapi.log as stashLog # stashapi.log by default for error and critical logging
from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler
import inspect
import sys
import os
import pathlib
import logging
import json
import __main__

# StashPluginHelper (By David Maisonave aka Axter)
# See end of this file for example usage
# Log Features:
#     Can optionally log out to multiple outputs for each Log or Trace call.
#     Logging includes source code line number
#     Sets a maximum plugin log file size
# Stash Interface Features:
#     Sets STASH_INTERFACE with StashInterface
#     Gets STASH_URL value from command line argument and/or from STDIN_READ
#     Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
#     Sets PLUGIN_ID based on the main script file name (in lower case)
#     Gets PLUGIN_TASK_NAME value
#     Sets pluginSettings (The plugin UI settings)
# Misc Features:
#     Gets DRY_RUN value from command line argument and/or from UI and/or from config file
#     Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
#     Sets RUNNING_IN_COMMAND_LINE_MODE to True if it detects multiple arguments
#     Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
class StashPluginHelper:
    # Primary Members for external reference
    PLUGIN_TASK_NAME = None
    PLUGIN_ID = None
    PLUGIN_CONFIGURATION = None
    pluginSettings = None
    pluginConfig = None
    STASH_INTERFACE = None
    STASH_URL = None
    STASH_CONFIGURATION = None
    JSON_INPUT = None
    DEBUG_TRACING = False
    DRY_RUN = False
    CALLED_AS_STASH_PLUGIN = False
    RUNNING_IN_COMMAND_LINE_MODE = False

    # printTo argument
    LOG_TO_FILE = 1
    LOG_TO_CONSOLE = 2 # Note: Output is only visible when running in command line mode. In plugin mode, this output is lost.
    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
    LOG_TO_STASH = 8
    LOG_TO_WARN = 16
    LOG_TO_ERROR = 32
    LOG_TO_CRITICAL = 64
    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH

    # Misc class variables
    MAIN_SCRIPT_NAME = None
    LOG_LEVEL = logging.INFO
    LOG_FILE_DIR = None
    LOG_FILE_NAME = None
    STDIN_READ = None
    FRAGMENT_SERVER = None
    logger = None

    # Prefix message value
    LEV_TRACE = "TRACE: "
    LEV_DBG = "DBG: "
    LEV_INF = "INF: "
    LEV_WRN = "WRN: "
    LEV_ERR = "ERR: "
    LEV_CRITICAL = "CRITICAL: "

    # Default format
    LOG_FORMAT = "[%(asctime)s] %(message)s"

    # Externally modifiable variables
    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
    log_to_wrn_set = LOG_TO_FILE + LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages

    def __init__(self,
                 debugTracing = None,          # Set debugTracing to True so as to output debug and trace logging
                 logFormat = LOG_FORMAT,       # Plugin log line format
                 dateFmt = "%y%m%d %H:%M:%S",  # Date format when logging to plugin log file
                 maxbytes = 2*1024*1024,       # Max size of plugin log file
                 backupcount = 2,              # Backup counts when log file size reaches max size
                 logToWrnSet = 0,              # Customize the target output set which will get warning logging
                 logToErrSet = 0,              # Customize the target output set which will get error logging
                 logToNormSet = 0,             # Customize the target output set which will get normal logging
                 logFilePath = "",             # Plugin log file. If empty, the log file name will be set based on current python file name and path
                 mainScriptName = "",          # The main plugin script file name (full path)
                 pluginID = "",
                 settings = None,              # Default settings for UI fields
                 config = None,                # From pluginName_config.py or pluginName_setting.py
                 fragmentServer = None,
                 stash_url = None,             # Stash URL (endpoint URL) Example: http://localhost:9999
                 DebugTraceFieldName = "zzdebugTracing",
                 DryRunFieldName = "zzdryRun"):
        if logToWrnSet: self.log_to_wrn_set = logToWrnSet
        if logToErrSet: self.log_to_err_set = logToErrSet
        if logToNormSet: self.log_to_norm = logToNormSet
        if stash_url and len(stash_url): self.STASH_URL = stash_url
        self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
        self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower()
        print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
        self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
        self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
        RFH = RotatingFileHandler(
            filename=self.LOG_FILE_NAME,
            mode='a',
            maxBytes=maxbytes,
            backupCount=backupcount,
            encoding=None,
            delay=0
        )
        if fragmentServer:
            self.FRAGMENT_SERVER = fragmentServer
        else:
            self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}

        if debugTracing: self.DEBUG_TRACING = debugTracing
        if config:
            self.pluginConfig = config
            if DebugTraceFieldName in self.pluginConfig:
                self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName]
            if DryRunFieldName in self.pluginConfig:
                self.DRY_RUN = self.pluginConfig[DryRunFieldName]

        if len(sys.argv) > 1:
            RUNNING_IN_COMMAND_LINE_MODE = True
            if not debugTracing or not stash_url:
                for argValue in sys.argv[1:]:
                    if argValue.lower() == "--trace":
                        self.DEBUG_TRACING = True
                    elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
                        self.DRY_RUN = True
                    elif ":" in argValue and not self.STASH_URL:
                        self.STASH_URL = argValue
            if self.STASH_URL:
                endpointUrlArr = self.STASH_URL.split(":")
                if len(endpointUrlArr) == 3:
                    self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
                    self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
                    self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
            self.STASH_INTERFACE = StashInterface(self.FRAGMENT_SERVER)
        else:
            try:
                self.STDIN_READ = sys.stdin.read()
                self.CALLED_AS_STASH_PLUGIN = True
            except:
                pass
            if self.STDIN_READ:
                self.JSON_INPUT = json.loads(self.STDIN_READ)
                if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
                    self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
                self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
                self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
                self.STASH_INTERFACE = StashInterface(self.FRAGMENT_SERVER)

        if self.STASH_INTERFACE:
            self.PLUGIN_CONFIGURATION = self.STASH_INTERFACE.get_configuration()["plugins"]
            self.STASH_CONFIGURATION = self.STASH_INTERFACE.get_configuration()["general"]
            if settings:
                self.pluginSettings = settings
                if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
                    self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
                if DebugTraceFieldName in self.pluginSettings:
                    self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName]
                if DryRunFieldName in self.pluginSettings:
                    self.DRY_RUN = self.pluginSettings[DryRunFieldName]
        if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG

        logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
        self.logger = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)

    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False):
        if printTo == 0:
            printTo = self.log_to_norm
        elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
            logLevel = logging.ERROR
            printTo = self.log_to_err_set
        elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
            logLevel = logging.CRITICAL
            printTo = self.log_to_err_set
        elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
            logLevel = logging.WARN
            printTo = self.log_to_wrn_set
        if lineNo == -1:
            lineNo = inspect.currentframe().f_back.f_lineno
        LN_Str = f"[LN:{lineNo}]"
        # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
        if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
            if levelStr == "": levelStr = self.LEV_DBG
            if printTo & self.LOG_TO_FILE: self.logger.debug(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: stashLog.debug(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.INFO or logLevel == logging.DEBUG:
            if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
            if printTo & self.LOG_TO_FILE: self.logger.info(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: stashLog.info(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.WARN:
            if levelStr == "": levelStr = self.LEV_WRN
            if printTo & self.LOG_TO_FILE: self.logger.warning(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: stashLog.warning(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.ERROR:
            if levelStr == "": levelStr = self.LEV_ERR
            if printTo & self.LOG_TO_FILE: self.logger.error(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: stashLog.error(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.CRITICAL:
            if levelStr == "": levelStr = self.LEV_CRITICAL
            if printTo & self.LOG_TO_FILE: self.logger.critical(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: stashLog.error(f"{LN_Str} {levelStr}{logMsg}")
        if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
            print(f"{LN_Str} {levelStr}{logMsg}")
        if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
            print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)

    def Trace(self, logMsg = "", printTo = 0, logAlways = False):
        if printTo == 0: printTo = self.LOG_TO_FILE
        lineNo = inspect.currentframe().f_back.f_lineno
        logLev = logging.INFO if logAlways else logging.DEBUG
        if self.DEBUG_TRACING or logAlways:
            if logMsg == "":
                logMsg = f"Line number {lineNo}..."
            self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways)

    def Warn(self, logMsg, printTo = 0):
        if printTo == 0: printTo = self.log_to_wrn_set
        lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(logMsg, printTo, logging.WARN, lineNo)

    def Error(self, logMsg, printTo = 0):
        if printTo == 0: printTo = self.log_to_err_set
        lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(logMsg, printTo, logging.ERROR, lineNo)

    def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
        if printTo == 0: printTo = self.log_to_norm
        if lineNo == -1:
            lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
                 printTo, logLevel, lineNo)

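A minimal usage sketch of the helper (an assumption based on the constructor and methods shown above, not code from this commit; the "example usage" section referenced at the top of the file is not included in this diff, and the config dict is a hypothetical stand-in for <plugin>_config.py):

from StashPluginHelper import StashPluginHelper

config = {"zzdebugTracing": False, "zzdryRun": False}    # normally imported from <plugin>_config.py
settings = {"zzdebugTracing": False, "zzdryRun": False}  # default UI settings

plugin = StashPluginHelper(settings=settings, config=config, mainScriptName=__file__)
plugin.Status()                                   # log the helper's state (plugin ID, URL, mode flags)
plugin.Log("normal message")                      # goes to log_to_norm targets (file + console by default)
plugin.Log("file + Stash log", printTo=StashPluginHelper.LOG_TO_FILE + StashPluginHelper.LOG_TO_STASH)
plugin.Trace("only written when DEBUG_TRACING is enabled")
plugin.Warn("warning message")                    # log_to_wrn_set targets (file + Stash log by default)
plugin.Error("error message")                     # log_to_err_set targets (file + stderr by default)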
@@ -1,86 +1,29 @@
# Description: This is a Stash plugin which updates Stash if any changes occur in the Stash library paths.
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developer's version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
# Note: To call this script outside of Stash, pass any argument.
# Example: python filemonitor.py start
# Note: To call this script outside of Stash, pass --url and the Stash URL.
# Example: python filemonitor.py --url http://localhost:9999
import os
import sys
import json
from pathlib import Path
import logging
from logging.handlers import RotatingFileHandler
from stashapi.stashapp import StashInterface
import time
import pathlib
import argparse
from StashPluginHelper import StashPluginHelper
from watchdog.observers import Observer # This is also needed for event attributes
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
from threading import Lock, Condition
from multiprocessing import shared_memory
from filemonitor_config import config # Import settings from filemonitor_config.py

# **********************************************************************
# Constant global variables --------------------------------------------
LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
PLUGIN_ARGS_MODE = False
PLUGIN_ID = Path(__file__).stem.lower()
RFH = RotatingFileHandler(
    filename=LOG_FILE_PATH,
    mode='a',
    maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K
    backupCount=2,
    encoding=None,
    delay=0
)
TIMEOUT = 5
CONTINUE_RUNNING_SIG = 99
STOP_RUNNING_SIG = 32

# **********************************************************************
# Global variables --------------------------------------------
exitMsg = "Change success!!"
mutex = Lock()
signal = Condition(mutex)
shouldUpdate = False
TargetPaths = []
runningInPluginMode = False
parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop (kill) a running FileMonitor task.')
parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.')
parse_args = parser.parse_args()

# Configure local log file for plugin within plugin folder having a limited max log file size
logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
logger = logging.getLogger(Path(__file__).stem)

# **********************************************************************
# ----------------------------------------------------------------------
# Code section to fetch variables from Plugin UI and from filemonitor_settings.py
# Check if being called as Stash plugin
gettingCalledAsStashPlugin = True
stopLibraryMonitoring = False
StdInRead = None
try:
    if len(sys.argv) == 1:
        print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
        StdInRead = sys.stdin.read()
    else:
        if len(sys.argv) > 1 and sys.argv[1].lower() == "stop":
            stopLibraryMonitoring = True
        raise Exception("Not called in plugin mode.")
except:
    gettingCalledAsStashPlugin = False
    print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! (stopLibraryMonitoring={stopLibraryMonitoring}) (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
    pass

if gettingCalledAsStashPlugin and StdInRead:
    print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
    runningInPluginMode = True
    json_input = json.loads(StdInRead)
    FRAGMENT_SERVER = json_input["server_connection"]
else:
    runningInPluginMode = False
    FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
    print("Running in non-plugin mode!", file=sys.stderr)

stash = StashInterface(FRAGMENT_SERVER)
PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
STASHCONFIGURATION = stash.get_configuration()["general"]
STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
stashPaths = []
settings = {
    "recursiveDisabled": False,
    "runCleanAfterDelete": False,
@@ -88,39 +31,46 @@ settings = {
    "zzdebugTracing": False,
    "zzdryRun": False,
}
plugin = StashPluginHelper(
    stash_url=parse_args.stash_url,
    debugTracing=parse_args.trace,
    settings=settings,
    config=config,
    mainScriptName = __file__)
plugin.Status()
plugin.Log(f"\nStarting (__file__={__file__}) (plugin.CALLED_AS_STASH_PLUGIN={plugin.CALLED_AS_STASH_PLUGIN}) (plugin.DEBUG_TRACING={plugin.DEBUG_TRACING}) (plugin.DRY_RUN={plugin.DRY_RUN}) (plugin.PLUGIN_TASK_NAME={plugin.PLUGIN_TASK_NAME})************************************************")

if PLUGIN_ID in PLUGINCONFIGURATION:
    settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
# ----------------------------------------------------------------------
debugTracing = settings["zzdebugTracing"]
RECURSIVE = settings["recursiveDisabled"] == False
SCAN_MODIFIED = settings["scanModified"]
RUN_CLEAN_AFTER_DELETE = settings["runCleanAfterDelete"]
RUN_GENERATE_CONTENT = config['runGenerateContent']
exitMsg = "Change success!!"
mutex = Lock()
signal = Condition(mutex)
shouldUpdate = False
TargetPaths = []

RECURSIVE = plugin.pluginSettings["recursiveDisabled"] == False
SCAN_MODIFIED = plugin.pluginSettings["scanModified"]
RUN_CLEAN_AFTER_DELETE = plugin.pluginSettings["runCleanAfterDelete"]
RUN_GENERATE_CONTENT = plugin.pluginConfig['runGenerateContent']
SCAN_ON_ANY_EVENT = plugin.pluginConfig['onAnyEvent']
SIGNAL_TIMEOUT = plugin.pluginConfig['timeOut']

CREATE_SPECIAL_FILE_TO_EXIT = plugin.pluginConfig['createSpecFileToExit']
DELETE_SPECIAL_FILE_ON_STOP = plugin.pluginConfig['deleteSpecFileInStop']
SPECIAL_FILE_DIR = f"{plugin.LOG_FILE_DIR}{os.sep}working"
if not os.path.exists(SPECIAL_FILE_DIR) and CREATE_SPECIAL_FILE_TO_EXIT:
    os.makedirs(SPECIAL_FILE_DIR)
# Unique name to trigger shutting down FileMonitor
SPECIAL_FILE_NAME = f"{SPECIAL_FILE_DIR}{os.sep}trigger_to_kill_filemonitor_by_david_maisonave.txt"

STASHPATHSCONFIG = plugin.STASH_CONFIGURATION['stashes']
stashPaths = []
for item in STASHPATHSCONFIG:
    stashPaths.append(item["path"])
stashPaths.append(SPECIAL_FILE_DIR)
plugin.Trace(f"(stashPaths={stashPaths})")

# Extract dry_run setting from settings
DRY_RUN = settings["zzdryRun"]
dry_run_prefix = ''
try:
    PLUGIN_ARGS_MODE = json_input['args']["mode"]
except:
    pass
logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})************************************************")
if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})................")
if debugTracing: logger.info("settings: %s " % (settings,))
if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})................")
if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})................")

if DRY_RUN:
    logger.info("Dry run mode is enabled.")
    dry_run_prefix = "Would've "
if debugTracing: logger.info("Debug Tracing................")
# ----------------------------------------------------------------------
# **********************************************************************
if debugTracing: logger.info(f"Debug Tracing (SCAN_MODIFIED={SCAN_MODIFIED}) (RECURSIVE={RECURSIVE})................")
if plugin.DRY_RUN:
    plugin.Log("Dry run mode is enabled.")
plugin.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EVENT}) (RECURSIVE={RECURSIVE})")

def start_library_monitor():
    global shouldUpdate
@@ -129,14 +79,13 @@ def start_library_monitor():
        # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4)
    except:
        pass
        logger.info("Could not open shared memory map. Change File Monitor must be running. Cannot run multiple instances of Change File Monitor.")
        plugin.Error("Could not open shared memory map. Change File Monitor must be running. Cannot run multiple instances of Change File Monitor.")
        return
    type(shm_a.buf)
    shm_buffer = shm_a.buf
    len(shm_buffer)
    shm_buffer[0] = CONTINUE_RUNNING_SIG
    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
    plugin.Trace(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
    RunCleanMetadata = False

    event_handler = watchdog.events.FileSystemEventHandler()
@@ -144,7 +93,7 @@ def start_library_monitor():
        global shouldUpdate
        global TargetPaths
        TargetPaths.append(event.src_path)
        logger.info(f"CREATE *** '{event.src_path}'")
        plugin.Log(f"CREATE *** '{event.src_path}'")
        with mutex:
            shouldUpdate = True
            signal.notify()
@@ -154,7 +103,7 @@ def start_library_monitor():
        global TargetPaths
        nonlocal RunCleanMetadata
        TargetPaths.append(event.src_path)
        logger.info(f"DELETE *** '{event.src_path}'")
        plugin.Log(f"DELETE *** '{event.src_path}'")
        with mutex:
            shouldUpdate = True
            RunCleanMetadata = True
@@ -165,106 +114,136 @@ def start_library_monitor():
        global TargetPaths
        if SCAN_MODIFIED:
            TargetPaths.append(event.src_path)
            logger.info(f"MODIFIED *** '{event.src_path}'")
            plugin.Log(f"MODIFIED *** '{event.src_path}'")
            with mutex:
                shouldUpdate = True
                signal.notify()
        else:
            if debugTracing: logger.info(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")
            plugin.Trace(f"Ignoring modifications due to plugin UI setting. path='{event.src_path}'")

    def on_moved(event):
        global shouldUpdate
        global TargetPaths
        TargetPaths.append(event.src_path)
        TargetPaths.append(event.dest_path)
        logger.info(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
        plugin.Log(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
        with mutex:
            shouldUpdate = True
            signal.notify()

    if debugTracing: logger.info("Debug Trace........")
    def on_any_event(event):
        global shouldUpdate
        global TargetPaths
        if SCAN_ON_ANY_EVENT:
            plugin.Log(f"Any-Event *** '{event.src_path}'")
            TargetPaths.append(event.src_path)
            with mutex:
                shouldUpdate = True
                signal.notify()
        else:
            plugin.Trace("Ignoring on_any_event trigger.")

    plugin.Trace()
    event_handler.on_created = on_created
    event_handler.on_deleted = on_deleted
    event_handler.on_modified = on_modified
    event_handler.on_moved = on_moved
    event_handler.on_any_event = on_any_event

    observer = Observer()

    # Iterate through stashPaths
    for path in stashPaths:
        observer.schedule(event_handler, path, recursive=RECURSIVE)
        if debugTracing: logger.info(f"Observing {path}")
        plugin.Trace(f"Observing {path}")
    observer.start()
    if debugTracing: logger.info("Starting loop................")
    plugin.Trace("Starting loop")
    try:
        while True:
            TmpTargetPaths = []
            with mutex:
                while not shouldUpdate:
                    if debugTracing: logger.info("Wait start................")
                    signal.wait()
                    if debugTracing: logger.info("Wait end................")
                    plugin.Trace("Wait start")
                    signal.wait(timeout=SIGNAL_TIMEOUT)
                    plugin.Trace("Wait end")
                    if shm_buffer[0] != CONTINUE_RUNNING_SIG:
                        plugin.Log(f"Breaking out of loop. (shm_buffer[0]={shm_buffer[0]})")
                        break
                shouldUpdate = False
                TmpTargetPaths = []
                for TargetPath in TargetPaths:
                    TmpTargetPaths.append(os.path.dirname(TargetPath))
                    if TargetPath == SPECIAL_FILE_DIR:
                        if os.path.isfile(SPECIAL_FILE_NAME):
                            shm_buffer[0] = STOP_RUNNING_SIG
                            plugin.Log(f"Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = plugin.LOG_TO_FILE + plugin.LOG_TO_CONSOLE + plugin.LOG_TO_STASH)
                TargetPaths = []
                TmpTargetPaths = list(set(TmpTargetPaths))
            if TmpTargetPaths != []:
                logger.info(f"Triggering stash scan for path(s) {TmpTargetPaths}")
                if not DRY_RUN:
                    stash.metadata_scan(paths=TmpTargetPaths)
                plugin.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}")
                if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
                    if not plugin.DRY_RUN:
                        plugin.STASH_INTERFACE.metadata_scan(paths=TmpTargetPaths)
                    if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
                        stash.metadata_clean(paths=TmpTargetPaths, dry_run=DRY_RUN)
                        plugin.STASH_INTERFACE.metadata_clean(paths=TmpTargetPaths, dry_run=plugin.DRY_RUN)
                    if RUN_GENERATE_CONTENT:
                        stash.metadata_generate()
                if gettingCalledAsStashPlugin and shm_buffer[0] == CONTINUE_RUNNING_SIG:
                    stash.run_plugin_task(plugin_id=PLUGIN_ID, task_name="Start Library Monitor")
                    if debugTracing: logger.info("Exiting plugin so that metadata_scan task can run.")
                        plugin.STASH_INTERFACE.metadata_generate()
                if plugin.CALLED_AS_STASH_PLUGIN and shm_buffer[0] == CONTINUE_RUNNING_SIG:
                    plugin.STASH_INTERFACE.run_plugin_task(plugin_id=plugin.PLUGIN_ID, task_name="Start Library Monitor")
                    plugin.Trace("Exiting plugin so that metadata_scan task can run.")
                    return
            else:
                if debugTracing: logger.info("Nothing to scan.")
                plugin.Trace("Nothing to scan.")
            if shm_buffer[0] != CONTINUE_RUNNING_SIG:
                logger.info(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
                plugin.Log(f"Exiting Change File Monitor. (shm_buffer[0]={shm_buffer[0]})")
                shm_a.close()
                shm_a.unlink() # Call unlink only once to release the shared memory
                raise KeyboardInterrupt
    except KeyboardInterrupt:
        observer.stop()
        if debugTracing: logger.info("Stopping observer................")
        plugin.Trace("Stopping observer")
        if os.path.isfile(SPECIAL_FILE_NAME):
            os.remove(SPECIAL_FILE_NAME)
    observer.join()
    if debugTracing: logger.info("Exiting function................")
    plugin.Trace("Exiting function")

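For readers unfamiliar with watchdog, a self-contained sketch of the handler/observer pattern used by start_library_monitor() above (an assumption for illustration, not part of this commit; it watches the current directory for ten seconds):

import time
import watchdog.events
from watchdog.observers import Observer

handler = watchdog.events.FileSystemEventHandler()
handler.on_created = lambda event: print(f"CREATE *** '{event.src_path}'")
handler.on_deleted = lambda event: print(f"DELETE *** '{event.src_path}'")

observer = Observer()
observer.schedule(handler, ".", recursive=True)  # watch the current directory tree
observer.start()
try:
    time.sleep(10)                               # let events arrive for a while
finally:
    observer.stop()
    observer.join()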
# This function is only useful when called outside of Stash.
# Example: python filemonitor.py stop
# Stops monitoring after triggered by the next file change.
# ToDo: Add logic so it doesn't have to wait until the next file change
# Example: python filemonitor.py --stop
def stop_library_monitor():
    import time
    if debugTracing: logger.info("Opening shared memory map.")
    if CREATE_SPECIAL_FILE_TO_EXIT:
        if os.path.isfile(SPECIAL_FILE_NAME):
            os.remove(SPECIAL_FILE_NAME)
        pathlib.Path(SPECIAL_FILE_NAME).touch()
        if DELETE_SPECIAL_FILE_ON_STOP:
            os.remove(SPECIAL_FILE_NAME)
    plugin.Trace("Opening shared memory map.")
    try:
        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
    except:
        pass
        logger.info("Could not open shared memory map. Change File Monitor must not be running.")
        plugin.Log("Could not open shared memory map. Change File Monitor must not be running.")
        return
    type(shm_a.buf)
    shm_buffer = shm_a.buf
    len(shm_buffer)
    shm_buffer[0] = 123
    if debugTracing: logger.info(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
    shm_buffer[0] = STOP_RUNNING_SIG
    plugin.Trace(f"Shared memory map opened, and flag set to {shm_buffer[0]}")
    shm_a.close()
    shm_a.unlink() # Call unlink only once to release the shared memory
    time.sleep(1)
    return

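The stop logic above relies on a 4-byte shared-memory segment that acts as both a single-instance guard and a stop flag. A condensed sketch of the two sides (an assumption for illustration; the segment name and signal values are the ones used in this commit):

from multiprocessing import shared_memory

CONTINUE_RUNNING_SIG = 99
STOP_RUNNING_SIG = 32

# Monitor side: create the segment and poll byte 0 inside the wait loop.
monitor = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4)
monitor.buf[0] = CONTINUE_RUNNING_SIG

# Stopper side (e.g. --stop): attach to the existing segment and flip the flag.
stopper = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
stopper.buf[0] = STOP_RUNNING_SIG
stopper.close()

# The monitor notices the changed flag on its next wait timeout, then cleans up and unlinks.
monitor.close()
monitor.unlink()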
if stopLibraryMonitoring:
if parse_args.stop or parse_args.restart:
    stop_library_monitor()
    if debugTracing: logger.info(f"stop_library_monitor EXIT................")
elif PLUGIN_ARGS_MODE == "start_library_monitor" or not gettingCalledAsStashPlugin:
    start_library_monitor()
    if debugTracing: logger.info(f"start_library_monitor EXIT................")
    if parse_args.restart:
        time.sleep(5)
        plugin.STASH_INTERFACE.run_plugin_task(plugin_id=plugin.PLUGIN_ID, task_name="Start Library Monitor")
        plugin.Trace(f"Restart FileMonitor EXIT")
else:
    logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
    else:
        plugin.Trace(f"Stop FileMonitor EXIT")
elif plugin.PLUGIN_TASK_NAME == "start_library_monitor" or not plugin.CALLED_AS_STASH_PLUGIN:
    start_library_monitor()
    plugin.Trace(f"start_library_monitor EXIT")
else:
    plugin.Log(f"Nothing to do!!! (plugin.PLUGIN_TASK_NAME={plugin.PLUGIN_TASK_NAME})")

if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
plugin.Trace("\n*********************************\nEXITING ***********************\n*********************************")

@@ -4,6 +4,14 @@
config = {
    # Enable to run metadata_generate (Generate Content) after metadata scan.
    "runGenerateContent": False,
    # Enable to run scan when triggered by on_any_event.
    "onAnyEvent": False,
    # Timeout in seconds. This is how often it will check if a stop signal is sent.
    "timeOut": 60,
    # Enable to exit FileMonitor by creating a special file in plugin folder\working
    "createSpecFileToExit": True,
    # Enable to delete the special file immediately after it's created in the stop process
    "deleteSpecFileInStop": False,

    # The following fields are ONLY used when running FileMonitor in script mode
    "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server

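With "createSpecFileToExit" enabled, FileMonitor can also be told to stop by creating the trigger file inside the plugin's working folder. A sketch (an assumption for illustration; the folder and file name follow SPECIAL_FILE_DIR and SPECIAL_FILE_NAME as defined in filemonitor.py above):

import pathlib

# Hypothetical location: <FileMonitor plugin folder>/working/
working_dir = pathlib.Path(__file__).resolve().parent / "working"
working_dir.mkdir(exist_ok=True)
(working_dir / "trigger_to_kill_filemonitor_by_david_maisonave.txt").touch()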
@@ -1,4 +1,4 @@
stashapp-tools
stashapp-tools >= 0.2.48
pyYAML
watchdog
requests
@@ -17,7 +17,7 @@ from renamefile_settings import config # Import settings from renamefile_setting

# **********************************************************************
# Constant global variables --------------------------------------------
LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
PLUGIN_ID = Path(__file__).stem.lower()

@@ -1,3 +1,3 @@
stashapp-tools
stashapp-tools >= 0.2.48
pyYAML
requests