From ad44a543ba8e25ddf06f6649e9ff650b22a14a75 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Fri, 16 Aug 2024 03:42:37 -0400 Subject: [PATCH] Added logic to self test FileMonitor on all task types. --- plugins/.gitignore | 2 +- plugins/DupFileManager/StashPluginHelper.py | 21 +++++-- plugins/FileMonitor/StashPluginHelper.py | 21 +++++-- plugins/FileMonitor/filemonitor.py | 68 +++++++++++++++------ plugins/FileMonitor/filemonitor_config.py | 38 ++++++++++-- 5 files changed, 112 insertions(+), 38 deletions(-) diff --git a/plugins/.gitignore b/plugins/.gitignore index 7d66bf7..a338201 100644 --- a/plugins/.gitignore +++ b/plugins/.gitignore @@ -34,7 +34,7 @@ renamefile_settings.cpython-310.pyc /DeleteMe /ATestPlugin /FileMonitor/working -test_script_hello_world.py +test_hello_world*.* MyDummyFileFrom_test_script_hello_world.txt ## Ignore Visual Studio temporary files, build results, and diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py index 7a7fdd2..218e055 100644 --- a/plugins/DupFileManager/StashPluginHelper.py +++ b/plugins/DupFileManager/StashPluginHelper.py @@ -1,6 +1,7 @@ from stashapi.stashapp import StashInterface from logging.handlers import RotatingFileHandler import inspect, sys, os, pathlib, logging, json +import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ @@ -58,6 +59,7 @@ class StashPluginHelper(StashInterface): STDIN_READ = None pluginLog = None logLinePreviousHits = [] + thredPool = None # Prefix message value LEV_TRACE = "TRACE: " @@ -95,6 +97,7 @@ class StashPluginHelper(StashInterface): DebugTraceFieldName = "zzdebugTracing", DryRunFieldName = "zzdryRun", setStashLoggerAsPluginLogger = False): + self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) if logToWrnSet: self.log_to_wrn_set = logToWrnSet if logToErrSet: self.log_to_err_set = logToErrSet if logToNormSet: self.log_to_norm = logToNormSet @@ -179,6 +182,9 @@ class StashPluginHelper(StashInterface): if setStashLoggerAsPluginLogger: self.log = self.pluginLog + def __del__(self): + self.thredPool.shutdown(wait=False) + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): if printTo == 0: printTo = self.log_to_norm @@ -269,25 +275,28 @@ class StashPluginHelper(StashInterface): self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", printTo, logLevel, lineNo) - def ExecuteProcess(self, args): + def ExecuteProcess(self, args, ExecDetach=False): import platform, subprocess is_windows = any(platform.win32_ver()) pid = None self.Trace(f"is_windows={is_windows} args={args}") if is_windows: - self.Trace("Executing process using Windows DETACHED_PROCESS") - DETACHED_PROCESS = 0x00000008 - pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + if ExecDetach: + self.Trace("Executing process using Windows DETACHED_PROCESS") + DETACHED_PROCESS = 0x00000008 + pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + else: + pid = subprocess.Popen(args, shell=True).pid else: self.Trace("Executing process using normal Popen") pid = subprocess.Popen(args).pid 
self.Trace(f"pid={pid}") return pid - def ExecutePythonScript(self, args): + def ExecutePythonScript(self, args, ExecDetach=True): PythonExe = f"{sys.executable}" argsWithPython = [f"{PythonExe}"] + args - return self.ExecuteProcess(argsWithPython) + return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py index 7a7fdd2..218e055 100644 --- a/plugins/FileMonitor/StashPluginHelper.py +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -1,6 +1,7 @@ from stashapi.stashapp import StashInterface from logging.handlers import RotatingFileHandler import inspect, sys, os, pathlib, logging, json +import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ @@ -58,6 +59,7 @@ class StashPluginHelper(StashInterface): STDIN_READ = None pluginLog = None logLinePreviousHits = [] + thredPool = None # Prefix message value LEV_TRACE = "TRACE: " @@ -95,6 +97,7 @@ class StashPluginHelper(StashInterface): DebugTraceFieldName = "zzdebugTracing", DryRunFieldName = "zzdryRun", setStashLoggerAsPluginLogger = False): + self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) if logToWrnSet: self.log_to_wrn_set = logToWrnSet if logToErrSet: self.log_to_err_set = logToErrSet if logToNormSet: self.log_to_norm = logToNormSet @@ -179,6 +182,9 @@ class StashPluginHelper(StashInterface): if setStashLoggerAsPluginLogger: self.log = self.pluginLog + def __del__(self): + self.thredPool.shutdown(wait=False) + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): if printTo == 0: printTo = self.log_to_norm @@ -269,25 +275,28 @@ class StashPluginHelper(StashInterface): self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", printTo, logLevel, lineNo) - def ExecuteProcess(self, args): + def ExecuteProcess(self, args, ExecDetach=False): import platform, subprocess is_windows = any(platform.win32_ver()) pid = None self.Trace(f"is_windows={is_windows} args={args}") if is_windows: - self.Trace("Executing process using Windows DETACHED_PROCESS") - DETACHED_PROCESS = 0x00000008 - pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + if ExecDetach: + self.Trace("Executing process using Windows DETACHED_PROCESS") + DETACHED_PROCESS = 0x00000008 + pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid + else: + pid = subprocess.Popen(args, shell=True).pid else: self.Trace("Executing process using normal Popen") pid = subprocess.Popen(args).pid self.Trace(f"pid={pid}") return pid - def ExecutePythonScript(self, args): + def ExecutePythonScript(self, args, ExecDetach=True): PythonExe = f"{sys.executable}" argsWithPython = [f"{PythonExe}"] + args - return self.ExecuteProcess(argsWithPython) + return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): 
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py index 17f1669..4b72352 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -184,6 +184,7 @@ class StashScheduler: # Stash Scheduler stash.Error(f"Task '{task['task']}' is missing fields.") self.checkSchedulePending() + # ToDo: Add asynchronous threading logic to running task. def runTask(self, task): import datetime stash.Trace(f"Running task {task}") @@ -209,14 +210,17 @@ class StashScheduler: # Stash Scheduler elif task['task'] == "Backup": stash.LogOnce("Note: Backup task does not get listed in the Task Queue, but user can verify that it started by looking in the Stash log file as an INFO level log line.") result = stash.backup_database() - if stash.pluginSettings['zmaximumBackups'] < 2: - stash.TraceOnce(f"Skipping DB backup file trim because zmaximumBackups={stash.pluginSettings['zmaximumBackups']}. Value has to be greater than 1.") + maximumBackup = stash.pluginSettings['zmaximumBackups'] + if "maxBackups" in task: + maximumBackup = task['maxBackups'] + if maximumBackup < 2: + stash.TraceOnce(f"Skipping DB backup file trim because zmaximumBackups={maximumBackup}. Value has to be greater than 1.") elif 'backupDirectoryPath' in stash.STASH_CONFIGURATION: if len(stash.STASH_CONFIGURATION['backupDirectoryPath']) < 5: stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath length is to short. Len={len(stash.STASH_CONFIGURATION['backupDirectoryPath'])}. Only support length greater than 4 characters.") elif os.path.exists(stash.STASH_CONFIGURATION['backupDirectoryPath']): - stash.LogOnce(f"Checking quantity of DB backups if path {stash.STASH_CONFIGURATION['backupDirectoryPath']} exceeds {stash.pluginSettings['zmaximumBackups']} backup files.") - self.trimDbFiles(stash.STASH_CONFIGURATION['backupDirectoryPath'], stash.pluginSettings['zmaximumBackups']) + stash.LogOnce(f"Checking quantity of DB backups if path {stash.STASH_CONFIGURATION['backupDirectoryPath']} exceeds {maximumBackup} backup files.") + self.trimDbFiles(stash.STASH_CONFIGURATION['backupDirectoryPath'], maximumBackup) else: stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath does NOT exist. 
backupDirectoryPath={stash.STASH_CONFIGURATION['backupDirectoryPath']}")
         elif task['task'] == "Scan":
@@ -225,33 +229,49 @@ class StashScheduler: # Stash Scheduler
             result = stash.metadata_autotag(paths=targetPaths)
         elif task['task'] == "Optimise Database":
             result = stash.optimise_database()
+        elif task['task'] == "RenameGeneratedFiles":
+            result = stash.rename_generated_files()
         elif task['task'] == "GQL":
             result = stash.call_GQL(task['input'])
         elif task['task'] == "python":
-            script = task['script'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
-            stash.Log(f"Executing python script {script}.")
-            args = [script]
-            if 'args' in task and len(task['args']) > 0:
-                args = args + [task['args']]
-            result = f"Python process PID = {stash.ExecutePythonScript(args)}"
+            if 'script' in task and task['script'] != "":
+                script = task['script'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
+                stash.Log(f"Executing python script {script}.")
+                args = [script]
+                if 'args' in task and len(task['args']) > 0:
+                    args = args + [task['args']]
+                detached = True
+                if 'detach' in task:
+                    detached = task['detach']
+                result = f"Python process PID = {stash.ExecutePythonScript(args, ExecDetach=detached)}"
+            else:
+                stash.Error(f"Cannot run task '{task['task']}' because it is missing the 'script' field.")
         elif task['task'] == "execute":
-            cmd = task['command'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
-            stash.Log(f"Executing command {cmd}.")
-            args = [cmd]
-            if 'args' in task and len(task['args']) > 0:
-                args = args + [task['args']]
-            result = f"Execute process PID = {stash.ExecuteProcess(args)}"
+            if 'command' in task and task['command'] != "":
+                cmd = task['command'].replace("", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
+                args = [cmd]
+                if 'args' in task and len(task['args']) > 0:
+                    args = args + [task['args']]
+                stash.Log(f"Executing command with arguments {args}.")
+                result = f"Execute process PID = {stash.ExecuteProcess(args)}"
+            else:
+                stash.Error(f"Cannot run task '{task['task']}' because it is missing the 'command' field.")
         else: # ToDo: Add code to check if plugin is installed.
-            stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
             try:
-                stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
+                if 'pluginId' in task and task['pluginId'] != "":
+                    stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
+                    stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
+                else:
+                    stash.Error(f"Cannot run task '{task['task']}' because it is not a valid task.")
+                    stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check that the task name is spelled correctly.")
+                    stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}")
             except Exception as e:
                 stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}")
                pass

         if result:
-            stash.Trace(f"Stash task '{task['task']}' result={result}")
+            stash.Trace(f"Task '{task['task']}' result={result}")

     def trimDbFiles(self, dbPath, maxFiles):
         if not os.path.exists(dbPath):
@@ -301,10 +321,12 @@ def start_library_monitor():
         if len(fileExtTypes) > 0:
             suffix = pathlib.Path(chng_path_lwr).suffix.lstrip(".")
             if suffix not in fileExtTypes:
+                stash.TraceOnce(f"Ignoring file change because it is not a monitored file type ({suffix}).")
                 return True
         if len(excludePathChanges) > 0:
             for path in excludePathChanges:
                 if chng_path_lwr.startswith(path.lower()):
+                    stash.TraceOnce(f"Ignoring file change because it is in an excluded path ({chng_path_lwr}), per entry '{path}'.")
                     return True
         if addToTargetPaths:
             TargetPaths.append(chng_path)
@@ -511,9 +533,15 @@ if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_libr
 elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
     start_library_monitor_service()
     stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT")
-elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID or not stash.CALLED_AS_STASH_PLUGIN:
+elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
     start_library_monitor()
     stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT")
+elif not stash.CALLED_AS_STASH_PLUGIN:
+    try:
+        start_library_monitor()
+        stash.Trace("Command line FileMonitor EXIT")
+    except Exception as e:
+        stash.Error(f"Exception while running FileMonitor from the command line. Error: {e}")
 else:
     stash.Log(f"Nothing to do!!! (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})")
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index 7e1538a..a5f6f00 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -43,13 +43,17 @@ config = {
     {"task" : "python", "script" : "test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time

     # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exists, the paths in the Stash library is used.
-    {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"},   # Library -> [Scan]
+    {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"},   # Library -> [Scan]
     {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
-    {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"},   # Generated Content-> [Generate]
+    {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"},   # Generated Content-> [Generate]

     # Example#A4: Task which calls Migrations -> [Rename generated files]
     {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"},   # (bi-weekly) example

+    # Example#A5: The Backup task using the optional field maxBackups, which overrides the UI [Max DB Backups] value
+    {"task" : "Backup", "maxBackups" : 12, "weekday" : "sunday", "time" : "DISABLED"},   # Trim the DB backup files down to 12 backup files.
+    {"task" : "Backup", "maxBackups" : 0, "weekday" : "sunday", "time" : "DISABLED"},   # When set to zero, no backup file trimming occurs, regardless of the UI [Max DB Backups] value.
+
     # The above weekday method is the more reliable method to schedule task, because it doesn't rely on FileMonitor running continuously (non-stop).

     # The below examples use frequency field method which can work with minutes and hours. A zero frequency value disables the task.
@@ -67,9 +71,33 @@ config = {
     # Example#B3: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory.
     {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0},

-    # Commented out **test** tasks.
-    # {"task" : "Clean", "seconds" : 30},
-    # {"task" : "Scan", "paths" : [r"B:\_\SpecialSet", r"B:\_\Casting\Latina"], "seconds" : 30}
+    # Comment out **test** tasks.
+    # To run these tests, enable all the tasks and start FileMonitor as a service.
+    # When executed, these tasks should appear in the Task Queue unless otherwise stated in the comments.
+    # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
+    # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
+    # MUST ToDo: Always comment out the below test tasks before checking in this code!!!
+    # {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
+    # {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
+    # {"task" : "python", "minutes" : 1}, # Test invalid task (missing script)
+    # {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
+    # {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (empty command)
+    # {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (empty script)
+    # {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (empty pluginId)
+    # {"task" : "Generate", "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "Clean", "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "Auto Tag", "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "Optimise Database", "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Running plugin task: Create Tags
+    # {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Optimising database...
+    # {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "00:00"},
+    # {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "00:00"}, # In task queue as -> Migrating scene hashes...
+    # {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+    # {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+    # {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+    # {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+    # {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "00:00"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
 ],

 # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
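Taken together, the scheduler fields this patch introduces (maxBackups on Backup tasks, detach on python tasks, and the pluginId requirement for anything that is not a built-in task name) can be exercised with entries along the following lines. This is a sketch only: the weekdays, times, and script name are placeholders taken from the examples above, and the surrounding config key is omitted.

    # Illustrative task_scheduler entries (values are placeholders).
    {"task" : "Backup", "maxBackups" : 12, "weekday" : "sunday", "time" : "01:00"},   # Keep at most 12 DB backup files.
    {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "00:00"},   # detach=False keeps the child process tied to FileMonitor instead of using DETACHED_PROCESS.
    {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "friday", "time" : "00:00"},   # Non-built-in tasks need a pluginId.

A Backup entry with "maxBackups" : 0 (or 1) skips the backup-file trimming entirely, since runTask only trims when the effective value is greater than 1.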