forked from David-Maisonave/Axter-Stash
Add start FileMonitor as a service from UI
@@ -77,7 +77,7 @@ class StashPluginHelper:
     log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
     log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging
     log_to_wrn_set = LOG_TO_FILE + LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages

     def __init__(self,
                  debugTracing = None,     # Set debugTracing to True so as to output debug and trace logging
                  logFormat = LOG_FORMAT,  # Plugin log line format
@@ -142,7 +142,7 @@ class StashPluginHelper:
             self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
             self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
             self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
-            self.STASH_INTERFACE = StashInterface(self.FRAGMENT_SERVER)
+            self.STASH_INTERFACE = self.ExtendStashInterface(self.FRAGMENT_SERVER)
         else:
             try:
                 self.STDIN_READ = sys.stdin.read()
@@ -155,7 +155,7 @@ class StashPluginHelper:
                 self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
                 self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
                 self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
-                self.STASH_INTERFACE = StashInterface(self.FRAGMENT_SERVER)
+                self.STASH_INTERFACE = self.ExtendStashInterface(self.FRAGMENT_SERVER)

         if self.STASH_INTERFACE:
             self.PLUGIN_CONFIGURATION = self.STASH_INTERFACE.get_configuration()["plugins"]
@@ -239,3 +239,27 @@ class StashPluginHelper:
         lineNo = inspect.currentframe().f_back.f_lineno
         self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
                  printTo, logLevel, lineNo)
+
+    # Extends class StashInterface with functions which are not yet in the class
+    class ExtendStashInterface(StashInterface):
+        def metadata_autotag(self, paths:list=[], dry_run=False):
+            if not paths:
+                return
+
+            query = """
+                mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+                    metadataAutoTag(input: $input)
+                }
+            """
+
+            metadata_autotag_input = {
+                "paths": paths
+            }
+            result = self.call_GQL(query, {"input": metadata_autotag_input})
+            return result
+
+        def backup_database(self):
+            return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+        def optimise_database(self):
+            return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
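The helpers added in the @@ -239 hunk become reachable through the plugin's STASH_INTERFACE once the constructor swaps in ExtendStashInterface. A minimal usage sketch, assuming a default-constructed StashPluginHelper and a hypothetical library path (illustrative, not part of the commit):

# Illustrative only: calling the new helpers from a plugin script.
from StashPluginHelper import StashPluginHelper

plugin = StashPluginHelper()                     # parses stdin / --url as shown above
if plugin.STASH_INTERFACE:
    plugin.STASH_INTERFACE.metadata_autotag(paths=["/media/stash-library"])  # hypothetical path
    plugin.STASH_INTERFACE.backup_database()     # server-side backup, no download
    plugin.STASH_INTERFACE.optimise_database()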
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.6.1 (By David Maisonave)
+# FileMonitor: Ver 0.7.0 (By David Maisonave)
 FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin which updates Stash if any changes occurs in the Stash library paths.

 ### Using FileMonitor as a plugin
@@ -3,7 +3,7 @@
 # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 # Note: To call this script outside of Stash, pass --url and the Stash URL.
 # Example: python filemonitor.py --url http://localhost:9999
-import os, time, pathlib, argparse
+import os, sys, time, pathlib, argparse
 from StashPluginHelper import StashPluginHelper
 import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
 from watchdog.observers import Observer # This is also needed for event attributes
@@ -50,6 +50,7 @@ signal = Condition(mutex)
 shouldUpdate = False
 TargetPaths = []

+SHAREDMEMORY_NAME = "DavidMaisonaveAxter_FileMonitor"
 RECURSIVE = plugin.pluginSettings["recursiveDisabled"] == False
 SCAN_MODIFIED = plugin.pluginConfig["scanModified"]
 RUN_CLEAN_AFTER_DELETE = plugin.pluginConfig["runCleanAfterDelete"]
@@ -84,30 +85,25 @@ def isJobWaitingToRun():
     global FileMonitorPluginIsOnTaskQue
     FileMonitorPluginIsOnTaskQue = False
     jobIsWaiting = False
-    i = 1
-    while True:
-        jobDetails = plugin.STASH_INTERFACE.find_job(i)
-        if jobDetails:
-            plugin.Trace(f"(Job ID({i})={jobDetails})")
-            if jobDetails['status'] == "READY":
-                if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
-                    StopLibraryMonitorWaitingInTaskQueue = True
-                    JobIdInTheQue = i
-                    jobIsWaiting = True
-            elif jobDetails['status'] == "RUNNING" and jobDetails['description'] == "Running plugin task: Start Library Monitor":
-                FileMonitorPluginIsOnTaskQue = True
-        else:
-            plugin.Trace(f"Last job {i}")
-            break
-        i += 1
+    taskQue = plugin.STASH_INTERFACE.job_queue()
+    for jobDetails in taskQue:
+        plugin.Trace(f"(Job ID({jobDetails['id']})={jobDetails})")
+        if jobDetails['status'] == "READY":
+            if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
+                StopLibraryMonitorWaitingInTaskQueue = True
+                JobIdInTheQue = jobDetails['id']
+                jobIsWaiting = True
+        elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find("Start Library Monitor") > -1:
+            FileMonitorPluginIsOnTaskQue = True
     JobIdInTheQue = 0
     return jobIsWaiting

 if plugin.CALLED_AS_STASH_PLUGIN:
     plugin.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")

-# ToDo: Add logic here for reoccurring scheduler
+# Reoccurring scheduler code
 def runTask(task):
     plugin.Trace(f"Running task {task}")
     if task['task'] == "Clean":
         plugin.STASH_INTERFACE.metadata_clean(paths=stashPaths, dry_run=plugin.DRY_RUN)
     elif task['task'] == "Generate":
@@ -116,21 +112,29 @@ def runTask(task):
         plugin.STASH_INTERFACE.call_GQL("mutation { backupDatabase(input: {download: false})}")
     elif task['task'] == "Scan":
         plugin.STASH_INTERFACE.metadata_scan(paths=stashPaths)
-    # elif task['task'] == "Create Tags":
-    #     plugin.STASH_INTERFACE.run_plugin_task(plugin_id="pathParser", task_name="Create Tags")
     elif task['task'] == "Auto Tag":
-        plugin.Warn("Auto Tag is not implemented!!!")
+        plugin.STASH_INTERFACE.metadata_autotag(paths=stashPaths, dry_run=plugin.DRY_RUN)
+    elif task['task'] == "Optimise Database":
+        plugin.STASH_INTERFACE.optimise_database()
     else:
         # ToDo: Add code to check if plugin is installed.
         plugin.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
         plugin.STASH_INTERFACE.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])

 def reoccurringScheduler():
     import schedule # pip install schedule # https://github.com/dbader/schedule
     for task in plugin.pluginConfig['task_reoccurring_scheduler']:
-        if task['hours'] > 0:
-            plugin.Log(f"Adding to reoccurring scheduler task '{task['task']}' at {task['hours']} hour(s) interval")
+        if 'days' in task and task['days'] > 0:
+            plugin.Log(f"Adding to reoccurring scheduler task '{task['task']}' at {task['days']} days interval")
+            schedule.every(task['days']).days.do(runTask, task)
+        elif 'hours' in task and task['hours'] > 0:
+            plugin.Log(f"Adding to reoccurring scheduler task '{task['task']}' at {task['hours']} hours interval")
             schedule.every(task['hours']).hours.do(runTask, task)
+        elif 'minutes' in task and task['minutes'] > 0:
+            plugin.Log(f"Adding to reoccurring scheduler task '{task['task']}' at {task['minutes']} minutes interval")
+            schedule.every(task['minutes']).minutes.do(runTask, task)
 def checkSchedulePending():
     import schedule # pip install schedule # https://github.com/dbader/schedule
     schedule.run_pending()
 if plugin.pluginConfig['turnOnScheduler']:
     reoccurringScheduler()
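The scheduler above relies on the third-party schedule package: reoccurringScheduler() registers each configured task once, and nothing fires until schedule.run_pending() is called, which is why the main loop calls checkSchedulePending() on every pass (see the @@ -238 hunk below). A standalone sketch of that pattern, with placeholder intervals and a print() stand-in for runTask:

# Sketch of the schedule pattern used above (pip install schedule).
# Intervals and the task body are placeholders, not FileMonitor code.
import time
import schedule

def run_task(task):
    print(f"Running task {task['task']}")

schedule.every(2).days.do(run_task, {"task": "Clean"})
schedule.every(24).hours.do(run_task, {"task": "Auto Tag"})

while True:
    schedule.run_pending()   # only runs jobs whose interval has elapsed
    time.sleep(60)           # FileMonitor instead waits on its Condition/timeout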
@@ -139,9 +143,9 @@ def start_library_monitor():
     global TargetPaths
     try:
         # Create shared memory buffer which can be used as singleton logic or to get a signal to quit task from external script
-        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=True, size=4)
+        shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=True, size=4)
     except:
-        plugin.Error("Could not open shared memory map. Change File Monitor must be running. Can not run multiple instance of Change File Monitor.")
+        plugin.Error(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must be running. Can not run multiple instance of Change File Monitor.")
         return
     type(shm_a.buf)
     shm_buffer = shm_a.buf
@@ -238,6 +242,8 @@ def start_library_monitor():
         if shm_buffer[0] != CONTINUE_RUNNING_SIG:
             plugin.Log(f"Breaking out of loop. (shm_buffer[0]={shm_buffer[0]})")
             break
+        if plugin.pluginConfig['turnOnScheduler']:
+            checkSchedulePending()
         plugin.Trace("Wait start")
         if plugin.CALLED_AS_STASH_PLUGIN:
             signal.wait(timeout=SIGNAL_TIMEOUT)
@@ -251,7 +257,9 @@ def start_library_monitor():
             if TargetPath == SPECIAL_FILE_DIR:
                 if os.path.isfile(SPECIAL_FILE_NAME):
                     shm_buffer[0] = STOP_RUNNING_SIG
-                    plugin.Log(f"Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = plugin.LOG_TO_FILE + plugin.LOG_TO_CONSOLE + plugin.LOG_TO_STASH)
+                    plugin.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = plugin.LOG_TO_FILE + plugin.LOG_TO_CONSOLE + plugin.LOG_TO_STASH)
+                else:
+                    plugin.Trace(f"[SpFl]Did not find file {SPECIAL_FILE_NAME}.")
         TargetPaths = []
         TmpTargetPaths = list(set(TmpTargetPaths))
         if TmpTargetPaths != []:
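The signal.wait(timeout=SIGNAL_TIMEOUT) in the @@ -238 hunk pairs with a watchdog event handler (outside these hunks) that records changed paths in TargetPaths and notifies the Condition. A compressed, illustrative sketch of that hand-off; the handler name, path, and timeout are placeholders:

# Illustrative watchdog + threading.Condition hand-off, not the plugin's code.
from threading import Condition, Lock
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

mutex = Lock()
signal = Condition(mutex)
target_paths = []

class LibraryHandler(FileSystemEventHandler):
    def on_any_event(self, event):
        with mutex:
            target_paths.append(event.src_path)
            signal.notify()                      # wakes the waiting monitor loop

observer = Observer()
observer.schedule(LibraryHandler(), "/media/stash-library", recursive=True)  # hypothetical path
observer.start()
with mutex:
    signal.wait(timeout=60)                      # returns early when a change arrives
observer.stop()
observer.join()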
@@ -285,7 +293,6 @@ def start_library_monitor():
     observer.join()
     plugin.Trace("Exiting function")

 # This function is only useful when called outside of Stash.
 # Example: python filemonitor.py --stop
 def stop_library_monitor():
     if CREATE_SPECIAL_FILE_TO_EXIT:
@@ -296,10 +303,10 @@ def stop_library_monitor():
         os.remove(SPECIAL_FILE_NAME)
     plugin.Trace("Opening shared memory map.")
     try:
-        shm_a = shared_memory.SharedMemory(name="DavidMaisonaveAxter_FileMonitor", create=False, size=4)
+        shm_a = shared_memory.SharedMemory(name=SHAREDMEMORY_NAME, create=False, size=4)
     except:
-        pass
-        plugin.Log("Could not open shared memory map. Change File Monitor must not be running.")
+        # If FileMonitor is running as plugin, then it's expected behavior that SharedMemory will not be avialable.
+        plugin.Trace(f"Could not open shared memory map ({SHAREDMEMORY_NAME}). Change File Monitor must not be running.")
         return
     type(shm_a.buf)
     shm_buffer = shm_a.buf
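start_library_monitor() creates the 4-byte shared-memory block as both a single-instance guard and a mailbox, and stop_library_monitor() attaches to the same SHAREDMEMORY_NAME and overwrites byte 0 to request shutdown. A standalone sketch of that handshake; the numeric signal values and the block name are placeholders:

# Sketch of the shared-memory handshake used above; values are placeholders.
from multiprocessing import shared_memory

CONTINUE_RUNNING_SIG = 1
STOP_RUNNING_SIG = 2

# Monitor side: create=True doubles as a single-instance guard, because it
# fails if a block with the same name already exists.
shm = shared_memory.SharedMemory(name="FileMonitorSketch", create=True, size=4)
shm.buf[0] = CONTINUE_RUNNING_SIG

# Stopper side (normally a second process): attach and flip the first byte.
shm_other = shared_memory.SharedMemory(name="FileMonitorSketch", create=False, size=4)
shm_other.buf[0] = STOP_RUNNING_SIG
shm_other.close()

# Monitor side notices the change on its next loop pass, then cleans up.
if shm.buf[0] != CONTINUE_RUNNING_SIG:
    shm.close()
    shm.unlink()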
@@ -318,6 +325,23 @@ if parse_args.stop or parse_args.restart or plugin.PLUGIN_TASK_NAME == "stop_lib
         plugin.Trace(f"Restart FileMonitor EXIT")
     else:
         plugin.Trace(f"Stop FileMonitor EXIT")
+elif plugin.PLUGIN_TASK_NAME == "start_library_monitor_service":
+    import subprocess
+    import platform
+    is_windows = any(platform.win32_ver())
+    PythonExe = f"{sys.executable}"
+    # PythonExe = PythonExe.replace("python.exe", "pythonw.exe")
+    args = [f"{PythonExe}", f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{plugin.STASH_URL}"]
+    plugin.Trace(f"args={args}")
+    if is_windows:
+        plugin.Trace("Executing process using Windows DETACHED_PROCESS")
+        DETACHED_PROCESS = 0x00000008
+        pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
+    else:
+        plugin.Trace("Executing process using normal Popen")
+        pid = subprocess.Popen(args).pid
+    plugin.Trace(f"pid={pid}")
+    plugin.Trace(f"start_library_monitor_service EXIT")
 elif plugin.PLUGIN_TASK_NAME == "start_library_monitor" or not plugin.CALLED_AS_STASH_PLUGIN:
     start_library_monitor()
     plugin.Trace(f"start_library_monitor EXIT")
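The new start_library_monitor_service branch relaunches filemonitor.py as its own command-line process so the monitor outlives the short plugin call, using the Windows DETACHED_PROCESS creation flag where available. For reference, a minimal cross-platform detach sketch (a general illustration, not the plugin's exact code; start_new_session is the POSIX analogue):

# General illustration of detaching a child process from its parent.
import platform
import subprocess
import sys

args = [sys.executable, "filemonitor.py", "--url", "http://localhost:9999"]  # illustrative
if any(platform.win32_ver()):
    DETACHED_PROCESS = 0x00000008                          # Windows creation flag
    proc = subprocess.Popen(args, creationflags=DETACHED_PROCESS)
else:
    proc = subprocess.Popen(args, start_new_session=True)  # POSIX: start in a new session
print(f"launched pid={proc.pid}")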
@@ -1,6 +1,6 @@
 name: FileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths.
-version: 0.6.1
+version: 0.7.0
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 settings:
   recursiveDisabled:
@@ -20,8 +20,12 @@ exec:
   - "{pluginDir}/filemonitor.py"
 interface: raw
 tasks:
-  - name: Start Library Monitor
-    description: Monitors paths in Stash library for media file changes, and updates Stash.
+  - name: Start Library Monitor Service
+    description: Run as a SERVICE to monitors paths in Stash library for media file changes, and updates Stash. Recommended start method.
+    defaultArgs:
+      mode: start_library_monitor_service
+  - name: Start Library Monitor Plugin
+    description: Run as a plugin (not recommended method)
     defaultArgs:
       mode: start_library_monitor
   - name: Stop Library Monitor
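Each task button above passes its defaultArgs.mode to the script as JSON on stdin; StashPluginHelper surfaces it as PLUGIN_TASK_NAME (see the @@ -155 hunk near the top), which the new start_library_monitor_service branch keys on. A trimmed, illustrative sketch of that dispatch:

# Sketch of how a task button's mode reaches the script; the JSON is a
# trimmed, illustrative fragment of what Stash writes to stdin.
import json

stdin_payload = '{"args": {"mode": "start_library_monitor_service"}, "server_connection": {}}'
json_input = json.loads(stdin_payload)
plugin_task_name = json_input["args"]["mode"]

if plugin_task_name == "start_library_monitor_service":
    print("would relaunch filemonitor.py as a detached service")
elif plugin_task_name == "start_library_monitor":
    print("would run the monitor inside the plugin task")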
@@ -17,20 +17,22 @@ config = {
     # Enable to run metadata clean task after file deletion.
     "runCleanAfterDelete": False,

+    # The scheduler my only work reliably when FileMonitor runs in command line mode (as a service)
     # Enable to turn on scheduler_task_list
     "turnOnScheduler": True,
-    # Reoccurring scheduler task list. To activate schedule, change number from zero to the number of hours interval
+    # Reoccurring scheduler task list.
     "task_reoccurring_scheduler": [
-        # Example: To perform a 'Clean' task every 48 hours, change zero to 48
-        # Hours Conversion: 24=Daily, 168=Weekly, 720=Monthly, 1440=Bi-Monthly, 2160=Quarterly, 8760=Yearly
-        {"task" : "Clean", "hours" : 48}, # Maintenance Clean (every 2 days)
-        {"task" : "Generate", "hours" : 168}, # Generated Content (Weekly)
-        {"task" : "Backup", "hours" : 720}, # Backup Backup (Monthly)
-        {"task" : "Scan", "hours" : 168}, # Library Scan (Weekly)
-        # {"task" : "Create Tags", "hours" : 24},# Requires plugin [Path Parser]
-        {"task" : "Create Tags", "pluginId" : "pathParser", "hours" : 24}, # Requires plugin [Path Parser]
-        {"task" : "Auto Tag", "hours" : 0}, # !!! Not yet implemented!!!
-        {"task" : "MyTaskHere", "pluginId" : "MyPluginId", "hours" : 0}, # Place holder for custom task.
+        # Frequency can be in minutes, hours, or days.
+        # A zero frequency value disables the task.
+        {"task" : "Clean", "days" : 2}, # Maintenance -> [Clean] (every 2 days)
+        {"task" : "Generate", "days" : 7}, # Generated Content-> [Generate] (Weekly)
+        {"task" : "Backup", "days" : 30}, # Backup -> [Backup] (Monthly)
+        {"task" : "Scan", "days" : 7}, # Library -> [Scan] (Weekly)
+        {"task" : "Auto Tag", "hours" : 24}, # Auto Tag -> [Auto Tag] (Daily)
+        {"task" : "Optimise Database", "hours" : 24}, # Maintenance -> [Optimise Database] (Daily)
+        {"task" : "Create Tags", "pluginId" : "pathParser", "days" : 1}, # Requires plugin [Path Parser] (Daily)
+        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # Place holder for custom task.
+        # Add additional task here.
     ],
     # Maximum backups to keep. When scheduler is enabled, and the Backup runs, delete older backups after reaching maximum backups.
     "BackupsMax" : 6, # Not yet implemented!!!
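With the new format, a scheduler entry's interval may be given in days, hours, or minutes, a zero value disables it, and entries carrying a pluginId fall through to runTask's run_plugin_task branch. An illustrative way to extend the list; the custom plugin id and task button name are placeholders:

# Illustrative task_reoccurring_scheduler entries; "MyPlugin"/"My Task" are placeholders.
config = {
    "turnOnScheduler": True,
    "task_reoccurring_scheduler": [
        {"task" : "Clean",   "days" : 2},                             # built-in library clean
        {"task" : "Backup",  "days" : 30},                            # server-side database backup
        {"task" : "Scan",    "minutes" : 0},                          # zero interval = disabled
        {"task" : "My Task", "pluginId" : "MyPlugin", "hours" : 12},  # dispatched via run_plugin_task
    ],
}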
@@ -1,3 +1,3 @@
-stashapp-tools >= 0.2.48
+stashapp-tools >= 0.2.49
 pyYAML
 watchdog