Too many changes to list

This commit is contained in:
David Maisonave
2024-09-13 10:10:37 -04:00
parent 5b34502963
commit 452c08df03
18 changed files with 1645 additions and 353 deletions

View File

@@ -142,7 +142,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
- pip install -r requirements.txt
- Or manually install each requirement:
- `pip install stashapp-tools --upgrade`
- `pip install pyYAML`
- `pip install requests`
- `pip install watchdog`
- `pip install schedule`

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler
import re, inspect, sys, os, pathlib, logging, json
import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
import concurrent.futures
from stashapi.stash_types import PhashDistance
import __main__
@@ -61,6 +61,14 @@ class StashPluginHelper(StashInterface):
LOG_FILE_DIR = None
LOG_FILE_NAME = None
STDIN_READ = None
stopProcessBarSpin = True
IS_DOCKER = False
IS_WINDOWS = False
IS_LINUX = False
IS_FREEBSD = False
IS_MAC_OS = False
pluginLog = None
logLinePreviousHits = []
thredPool = None
@@ -107,6 +115,16 @@ class StashPluginHelper(StashInterface):
DryRunFieldName = "zzdryRun",
setStashLoggerAsPluginLogger = False):
self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
if any(platform.win32_ver()):
self.IS_WINDOWS = True
elif platform.system().lower().startswith("linux"):
self.IS_LINUX = True
if self.isDocker():
self.IS_DOCKER = True
elif platform.system().lower().startswith("freebsd"):
self.IS_FREEBSD = True
elif sys.platform == "darwin":
self.IS_MAC_OS = True
if logToWrnSet: self.log_to_wrn_set = logToWrnSet
if logToErrSet: self.log_to_err_set = logToErrSet
if logToNormSet: self.log_to_norm = logToNormSet
@@ -300,37 +318,43 @@ class StashPluginHelper(StashInterface):
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
# Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
# The below non-loggging functions use (lower) camelCase naming convention.
def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
printTo, logLevel, lineNo)
def ExecuteProcess(self, args, ExecDetach=False):
import platform, subprocess
is_windows = any(platform.win32_ver())
def executeProcess(self, args, ExecDetach=False):
pid = None
self.Trace(f"is_windows={is_windows} args={args}")
if is_windows:
self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
if self.IS_WINDOWS:
if ExecDetach:
self.Trace("Executing process using Windows DETACHED_PROCESS")
self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
DETACHED_PROCESS = 0x00000008
pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
else:
pid = subprocess.Popen(args, shell=True).pid
else:
self.Trace("Executing process using normal Popen")
pid = subprocess.Popen(args).pid
if ExecDetach:
# For linux detached, use nohup. I.E. subprocess.Popen(["nohup", "python", "test.py"])
if self.IS_LINUX:
args = ["nohup"] + args
self.Trace(f"Executing detached process using Popen({args})")
else:
self.Trace(f"Executing process using normal Popen({args})")
pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True)
self.Trace(f"pid={pid}")
return pid
def ExecutePythonScript(self, args, ExecDetach=True):
def executePythonScript(self, args, ExecDetach=True):
PythonExe = f"{sys.executable}"
argsWithPython = [f"{PythonExe}"] + args
return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
def Submit(self, *args, **kwargs):
def submit(self, *args, **kwargs):
return self.thredPool.submit(*args, **kwargs)
def asc2(self, data, convertToAscii=None):
@@ -340,24 +364,214 @@ class StashPluginHelper(StashInterface):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str'
def init_mergeMetadata(self, excludeMergeTags=None):
def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
# Must call init_mergeMetadata, before calling merge_metadata
def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
# Must call initMergeMetadata, before calling mergeMetadata
def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
if type(SrcData) is int:
SrcData = self.find_scene(SrcData)
DestData = self.find_scene(DestData)
return self._mergeMetadata.merge(SrcData, DestData)
def Progress(self, currentIndex, maxCount):
def progressBar(self, currentIndex, maxCount):
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
self.log.progress(progress)
def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
# Test via command line: pip uninstall -y pyYAML watchdog schedule requests
def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
    """Check that every module in *moduleNames* is importable; optionally install missing ones.

    Args:
        moduleNames: list of importable module names (NOT pip package names in all cases).
        install: when True, attempt self.installModule() for any missing module.
        silent: when True, suppress Trace-level progress messages (errors still logged).

    Returns:
        True if every module is (or was made) available, else False.
    """
    retrnValue = True
    for moduleName in moduleNames:
        try: # Try Python 3.3 > way
            # importlib imported per-iteration; harmless (cached) but kept as-is.
            import importlib
            import importlib.util
            if moduleName in sys.modules:
                # Already imported by this process; nothing to do.
                if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
            elif self.isModuleInstalled(moduleName):
                if not silent: self.Trace(f"Module {moduleName!r} is available.")
            else:
                # installModule returns 1 (installed now) or 2 (already satisfied); <= 0 is failure.
                if install and (results:=self.installModule(moduleName)) > 0:
                    if results == 1:
                        self.Log(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: self.Trace(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        self.Error(f"Can't find the {moduleName!r} module")
                    retrnValue = False
        except Exception as e:
            # Fallback path (pre-3.3 style): try a direct import, then install on failure.
            try:
                i = importlib.import_module(moduleName)
            except ImportError as e:
                if install and (results:=self.installModule(moduleName)) > 0:
                    if results == 1:
                        self.Log(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: self.Trace(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        tb = traceback.format_exc()
                        self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
                    retrnValue = False
    return retrnValue
def isModuleInstalled(self, moduleName):
    """Return True if *moduleName* can be imported, else False.

    Uses a throwaway __import__ as the availability probe. The broad except is
    deliberate: any failure during import (not just ImportError) counts as
    "not installed" and is logged rather than propagated.
    """
    try:
        __import__(moduleName)
        return True
    except Exception as e:
        tb = traceback.format_exc()
        self.Warn(f"Module {moduleName!r} is NOT installed!")
        self.Trace(f"Error: {e}\nTraceBack={tb}")
    # Removed a redundant trailing `pass` that followed the logging calls.
    return False
def installModule(self,moduleName):
    """Install *moduleName* using `pip install` run through os.popen.

    Returns:
        1  : module installed by this call (or verified installed afterwards)
        2  : pip reported "Requirement already satisfied"
        0  : installation failed (or an exception occurred)
        -1 : pip itself is unavailable (Linux-only pre-check)
    """
    # Historical pip-import install path, kept disabled on purpose (unreliable):
    # if not self.IS_DOCKER:
        # try:
            # self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
            # # First try pip import method. (This may fail in a future version of pip.)
            # self.installPackage(moduleName)
            # self.Trace(f"installPackage called for module {moduleName!r}")
            # if self.modulesInstalled(moduleNames=[moduleName], install=False):
                # self.Trace(f"Module {moduleName!r} installed")
                # return 1
            # self.Trace(f"Module {moduleName!r} still not installed.")
        # except Exception as e:
            # tb = traceback.format_exc()
            # self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
            # pass
    # else:
        # self.Trace("Running in Docker, so skipping pip import method.")
    try:
        if self.IS_LINUX:
            # Note: Linux may first need : sudo apt install python3-pip
            #       if error starts with "Command 'pip' not found"
            #       or includes "No module named pip"
            self.Log("Checking if pip installed.")
            results = os.popen(f"pip --version").read()
            if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                # Best-effort attempt to bootstrap pip, then re-check.
                results = os.popen(f"sudo apt install python3-pip").read()
                results = os.popen(f"pip --version").read()
                if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                    self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
                    return -1
            self.Trace("pip good.")
        if self.IS_FREEBSD:
            self.Warn("installModule may NOT work on freebsd")
        pipArg = ""
        if self.IS_DOCKER:
            # Docker images often have an externally-managed Python (PEP 668).
            pipArg = " --break-system-packages"
        self.Log(f"Attempting to install package {moduleName!r} via popen.")
        results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
        results = results.strip("\n")
        self.Trace(f"pip results = {results}")
        # Classify the pip output; fall back to an import probe if output is inconclusive.
        if results.find("Requirement already satisfied:") > -1:
            self.Trace(f"Requirement already satisfied for module {moduleName!r}")
            return 2
        elif results.find("Successfully installed") > -1:
            self.Trace(f"Successfully installed module {moduleName!r}")
            return 1
        elif self.modulesInstalled(moduleNames=[moduleName], install=False):
            self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
            return 1
        self.Error(f"Failed to install module {moduleName!r}")
    except Exception as e:
        tb = traceback.format_exc()
        self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
    return 0
def installPackage(self,package): # Should delete this. It doesn't work consistently
    """Install *package* through pip's Python API.

    NOTE(review): unreliable (pip's internal API is not stable); prefer installModule().

    Returns:
        True on apparent success, False if any exception occurred.
    """
    try:
        import pip
        if hasattr(pip, 'main'):
            pip.main(['install', package])
            self.Trace()
        else:
            # Newer pip versions moved main() into _internal (still not a public API).
            pip._internal.main(['install', package])
            self.Trace()
    except Exception as e:
        tb = traceback.format_exc()
        # Bug fix: the original referenced undefined name `moduleName` here,
        # raising NameError inside the except handler; the parameter is `package`.
        self.Error(f"Failed to install module {package!r}. Error: {e}\nTraceBack={tb}")
        return False
    return True
def isDocker(self):
    """Heuristically detect whether this process runs inside a Docker container.

    Checks for the /.dockerenv marker file first, then falls back to scanning
    /proc/self/cgroup for the string 'docker'.
    """
    if pathlib.Path('/.dockerenv').is_file():
        return True
    cgroup_file = pathlib.Path('/proc/self/cgroup')
    return cgroup_file.is_file() and 'docker' in cgroup_file.read_text()
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
    """Animate the Stash progress bar until self.stopProcessBarSpin becomes True.

    Intended to run on a worker thread (see startSpinningProcessBar); sweeps the
    bar position from 1 to maxPos repeatedly, sleeping between steps.
    """
    if trace:
        self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
    position = 1
    while not self.stopProcessBarSpin:
        if trace:
            self.Trace(f"progressBar({position}, {maxPos})")
        self.progressBar(position, maxPos)
        # Wrap back to the start once we've passed the end of the bar.
        position = 1 if position >= maxPos else position + 1
        time.sleep(sleepSeconds)
def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
    """Clear the stop flag and launch spinProcessBar on the thread pool."""
    self.stopProcessBarSpin = False
    if trace:
        self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
    spin_args = (self.spinProcessBar, sleepSeconds, maxPos, trace)
    self.submit(*spin_args)
def stopSpinningProcessBar(self, sleepSeconds = 1):
    """Signal spinProcessBar to exit, then pause briefly so it can notice the flag."""
    self.stopProcessBarSpin = True
    time.sleep(sleepSeconds)
def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
    """Return the ID of tag *tagName*, creating the tag if it does not exist.

    When deleteIfExist is True, an existing tag with that name is destroyed
    first and then recreated (useful to reset description/auto-tag settings).
    """
    existing = self.find_tags(q=tagName)
    if len(existing):
        first_match = existing[0]
        if not deleteIfExist:
            return first_match['id']
        self.destroy_tag(int(first_match['id']))
    created = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
    self.Log(f"Dup-tagId={created['id']}")
    return created['id']
def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
    """Remove tag *tagName* from a scene.

    Args:
        scene: a scene ID (int) or a scene-metadata dict containing 'id' and 'tags'.
        tagName: name of the tag to strip from the scene.

    Returns:
        True if the scene had the tag (and update_scene was called), else False.
    """
    scene_details = scene
    # Bug fix: `'id' not in scene` raises TypeError when scene is an int ID,
    # which the documented contract explicitly allows. Treat any non-dict
    # (or a dict missing 'id') as something to look up.
    if not isinstance(scene, dict) or 'id' not in scene:
        scene_details = self.find_scene(scene)
    remaining_tag_ids = []
    doesHaveTagName = False
    for tag in scene_details['tags']:
        if tag['name'] == tagName:
            doesHaveTagName = True
        else:
            remaining_tag_ids.append(tag['id'])
    if doesHaveTagName:
        # Re-submit the tag list without the removed tag.
        self.update_scene({'id': scene_details['id'], 'tag_ids': remaining_tag_ids})
    return doesHaveTagName
def addTag(self, scene, tagName): # scene can be scene ID or scene metadata
    """Add tag *tagName* to a scene, creating the tag if necessary.

    Args:
        scene: a scene ID (int) or a scene-metadata dict containing 'id' and 'tags'.
        tagName: name of the tag to add (created via createTagId when missing).
    """
    scene_details = scene
    # Bug fix: `'id' not in scene` raises TypeError when scene is an int ID,
    # which the documented contract explicitly allows. Treat any non-dict
    # (or a dict missing 'id') as something to look up.
    if not isinstance(scene, dict) or 'id' not in scene:
        scene_details = self.find_scene(scene)
    # Start the new tag list with the (possibly freshly created) tag's ID,
    # then append every existing tag except a same-named duplicate.
    tagIds = [self.createTagId(tagName)]
    for tag in scene_details['tags']:
        if tag['name'] != tagName:
            tagIds.append(tag['id'])
    self.update_scene({'id': scene_details['id'], 'tag_ids': tagIds})
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"""Runs a plugin operation.
The operation is run immediately and does not use the job queue.
This is a blocking call, and does not return until plugin completes.
Args:
plugin_id (ID): plugin_id
task_name (str, optional): Plugin task to perform
@@ -375,43 +589,26 @@ class StashPluginHelper(StashInterface):
"args": args,
}
if asyn:
self.Submit(self.call_GQL, query, variables)
self.submit(self.call_GQL, query, variables)
return f"Made asynchronous call for plugin {plugin_id}"
else:
return self.call_GQL(query, variables)
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
# #################################################################################################
# ############################################################################################################
# Functions which are candidates to be added to parent class use snake_case naming convention.
# ############################################################################################################
# The below functions extends class StashInterface with functions which are not yet in the class or
# fixes for functions which have not yet made it into official class.
def metadata_scan(self, paths:list=[], flags={}):
def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
scan_metadata_input = {"paths": paths}
if flags:
scan_metadata_input.update(flags)
else:
scanData = self.get_configuration_defaults("scan { ...ScanMetadataOptions }")
if scanData['scan'] != None:
scan_metadata_input.update(scanData.get("scan",{}))
elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
scan_metadata_input.update(scan_config)
result = self.call_GQL(query, {"input": scan_metadata_input})
return result["metadataScan"]
def get_all_scenes(self):
query_all_scenes = """
query AllScenes {
@@ -464,6 +661,43 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
    """Query Stash for duplicate scenes via phash comparison.

    Args:
        distance: PhashDistance tolerance for the phash match.
        fragment: GraphQL fragment body substituted for ...SceneSlim;
                  when falsy, a minimal { id } fragment is appended instead.
        duration_diff: maximum scene-duration difference in seconds.

    Returns:
        The list under the 'findDuplicateScenes' result key.
    """
    query = """
    query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
        findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
            ...SceneSlim
        }
    }
    """
    if fragment:
        query = re.sub(r'\.\.\.SceneSlim', fragment, query)
    else:
        query += "fragment SceneSlim on Scene { id }"
    gql_vars = { "distance": distance, "duration_diff": duration_diff }
    return self.call_GQL(query, gql_vars)['findDuplicateScenes']
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Direct SQL associated functions
def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
    """Fetch a row from the 'files' table by file ID or by scene metadata.

    Args:
        data: a file ID, or a scene-metadata dict with a populated 'files' list.
        raw_data: when True, return the raw sql_query result dict unmodified.

    Returns:
        The first result row, the raw result (raw_data=True), or None on error.
    """
    if data is None:  # was `== None`; identity test is the correct idiom
        return None
    # Bug fix: `'files' in data` raises TypeError when data is an int file ID,
    # which the documented contract allows; gate the dict probing on isinstance.
    if isinstance(data, dict) and 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
        file_id = data['files'][0]['id']
    else:
        file_id = data
    results = self.sql_query(f"select * from files where id = {file_id}")
    if raw_data:
        return results
    # Robustness: an empty 'rows' list previously raised IndexError.
    if 'rows' in results and len(results['rows']) > 0:
        return results['rows'][0]
    self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
    return None
def set_file_basename(self, id, basename):
    """Rename a file row's basename directly in the database.

    Security fix: single quotes in *basename* previously broke (and could
    inject into) the SQL string; they are now doubled per SQL literal rules.
    """
    safe_basename = basename.replace("'", "''")
    return self.sql_commit(f"update files set basename = '{safe_basename}' where id = {id}")
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None
@@ -537,3 +771,54 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
class taskQueue:
    """Inspect a Stash Task Queue snapshot (list of job dicts) for specific job types.

    Each job dict is expected to carry a 'description' string; matching is done
    on exact (or formatted) description text.
    """
    taskqueue = None
    def __init__(self, taskqueue):
        # taskqueue: list of job-detail dicts from stash.job_queue(), or None.
        self.taskqueue = taskqueue
    def tooManyScanOnTaskQueue(self, tooManyQty = 5):
        """Return True when at least *tooManyQty* 'Scanning...' jobs are queued."""
        if self.taskqueue is None:
            return False
        count = sum(1 for jobDetails in self.taskqueue if jobDetails['description'] == "Scanning...")
        return count >= tooManyQty
    def cleanJobOnTaskQueue(self):
        """Return True if a metadata-clean job is on the queue."""
        return any(jobDetails['description'] == "Cleaning..." for jobDetails in self.taskqueue)
    def cleanGeneratedJobOnTaskQueue(self):
        """Return True if a clean-generated-files job is on the queue."""
        return any(jobDetails['description'] == "Cleaning generated files..." for jobDetails in self.taskqueue)
    def isRunningPluginTaskJobOnTaskQueue(self, taskName):
        """Return True if a plugin task named *taskName* is on the queue.

        Bug fix: the original compared against the literal string
        "Running plugin task: {taskName}" (missing f-prefix), so it could
        never match any real job description.
        """
        return any(jobDetails['description'] == f"Running plugin task: {taskName}" for jobDetails in self.taskqueue)
    def tagDuplicatesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
    def clearDupTagsJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
    def generatePhashMatchingJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
    def deleteDuplicatesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
    def deleteTaggedScenesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")

View File

@@ -5,8 +5,7 @@
# Example: python filemonitor.py --url http://localhost:9999
import os, sys, time, pathlib, argparse, platform, traceback, logging
from StashPluginHelper import StashPluginHelper
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
from watchdog.observers import Observer # This is also needed for event attributes
from StashPluginHelper import taskQueue
from threading import Lock, Condition
from multiprocessing import shared_memory
from filemonitor_config import config
@@ -25,7 +24,8 @@ STOP_RUNNING_SIG = 32
parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop (kill) a running FileMonitor task.')
parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop a running FileMonitor task.')
parser.add_argument('--kill_que', '-k', dest='kill_job_task_que', type=str, help='Kill job on Task Queue while running in service mode (command line mode).')
parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.')
parser.add_argument('--silent', '--quit', '-q', dest='quit', action='store_true', help='Run in silent mode. No output to console or stderr. Use this when running from pythonw.exe')
parser.add_argument('--apikey', '-a', dest='apikey', type=str, help='API Key')
@@ -54,8 +54,10 @@ stash = StashPluginHelper(
maxbytes=5*1024*1024,
apiKey=parse_args.apikey
)
stash.Status(logLevel=logging.DEBUG)
stash.status(logLevel=logging.DEBUG)
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
stash.Trace(f"stash.JSON_INPUT={stash.JSON_INPUT}")
stash.modulesInstalled(["watchdog", "schedule", "requests"])
exitMsg = "Change success!!"
mutex = Lock()
@@ -86,6 +88,7 @@ fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConf
includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
excludePathChanges = stash.pluginConfig['excludePathChanges']
turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup']
NotInLibraryTagName = stash.pluginConfig['NotInLibraryTagName']
if stash.DRY_RUN:
stash.Log("Dry run mode is enabled.")
@@ -93,34 +96,50 @@ stash.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EV
StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin"
StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service"
StartFileMonitorAsAPluginTaskID = "start_library_monitor"
StartFileMonitorAsAServiceTaskID = "start_library_monitor_service"
StopFileMonitorAsAPluginTaskID = "stop_library_monitor"
SYNC_LIBRARY_REMOVE = "sync_library_remove"
SYNC_LIBRARY_TAG = "sync_library_tag"
CLEAR_SYNC_LIBRARY_TAG = "clear_sync_tags_task"
FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN
StopLibraryMonitorWaitingInTaskQueue = False
JobIdInTheQue = 0
def isJobWaitingToRun():
JobIdOf_StartAsAServiceTask = None
def isJobWaitingToRun(getJobIdOf_StartAsAServiceTask = False):
global StopLibraryMonitorWaitingInTaskQueue
global JobIdInTheQue
global JobIdOf_StartAsAServiceTask
global FileMonitorPluginIsOnTaskQue
FileMonitorPluginIsOnTaskQue = False
jobIsWaiting = False
taskQue = stash.job_queue()
for jobDetails in taskQue:
stash.Trace(f"(Job ID({jobDetails['id']})={jobDetails})")
if jobDetails['status'] == "READY":
if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
StopLibraryMonitorWaitingInTaskQueue = True
JobIdInTheQue = jobDetails['id']
jobIsWaiting = True
elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1:
FileMonitorPluginIsOnTaskQue = True
if getJobIdOf_StartAsAServiceTask:
if jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAServiceTaskName) > -1:
JobIdOf_StartAsAServiceTask = jobDetails['id']
stash.Trace(f"Found current running task '{jobDetails['description']}' with Job ID {JobIdOf_StartAsAServiceTask}")
return True
else:
if jobDetails['status'] == "READY":
if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
StopLibraryMonitorWaitingInTaskQueue = True
JobIdInTheQue = jobDetails['id']
jobIsWaiting = True
elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1:
FileMonitorPluginIsOnTaskQue = True
JobIdInTheQue = 0
return jobIsWaiting
if stash.CALLED_AS_STASH_PLUGIN and stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
if stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun(True)})")
class StashScheduler: # Stash Scheduler
def __init__(self):
import schedule # pip install schedule # https://github.com/dbader/schedule
@@ -292,7 +311,7 @@ class StashScheduler: # Stash Scheduler
if 'args' in task and len(task['args']) > 0:
args = args + [task['args']]
stash.Log(f"Executing command arguments {args}.")
return f"Execute process PID = {stash.ExecuteProcess(args)}"
return f"Execute process PID = {stash.executeProcess(args)}"
else:
stash.Error(f"Can not run task '{task['task']}', because it's missing 'command' field.")
return None
@@ -307,7 +326,7 @@ class StashScheduler: # Stash Scheduler
detached = True
if 'detach' in task:
detached = task['detach']
return f"Python process PID = {stash.ExecutePythonScript(args, ExecDetach=detached)}"
return f"Python process PID = {stash.executePythonScript(args, ExecDetach=detached)}"
else:
stash.Error(f"Can not run task '{task['task']}', because it's missing 'script' field.")
return None
@@ -345,8 +364,8 @@ class StashScheduler: # Stash Scheduler
taskMode = task['taskMode']
if ('taskQue' in task and task['taskQue'] == False) or taskName == None:
stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}")
# Asynchronous threading logic to call run_plugin, because it's a blocking call.
stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
# Asynchronous threading logic to call runPlugin, because it's a blocking call.
stash.runPlugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
return None
else:
stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}")
@@ -362,11 +381,11 @@ class StashScheduler: # Stash Scheduler
except:
pass
stash.Error("Failed to get response from Stash.")
if platform.system() == "Windows":
if stash.IS_WINDOWS:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-win.exe"
elif platform.system() == "Darwin": # MacOS
elif stash.IS_MAC_OS:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep} stash-macos "
elif platform.system().lower().startswith("linux"):
elif stash.IS_LINUX:
# ToDo: Need to verify this method will work for (stash-linux-arm32v6, stash-linux-arm32v7, and stash-linux-arm64v8)
if platform.system().lower().find("32v6") > -1:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm32v6"
@@ -376,7 +395,7 @@ class StashScheduler: # Stash Scheduler
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm64v8"
else:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux"
elif platform.system().lower().startswith("freebsd"):
elif stash.IS_FREEBSD:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-freebsd"
elif 'command' not in task or task['command'] == "":
stash.Error("Can not start Stash, because failed to determine platform OS. As a workaround, add 'command' field to this task.")
@@ -391,7 +410,7 @@ class StashScheduler: # Stash Scheduler
else:
stash.Error("Could not start Stash, because could not find executable Stash file '{execPath}'")
return None
result = f"Execute process PID = {stash.ExecuteProcess(args)}"
result = f"Execute process PID = {stash.executeProcess(args)}"
time.sleep(sleepAfterStart)
if "RunAfter" in task and len(task['RunAfter']) > 0:
for runAfterTask in task['RunAfter']:
@@ -456,6 +475,8 @@ lastScanJob = {
JOB_ENDED_STATUSES = ["FINISHED", "CANCELLED"]
def start_library_monitor():
from watchdog.observers import Observer # This is also needed for event attributes
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
global shouldUpdate
global TargetPaths
global lastScanJob
@@ -624,7 +645,7 @@ def start_library_monitor():
TargetPaths = []
TmpTargetPaths = list(set(TmpTargetPaths))
if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []:
stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}")
stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths} and/or {lastScanJob['DelayedProcessTargetPaths']}")
if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
if not stash.DRY_RUN:
if lastScanJob['id'] > -1:
@@ -657,11 +678,15 @@ def start_library_monitor():
lastScanJob['DelayedProcessTargetPaths'].append(path)
stash.Trace(f"lastScanJob['DelayedProcessTargetPaths'] = {lastScanJob['DelayedProcessTargetPaths']}")
if lastScanJob['id'] == -1:
stash.Trace(f"Calling metadata_scan for paths '{TmpTargetPaths}'")
lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths))
lastScanJob['TargetPaths'] = TmpTargetPaths
lastScanJob['timeAddedToTaskQueue'] = time.time()
stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}")
taskqueue = taskQueue(stash.job_queue())
if taskqueue.tooManyScanOnTaskQueue(7):
stash.Log(f"[metadata_scan] Skipping updating Stash for paths '{TmpTargetPaths}', because too many scans on Task Queue.")
else:
stash.Trace(f"[metadata_scan] Calling metadata_scan for paths '{TmpTargetPaths}'")
lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths))
lastScanJob['TargetPaths'] = TmpTargetPaths
lastScanJob['timeAddedToTaskQueue'] = time.time()
stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}")
if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN)
if RUN_GENERATE_CONTENT:
@@ -723,9 +748,81 @@ def start_library_monitor_service():
pass
stash.Trace("FileMonitor is not running, so it's safe to start it as a service.")
args = [f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{stash.STASH_URL}"]
if JobIdOf_StartAsAServiceTask != None:
args += ["-k", JobIdOf_StartAsAServiceTask]
if stash.API_KEY:
args = args + ["-a", stash.API_KEY]
stash.ExecutePythonScript(args)
args += ["-a", stash.API_KEY]
results = stash.executePythonScript(args)
stash.Trace(f"executePythonScript results='{results}'")
def synchronize_library(removeScene=False):
    """Find scenes whose file path lies outside every Stash library path.

    Args:
        removeScene: when True, delete the out-of-library scene from the
                     database; when False, tag it with NotInLibraryTagName.

    Uses module globals `stash` and `NotInLibraryTagName`.
    """
    stash.startSpinningProcessBar()
    scenes = stash.find_scenes(fragment='id tags {id name} files {path}')
    qtyResults = len(scenes)
    stash.Log(f"count = {qtyResults}")
    stash.stopSpinningProcessBar()
    # Removed a second find_scenes(fragment='id files {path}') call whose
    # result (sceneIDs) was never used — it only doubled the query cost.
    for Qty, scene in enumerate(scenes, start=1):
        stash.progressBar(Qty, qtyResults)
        scenePath = scene['files'][0]['path']
        # assumes every scene has at least one file entry — TODO confirm
        if any(scenePath.startswith(path) for path in stash.STASH_PATHS):
            continue  # scene is inside a library path; nothing to do
        stash.Log(f"Scene ID={scene['id']}; path={scenePath} not part of Stash Library")
        if removeScene:
            stash.destroy_scene(scene['id'])
            stash.Log(f"Removed Scene ID={scene['id']}; path={scenePath}")
        else:
            stash.addTag(scene, NotInLibraryTagName)
            stash.Trace(f"Tagged ({NotInLibraryTagName}) Scene ID={scene['id']}; path={scenePath}")
def manageTagggedScenes(clearTag=True):  # (sic) misspelled name kept — callers use it
    """Process every scene tagged with NotInLibraryTagName.

    Args:
        clearTag: when True, strip the tag from each scene; when False,
                  delete the scene from the database.

    Uses module globals `stash` and `NotInLibraryTagName`.
    """
    tagId = stash.find_tags(q=NotInLibraryTagName)
    if len(tagId) > 0 and 'id' in tagId[0]:
        tagId = tagId[0]['id']
    else:
        stash.Warn(f"Could not find tag ID for tag '{NotInLibraryTagName}'.")
        return
    QtyDup = 0
    QtyRemoved = 0
    QtyClearedTags = 0
    QtyFailedQuery = 0
    stash.Trace("#########################################################################")
    stash.startSpinningProcessBar()
    stash.Trace(f"Calling find_scenes with tagId={tagId}")
    sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
    stash.stopSpinningProcessBar()
    qtyResults = len(sceneIDs)
    stash.Trace(f"Found {qtyResults} scenes with tag ({NotInLibraryTagName}): sceneIDs = {sceneIDs}")
    for sceneID in sceneIDs:
        QtyDup += 1
        # Removed unused local `prgs` (the ratio was recomputed by progressBar anyway).
        stash.progressBar(QtyDup, qtyResults)
        scene = stash.find_scene(sceneID['id'])
        if scene is None or len(scene) == 0:
            stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
            QtyFailedQuery += 1
            continue
        if clearTag:
            # Keep every tag except the NotInLibrary tag, then push the new list.
            tags = [int(item['id']) for item in scene["tags"] if item['id'] != tagId]
            stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
            dataDict = {'id' : scene['id'], 'tag_ids' : tags}
            stash.Log(f"Updating scene with {dataDict}")
            stash.update_scene(dataDict)
            QtyClearedTags += 1
        else:
            stash.destroy_scene(scene['id'])
            stash.Log(f"Removed Scene ID={scene['id']}; path={scene['files'][0]['path']}")
            QtyRemoved += 1
    stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtyRemoved={QtyRemoved}, QtyFailedQuery={QtyFailedQuery}")
runTypeID=0
runTypeName=["NothingToDo", "stop_library_monitor", "StartFileMonitorAsAServiceTaskID", "StartFileMonitorAsAPluginTaskID", "CommandLineStartLibMonitor"]
@@ -742,13 +839,29 @@ try:
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
runTypeID=2
start_library_monitor_service()
stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT")
stash.Trace(f"{StartFileMonitorAsAServiceTaskID} transitioning to service mode.")
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
runTypeID=3
start_library_monitor()
stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT")
elif stash.PLUGIN_TASK_NAME == SYNC_LIBRARY_REMOVE:
runTypeID=5
synchronize_library(removeScene=tRUE)
stash.Trace(f"{SYNC_LIBRARY_REMOVE} EXIT")
elif stash.PLUGIN_TASK_NAME == SYNC_LIBRARY_TAG:
runTypeID=6
synchronize_library()
stash.Trace(f"{SYNC_LIBRARY_TAG} EXIT")
elif stash.PLUGIN_TASK_NAME == CLEAR_SYNC_LIBRARY_TAG:
runTypeID=7
manageTagggedScenes()
stash.Trace(f"{CLEAR_SYNC_LIBRARY_TAG} EXIT")
elif not stash.CALLED_AS_STASH_PLUGIN:
runTypeID=4
if parse_args.kill_job_task_que != None and parse_args.kill_job_task_que != "":
# Removing the job from the Task Queue is really only needed for Linux, but it should be OK to do in general.
stash.Log(f"Removing job ID {parse_args.kill_job_task_que} from the Task Queue, because transitioning to service mode.")
stash.stop_job(parse_args.kill_job_task_que)
start_library_monitor()
stash.Trace("Command line FileMonitor EXIT")
else:
@@ -756,6 +869,7 @@ try:
except Exception as e:
tb = traceback.format_exc()
stash.Error(f"Exception while running FileMonitor. runType='{runTypeName[runTypeID]}'; Error: {e}\nTraceBack={tb}")
stash.log.exception('Got exception on main handler')
stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
# ToDo: Add option to add path to library if path not included when calling metadata_scan

View File

@@ -40,3 +40,15 @@ tasks:
description: Run [Library Monitor] as a plugin (*Not recommended*)
defaultArgs:
mode: start_library_monitor
- name: Synchronize Library Tag
description: Tag (_NoLongerPartOfLibrary) scenes from database with paths no longer in Stash Library.
defaultArgs:
mode: sync_library_tag
- name: Synchronize Library Clean
description: Remove scenes from database with paths no longer in Stash Library.
defaultArgs:
mode: sync_library_remove
- name: Clear Sync Tags
description: Clear tag _NoLongerPartOfLibrary. Remove this tag from all files.
defaultArgs:
mode: clear_sync_tags_task

View File

@@ -63,6 +63,8 @@ config = {
"runCleanAfterDelete": False,
# Enable to run metadata_generate (Generate Content) after metadata scan.
"runGenerateContent": False,
# Tag name when tagging files that are no longer in Stash Library paths.
"NotInLibraryTagName" : "_NoLongerPartOfLibrary",
# When populated (comma separated list [lower-case]), only scan for changes for specified file extension
"fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"

View File

@@ -1,3 +1,4 @@
stashapp-tools >= 0.2.50
pyYAML
requests
watchdog
schedule