Forked from GitHub/Axter-Stash
Added option to include paths for scheduled task
Added rename_generated_files
@@ -7,160 +7,43 @@
 # Research:
 # Research following links to complete this plugin:
 # https://github.com/WithoutPants/stash-plugin-duplicate-finder
+#
+# Look at options in programs from the following link:
+# https://video.stackexchange.com/questions/25302/how-can-i-find-duplicate-videos-by-content
+#
+# Python library for parse-reparsepoint
+# https://pypi.org/project/parse-reparsepoint/
+# pip install parse-reparsepoint
+#
 # Look at stash API find_duplicate_scenes
-import os
-import sys
-import time
-import shutil
-import fileinput
-import hashlib
-import json
-from pathlib import Path
-import requests
-import logging
-from logging.handlers import RotatingFileHandler
-import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
-from stashapi.stashapp import StashInterface
+import os, sys, time, pathlib, argparse, platform
+from StashPluginHelper import StashPluginHelper
 from DupFileManager_config import config # Import config from DupFileManager_config.py
 
-# **********************************************************************
-# Constant global variables --------------------------------------------
-LOG_FILE_PATH = log_file_path = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
-FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
-PLUGIN_ARGS_MODE = False
-PLUGIN_ID = Path(__file__).stem
-
-RFH = RotatingFileHandler(
-    filename=LOG_FILE_PATH,
-    mode='a',
-    maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K
-    backupCount=2,
-    encoding=None,
-    delay=0
-)
-TIMEOUT = 5
-CONTINUE_RUNNING_SIG = 99
-
-# **********************************************************************
-# Global variables --------------------------------------------
-exitMsg = "Change success!!"
-runningInPluginMode = False
-
-# Configure local log file for plugin within plugin folder having a limited max log file size
-logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
-logger = logging.getLogger(Path(__file__).stem)
-
-# **********************************************************************
-# ----------------------------------------------------------------------
-# Code section to fetch variables from Plugin UI and from DupFileManager_settings.py
-# Check if being called as Stash plugin
-gettingCalledAsStashPlugin = True
-mangeDupFilesTask = True
-StdInRead = None
-try:
-    if len(sys.argv) == 1:
-        print(f"Attempting to read stdin. (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
-        StdInRead = sys.stdin.read()
-        # for line in fileinput.input():
-        #     StdInRead = line
-        #     break
-    else:
-        raise Exception("Not called in plugin mode.")
-except:
-    gettingCalledAsStashPlugin = False
-    print(f"Either len(sys.argv) not expected value OR sys.stdin.read() failed! (StdInRead={StdInRead}) (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
-    pass
-
-if gettingCalledAsStashPlugin and StdInRead:
-    print(f"StdInRead={StdInRead} (len(sys.argv)={len(sys.argv)})", file=sys.stderr)
-    runningInPluginMode = True
-    json_input = json.loads(StdInRead)
-    FRAGMENT_SERVER = json_input["server_connection"]
-else:
-    runningInPluginMode = False
-    FRAGMENT_SERVER = {'Scheme': config['endpoint_Scheme'], 'Host': config['endpoint_Host'], 'Port': config['endpoint_Port'], 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(Path(__file__).resolve().parent), 'PluginDir': Path(__file__).resolve().parent}
-    print("Running in non-plugin mode!", file=sys.stderr)
-
-stash = StashInterface(FRAGMENT_SERVER)
-PLUGINCONFIGURATION = stash.get_configuration()["plugins"]
-STASHCONFIGURATION = stash.get_configuration()["general"]
-STASHPATHSCONFIG = STASHCONFIGURATION['stashes']
-stashPaths = []
+parser = argparse.ArgumentParser()
+parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
+parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
+parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
+parser.add_argument('--dryrun', '-d', dest='dryrun', action='store_true', help='Do dryrun for deleting duplicate files. No files are deleted, and only logging occurs.')
+parse_args = parser.parse_args()
+
 settings = {
-    "ignoreReparsepoints": True,
-    "ignoreSymbolicLinks": True,
     "mergeDupFilename": True,
     "moveToTrashCan": False,
+    "whitelist": [],
     "zzdebugTracing": False,
     "zzdryRun": False,
 }
-CanUpdatePluginConfigSettings = False
-try:
-    plugins_configuration = stash.find_plugins_config()
-    CanUpdatePluginConfigSettings = True
-except Exception as e:
-    logger.exception('Got exception on main handler')
-    logger.error('This exception most likely occurred because stashapp-tools needs to be upgraded. To fix this error, run the following command:\npip install --upgrade stashapp-tools')
-    pass
-
-if PLUGIN_ID in PLUGINCONFIGURATION and (not CanUpdatePluginConfigSettings or 'INITIAL_VALUES_SET1' in PLUGINCONFIGURATION[PLUGIN_ID]):
-    settings.update(PLUGINCONFIGURATION[PLUGIN_ID])
-# ----------------------------------------------------------------------
-debugTracing = settings["zzdebugTracing"]
-debugTracing = True
-
-for item in STASHPATHSCONFIG:
-    stashPaths.append(item["path"])
-
-# Extract dry_run setting from settings
-DRY_RUN = settings["zzdryRun"]
-dry_run_prefix = ''
-try:
-    PLUGIN_ARGS_MODE = json_input['args']["mode"]
-except:
-    pass
-logger.info(f"\nStarting (runningInPluginMode={runningInPluginMode}) (debugTracing={debugTracing}) (DRY_RUN={DRY_RUN}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (stash.stash_version()={stash.stash_version()})************************************************")
-if debugTracing: logger.info(f"Debug Tracing (stash.get_configuration()={stash.get_configuration()})")
-if debugTracing: logger.info("settings: %s " % (settings,))
-if debugTracing: logger.info(f"Debug Tracing (STASHCONFIGURATION={STASHCONFIGURATION})")
-if debugTracing: logger.info(f"Debug Tracing (stashPaths={stashPaths})")
-if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID={PLUGIN_ID})")
-if debugTracing: logger.info(f"Debug Tracing (PLUGINCONFIGURATION={PLUGINCONFIGURATION})")
-
-if PLUGIN_ID in PLUGINCONFIGURATION:
-    if 'INITIAL_VALUES_SET1' not in PLUGINCONFIGURATION[PLUGIN_ID]:
-        if debugTracing: logger.info(f"Initializing plugin ({PLUGIN_ID}) settings (PLUGINCONFIGURATION[PLUGIN_ID]={PLUGINCONFIGURATION[PLUGIN_ID]})")
-        try:
-            plugins_configuration = stash.find_plugins_config()
-            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
-            stash.configure_plugin(PLUGIN_ID, {"INITIAL_VALUES_SET1": True})
-            logger.info('Called stash.configure_plugin(PLUGIN_ID, {"INITIAL_VALUES_SET1": True})')
-            plugins_configuration = stash.find_plugins_config()
-            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
-            stash.configure_plugin(PLUGIN_ID, settings)
-            logger.info('Called stash.configure_plugin(PLUGIN_ID, settings)')
-            plugins_configuration = stash.find_plugins_config()
-            if debugTracing: logger.info(f"Debug Tracing (plugins_configuration={plugins_configuration})")
-        except Exception as e:
-            logger.exception('Got exception on main handler')
-            try:
-                if debugTracing: logger.info("Debug Tracing................")
-                stash.configure_plugin(plugin_id=PLUGIN_ID, values=[{"zzdebugTracing": False}], init_defaults=True)
-                if debugTracing: logger.info("Debug Tracing................")
-            except Exception as e:
-                logger.exception('Got exception on main handler')
-                pass
-            pass
-    # stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
-    if debugTracing: logger.info("Debug Tracing................")
-
-if DRY_RUN:
-    logger.info("Dry run mode is enabled.")
-    dry_run_prefix = "Would've "
-if debugTracing: logger.info("Debug Tracing................")
-# ----------------------------------------------------------------------
-# **********************************************************************
+stash = StashPluginHelper(
+    stash_url=parse_args.stash_url,
+    debugTracing=parse_args.trace,
+    settings=settings,
+    config=config
+)
+stash.Status()
+stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
+
+stash.Trace(f"(stashPaths={stash.STASH_PATHS})")
 
 def realpath(path):
     """
@@ -184,60 +67,72 @@ def realpath(path):
 
 def isReparsePoint(path):
     import win32api
     import win32con
+    from parse_reparsepoint import Navigator
     FinalPathname = realpath(path)
-    logger.info(f"(path='{path}') (FinalPathname='{FinalPathname}')")
+    stash.Log(f"(path='{path}') (FinalPathname='{FinalPathname}')")
     if FinalPathname != path:
-        logger.info(f"Symbolic link '{path}'")
+        stash.Log(f"Symbolic link '{path}'")
        return True
     if not os.path.isdir(path):
         path = os.path.dirname(path)
     return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT
 
-def mangeDupFiles():
-    import platform
-    if debugTracing: logger.info(f"Debug Tracing (platform.system()={platform.system()})")
+def mangeDupFiles(merge=False, deleteDup=False, DryRun=False):
+    stash.Trace(f"Debug Tracing (platform.system()={platform.system()})")
     myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link
     myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point
     myTestPath3 = r"B:\_\SpecialSet\Amateur Anal Attempts\Amateur Anal Attempts 4.mp4" # symbolic link
     myTestPath4 = r"E:\Stash\plugins\RenameFile\README.md" # symbolic link
     myTestPath5 = r"E:\_\David-Maisonave\Axter-Stash\plugins\RenameFile\README.md" # symbolic link
     myTestPath6 = r"E:\_\David-Maisonave\Axter-Stash\plugins\DeleteMe\Renamer\README.md" # not reparse point
-    logger.info(f"Testing '{myTestPath1}'")
+    stash.Log(f"Testing '{myTestPath1}'")
     if isReparsePoint(myTestPath1):
-        logger.info(f"isSymLink '{myTestPath1}'")
+        stash.Log(f"isSymLink '{myTestPath1}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath1}'")
+        stash.Log(f"Not isSymLink '{myTestPath1}'")
 
     if isReparsePoint(myTestPath2):
-        logger.info(f"isSymLink '{myTestPath2}'")
+        stash.Log(f"isSymLink '{myTestPath2}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath2}'")
+        stash.Log(f"Not isSymLink '{myTestPath2}'")
 
     if isReparsePoint(myTestPath3):
-        logger.info(f"isSymLink '{myTestPath3}'")
+        stash.Log(f"isSymLink '{myTestPath3}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath3}'")
+        stash.Log(f"Not isSymLink '{myTestPath3}'")
 
     if isReparsePoint(myTestPath4):
-        logger.info(f"isSymLink '{myTestPath4}'")
+        stash.Log(f"isSymLink '{myTestPath4}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath4}'")
+        stash.Log(f"Not isSymLink '{myTestPath4}'")
 
     if isReparsePoint(myTestPath5):
-        logger.info(f"isSymLink '{myTestPath5}'")
+        stash.Log(f"isSymLink '{myTestPath5}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath5}'")
+        stash.Log(f"Not isSymLink '{myTestPath5}'")
 
     if isReparsePoint(myTestPath6):
-        logger.info(f"isSymLink '{myTestPath6}'")
+        stash.Log(f"isSymLink '{myTestPath6}'")
     else:
-        logger.info(f"Not isSymLink '{myTestPath6}'")
+        stash.Log(f"Not isSymLink '{myTestPath6}'")
     return
 
-if mangeDupFilesTask:
-    mangeDupFiles()
-    if debugTracing: logger.info(f"stop_library_monitor EXIT................")
+if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task":
+    mangeDupFiles(merge=True)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "delete_duplicates":
+    mangeDupFiles(deleteDup=True)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif stash.PLUGIN_TASK_NAME == "dryrun_delete_duplicates":
+    mangeDupFiles(deleteDup=True, DryRun=True)
+    stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+elif parse_args.remove:
+    mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun)
+    stash.Trace(f"Delete duplicate (DryRun={parse_args.dryrun}) EXIT")
+elif parse_args.dryrun:
+    mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun)
+    stash.Trace(f"Dryrun delete duplicate EXIT")
 else:
-    logger.info(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
+    stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})")
 
-if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
@@ -3,14 +3,6 @@ description: Manages duplicate files.
 version: 0.1.0
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 settings:
-  ignoreReparsepoints:
-    displayName: Ignore Reparse Points
-    description: Enable to ignore reparse-points when deleting duplicates.
-    type: BOOLEAN
-  ignoreSymbolicLinks:
-    displayName: Ignore Symbolic Links
-    description: Enable to ignore symbolic links when deleting duplicates.
-    type: BOOLEAN
   mergeDupFilename:
     displayName: Before deletion, merge potential source in the duplicate file names for tag names, performers, and studios.
     description: Enable to
@@ -19,6 +11,10 @@ settings:
     displayName: Trash Can
     description: Enable to move files to the trash can instead of permanently deleting the file.
     type: BOOLEAN
+  whitelist:
+    displayName: White List
+    description: A comma separated list of preferential paths to determine which duplicate should be the primary. Listed in order of preference.
+    type: STRING
   zzdebugTracing:
     displayName: Debug Tracing
     description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
@@ -2,16 +2,41 @@
 # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
 # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 config = {
-    # Define white list of preferential paths to determine which duplicate should be the primary.
-    "whitelist_paths": [], # Example: "whitelist_paths": ['C:\SomeMediaPath\subpath', 'E:\YetAnotherPath\subpath', 'E:\YetAnotherPath\secondSubPath']
     # Define black list to determine which duplicates should be deleted first.
-    "blacklist_paths": [], # Example: "blacklist_paths": ['C:\SomeMediaPath\subpath', 'E:\YetAnotherPath\subpath', 'E:\YetAnotherPath\secondSubPath']
+    "blacklist_paths": [], # Example: "blacklist_paths": ['C:\\SomeMediaPath\\subpath', 'E:\\YetAnotherPath\\subpath', 'E:\\YetAnotherPath\\secondSubPath']
+
+    # If enabled, ignore reparsepoints. For Windows NT drives only.
+    "ignoreReparsepoints" : True,
+    # If enabled, ignore symbolic links.
+    "ignoreSymbolicLinks" : True,
+
+    # If enabled, swap higher resolution duplicate files to preferred path.
+    "swapHighRes" : True,
+    # If enabled, swap longer length media files to preferred path. Longer is determined by the significantLongerTime value.
+    "swapLongLength" : True,
+    # If enabled, swap longer file name to preferred path.
+    "swapLongFileName" : False,
+
+    # If enabled, when finding exact duplicate files, keep the file with the shorter name. The default is to keep the file with the longer name.
+    "keepShorterFileName" : False,
+    # If enabled, when finding duplicate files, keep media with the shorter time length. The default is to keep media with the longer time length.
+    "keepShorterLength" : False,
+    # If enabled, when finding duplicate files, keep media with the lower resolution. The default is to keep media with the higher resolution.
+    "keepLowerResolution" : False,
+    # If enabled, keep duplicate media with high resolution over media with significantly longer time.
+    "keepHighResOverLen" : False, # Requires keepBothHighResAndLongerLen = False
+    # The threshold as to what percentage is considered a significantly longer time. Default is 15% longer.
+    "significantLongerTime" : 15, # 15% longer time
+    # If enabled, keep both duplicate files if the LOWER resolution file is significantly longer.
+    "keepBothHighResAndLongerLen" : True,
+
     # Define ignore list to avoid specific directories. No action is taken on any file in the ignore list.
-    "ignore_paths": [], # Example: "ignore_paths": ['C:\SomeMediaPath\subpath', 'E:\YetAnotherPath\subpath', 'E:\YetAnotherPath\secondSubPath']
+    "ignore_paths": [], # Example: "ignore_paths": ['C:\\SomeMediaPath\\subpath', 'E:\\YetAnotherPath\\subpath', 'E:\\YetAnotherPath\\secondSubPath']
     # Keep empty to check all paths, or populate it with the only paths to check for duplicates
-    "onlyCheck_paths": [], # Example: "onlyCheck_paths": ['C:\SomeMediaPath\subpath', 'E:\YetAnotherPath\subpath', 'E:\YetAnotherPath\secondSubPath']
+    "onlyCheck_paths": [], # Example: "onlyCheck_paths": ['C:\\SomeMediaPath\\subpath', 'E:\\YetAnotherPath\\subpath', 'E:\\YetAnotherPath\\secondSubPath']
     # Alternative path to move duplicate files. Path needs to be in the same drive as the duplicate file.
-    "dup_path": "", # Example: "C:\TempDeleteFolder"
+    "dup_path": "", # Example: "C:\\TempDeleteFolder"
 
     # The following fields are ONLY used when running DupFileManager in script mode
     "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
plugins/DupFileManager/StashPluginHelper.py (new file, 354 lines)
@@ -0,0 +1,354 @@
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import inspect, sys, os, pathlib, logging, json
+from stashapi.stash_types import PhashDistance
+import __main__
+
+# StashPluginHelper (By David Maisonave aka Axter)
+# See end of this file for example usage
+# Log Features:
+# Can optionally log out to multiple outputs for each Log or Trace call.
+# Logging includes source code line number
+# Sets a maximum plugin log file size
+# Stash Interface Features:
+# Gets STASH_URL value from command line argument and/or from STDIN_READ
+# Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
+# Sets PLUGIN_ID based on the main script file name (in lower case)
+# Gets PLUGIN_TASK_NAME value
+# Sets pluginSettings (The plugin UI settings)
+# Misc Features:
+# Gets DRY_RUN value from command line argument and/or from UI and/or from config file
+# Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
+# Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
+# Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+class StashPluginHelper(StashInterface):
+    # Primary Members for external reference
+    PLUGIN_TASK_NAME = None
+    PLUGIN_ID = None
+    PLUGIN_CONFIGURATION = None
+    pluginSettings = None
+    pluginConfig = None
+    STASH_INTERFACE_INIT = False
+    STASH_URL = None
+    STASH_CONFIGURATION = None
+    JSON_INPUT = None
+    DEBUG_TRACING = False
+    DRY_RUN = False
+    CALLED_AS_STASH_PLUGIN = False
+    RUNNING_IN_COMMAND_LINE_MODE = False
+    FRAGMENT_SERVER = None
+    STASHPATHSCONFIG = None
+    STASH_PATHS = []
+
+    # printTo argument
+    LOG_TO_FILE = 1
+    LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+    LOG_TO_STASH = 8
+    LOG_TO_WARN = 16
+    LOG_TO_ERROR = 32
+    LOG_TO_CRITICAL = 64
+    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+
+    # Misc class variables
+    MAIN_SCRIPT_NAME = None
+    LOG_LEVEL = logging.INFO
+    LOG_FILE_DIR = None
+    LOG_FILE_NAME = None
+    STDIN_READ = None
+    pluginLog = None
+    logLinePreviousHits = []
+
+    # Prefix message value
+    LEV_TRACE = "TRACE: "
+    LEV_DBG = "DBG: "
+    LEV_INF = "INF: "
+    LEV_WRN = "WRN: "
+    LEV_ERR = "ERR: "
+    LEV_CRITICAL = "CRITICAL: "
+
+    # Default format
+    LOG_FORMAT = "[%(asctime)s] %(message)s"
+
+    # Externally modifiable variables
+    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set target output for normal logging
+    # Warn message goes to both plugin log file and stash when sent to Stash log file.
+    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+
+    def __init__(self,
+                 debugTracing = None,         # Set debugTracing to True so as to output debug and trace logging
+                 logFormat = LOG_FORMAT,      # Plugin log line format
+                 dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+                 maxbytes = 2*1024*1024,      # Max size of plugin log file
+                 backupcount = 2,             # Backup counts when log file size reaches max size
+                 logToWrnSet = 0,             # Customize the target output set which will get warning logging
+                 logToErrSet = 0,             # Customize the target output set which will get error logging
+                 logToNormSet = 0,            # Customize the target output set which will get normal logging
+                 logFilePath = "",            # Plugin log file. If empty, the log file name will be set based on current python file name and path
+                 mainScriptName = "",         # The main plugin script file name (full path)
+                 pluginID = "",
+                 settings = None,             # Default settings for UI fields
+                 config = None,               # From pluginName_config.py or pluginName_setting.py
+                 fragmentServer = None,
+                 stash_url = None,            # Stash URL (endpoint URL) Example: http://localhost:9999
+                 DebugTraceFieldName = "zzdebugTracing",
+                 DryRunFieldName = "zzdryRun",
+                 setStashLoggerAsPluginLogger = False):
+        if logToWrnSet: self.log_to_wrn_set = logToWrnSet
+        if logToErrSet: self.log_to_err_set = logToErrSet
+        if logToNormSet: self.log_to_norm = logToNormSet
+        if stash_url and len(stash_url): self.STASH_URL = stash_url
+        self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
+        self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower()
+        # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
+        self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
+        self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
+        RFH = RotatingFileHandler(
+            filename=self.LOG_FILE_NAME,
+            mode='a',
+            maxBytes=maxbytes,
+            backupCount=backupcount,
+            encoding=None,
+            delay=0
+        )
+        if fragmentServer:
+            self.FRAGMENT_SERVER = fragmentServer
+        else:
+            self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
+
+        if debugTracing: self.DEBUG_TRACING = debugTracing
+        if config:
+            self.pluginConfig = config
+            if DebugTraceFieldName in self.pluginConfig:
+                self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName]
+            if DryRunFieldName in self.pluginConfig:
+                self.DRY_RUN = self.pluginConfig[DryRunFieldName]
+
+        if len(sys.argv) > 1:
+            self.RUNNING_IN_COMMAND_LINE_MODE = True # Assign to the instance attribute so Status() reports it
+            if not debugTracing or not stash_url:
+                for argValue in sys.argv[1:]:
+                    if argValue.lower() == "--trace":
+                        self.DEBUG_TRACING = True
+                    elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
+                        self.DRY_RUN = True
+                    elif ":" in argValue and not self.STASH_URL:
+                        self.STASH_URL = argValue
+            if self.STASH_URL:
+                endpointUrlArr = self.STASH_URL.split(":")
+                if len(endpointUrlArr) == 3:
+                    self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
+                    self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
+                    self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
+            super().__init__(self.FRAGMENT_SERVER)
+            self.STASH_INTERFACE_INIT = True
+        else:
+            try:
+                self.STDIN_READ = sys.stdin.read()
+                self.CALLED_AS_STASH_PLUGIN = True
+            except:
+                pass
+            if self.STDIN_READ:
+                self.JSON_INPUT = json.loads(self.STDIN_READ)
+                if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
+                    self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
+                self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
+                self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
+                super().__init__(self.FRAGMENT_SERVER)
+                self.STASH_INTERFACE_INIT = True
+
+        if self.STASH_INTERFACE_INIT:
+            self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
+            self.STASH_CONFIGURATION = self.get_configuration()["general"]
+            self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
+            for item in self.STASHPATHSCONFIG:
+                self.STASH_PATHS.append(item["path"])
+            if settings:
+                self.pluginSettings = settings
+                if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
+                    self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
+                if DebugTraceFieldName in self.pluginSettings:
+                    self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName]
+                if DryRunFieldName in self.pluginSettings:
+                    self.DRY_RUN = self.pluginSettings[DryRunFieldName]
+        if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+
+        logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
+        self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
+        if setStashLoggerAsPluginLogger:
+            self.log = self.pluginLog
+
+    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False):
+        if printTo == 0:
+            printTo = self.log_to_norm
+        elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+            logLevel = logging.ERROR
+            printTo = self.log_to_err_set
+        elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+            logLevel = logging.CRITICAL
+            printTo = self.log_to_err_set
+        elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+            logLevel = logging.WARN
+            printTo = self.log_to_wrn_set
+        if lineNo == -1:
+            lineNo = inspect.currentframe().f_back.f_lineno
+        LN_Str = f"[LN:{lineNo}]"
+        # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+        if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
+            if levelStr == "": levelStr = self.LEV_DBG
+            if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+            if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+        elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+            if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+            if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+            if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+        elif logLevel == logging.WARN:
+            if levelStr == "": levelStr = self.LEV_WRN
+            if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+            if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+        elif logLevel == logging.ERROR:
+            if levelStr == "": levelStr = self.LEV_ERR
+            if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+            if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+        elif logLevel == logging.CRITICAL:
+            if levelStr == "": levelStr = self.LEV_CRITICAL
+            if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+            if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+        if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+            print(f"{LN_Str} {levelStr}{logMsg}")
+        if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+            print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+
+    def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1):
+        if printTo == 0: printTo = self.LOG_TO_FILE
+        if lineNo == -1:
+            lineNo = inspect.currentframe().f_back.f_lineno
+        logLev = logging.INFO if logAlways else logging.DEBUG
+        if self.DEBUG_TRACING or logAlways:
+            if logMsg == "":
+                logMsg = f"Line number {lineNo}..."
+            self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways)
+
+    # Log once per session. Only logs the first time called from a particular line number in the code.
+    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False):
+        lineNo = inspect.currentframe().f_back.f_lineno
+        if self.DEBUG_TRACING or logAlways:
+            FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+            if FuncAndLineNo in self.logLinePreviousHits:
+                return
+            self.logLinePreviousHits.append(FuncAndLineNo)
+            self.Trace(logMsg, printTo, logAlways, lineNo)
+
+    # Log INFO on first call, then do Trace on remaining calls.
+    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True):
+        if printTo == 0: printTo = self.LOG_TO_FILE
+        lineNo = inspect.currentframe().f_back.f_lineno
+        FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+        if FuncAndLineNo in self.logLinePreviousHits:
+            if traceOnRemainingCalls:
+                self.Trace(logMsg, printTo, logAlways, lineNo)
+        else:
+            self.logLinePreviousHits.append(FuncAndLineNo)
+            self.Log(logMsg, printTo, logging.INFO, lineNo)
+
+    def Warn(self, logMsg, printTo = 0):
+        if printTo == 0: printTo = self.log_to_wrn_set
+        lineNo = inspect.currentframe().f_back.f_lineno
+        self.Log(logMsg, printTo, logging.WARN, lineNo)
+
+    def Error(self, logMsg, printTo = 0):
+        if printTo == 0: printTo = self.log_to_err_set
+        lineNo = inspect.currentframe().f_back.f_lineno
+        self.Log(logMsg, printTo, logging.ERROR, lineNo)
+
+    def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+        if printTo == 0: printTo = self.log_to_norm
+        if lineNo == -1:
+            lineNo = inspect.currentframe().f_back.f_lineno
+        self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
+                 printTo, logLevel, lineNo)
+
+    def ExecuteProcess(self, args):
+        import platform, subprocess
+        is_windows = any(platform.win32_ver())
+        pid = None
+        self.Trace(f"is_windows={is_windows} args={args}")
+        if is_windows:
+            self.Trace("Executing process using Windows DETACHED_PROCESS")
+            DETACHED_PROCESS = 0x00000008
+            pid = subprocess.Popen(args, creationflags=DETACHED_PROCESS, shell=True).pid
+        else:
+            self.Trace("Executing process using normal Popen")
+            pid = subprocess.Popen(args).pid
+        self.Trace(f"pid={pid}")
+        return pid
+
+    def ExecutePythonScript(self, args):
+        PythonExe = f"{sys.executable}"
+        argsWithPython = [f"{PythonExe}"] + args
+        return self.ExecuteProcess(argsWithPython)
+
+    # Extends class StashInterface with functions which are not yet in the class
+    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
+        query = """
+        mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
+            metadataAutoTag(input: $input)
+        }
+        """
+        metadata_autotag_input = {
+            "paths": paths,
+            "performers": performers,
+            "studios": studios,
+            "tags": tags,
+        }
+        result = self.call_GQL(query, {"input": metadata_autotag_input})
+        return result
+
+    def backup_database(self):
+        return self.call_GQL("mutation { backupDatabase(input: {download: false})}")
+
+    def optimise_database(self):
+        return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")
+
+    def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
+        query = """
+        mutation MetadataCleanGenerated($input: CleanGeneratedInput!) {
+            metadataCleanGenerated(input: $input)
+        }
+        """
+        clean_metadata_input = {
+            "blobFiles": blobFiles,
+            "dryRun": dryRun,
+            "imageThumbnails": imageThumbnails,
+            "markers": markers,
+            "screenshots": screenshots,
+            "sprites": sprites,
+            "transcodes": transcodes,
+        }
+        result = self.call_GQL(query, {"input": clean_metadata_input})
+        return result
+
+    def rename_generated_files(self):
+        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+
+    # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
+    #     query = """
+    #     query FindDuplicateScenes($distance: Int) {
+    #         findDuplicateScenes(distance: $distance) {
+    #             ...SceneSlim
+    #         }
+    #     }
+    #     """
+    #     if fragment:
+    #         query = re.sub(r'\.\.\.SceneSlim', fragment, query)
+    #     else:
+    #         query = """
+    #         query FindDuplicateScenes($distance: Int) {
+    #             findDuplicateScenes(distance: $distance)
+    #         }
+    #         """
+    #     variables = {
+    #         "distance": distance
+    #     }
+    #     result = self.call_GQL(query, variables)
+    #     return result['findDuplicateScenes']
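
The header comment above says "See end of this file for example usage", but the example itself is not part of this diff. A minimal, hypothetical sketch of driving the class from a plugin script (the settings keys, messages, and flag combinations are illustrative assumptions, not code from the commit):

    from StashPluginHelper import StashPluginHelper

    settings = {"zzdebugTracing": False, "zzdryRun": False}  # default UI settings
    stash = StashPluginHelper(settings=settings)             # reads argv or stdin to connect
    stash.Status()                                           # logs a mode/connection summary
    stash.Log("Normal message")                              # plugin log file + console by default
    stash.Trace("Written only when tracing is enabled")
    # printTo targets are bit flags, so outputs can be combined:
    stash.Log("To plugin log file and Stash's log", printTo=stash.LOG_TO_FILE + stash.LOG_TO_STASH)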
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.0 (By David Maisonave)
+# FileMonitor: Ver 0.8.1 (By David Maisonave)
 FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
 - Updates Stash when any file change occurs in the Stash library.
 - **Task Scheduler**: Runs scheduled tasks based on the scheduler configuration in **filemonitor_config.py**.
@@ -37,6 +37,8 @@ class StashPluginHelper(StashInterface):
     CALLED_AS_STASH_PLUGIN = False
     RUNNING_IN_COMMAND_LINE_MODE = False
     FRAGMENT_SERVER = None
+    STASHPATHSCONFIG = None
+    STASH_PATHS = []
 
     # printTo argument
     LOG_TO_FILE = 1
@@ -71,7 +73,8 @@ class StashPluginHelper(StashInterface):
     # Externally modifiable variables
     log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
     log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set target output for normal logging
-    log_to_wrn_set = LOG_TO_FILE + LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+    # Warn message goes to both plugin log file and stash when sent to Stash log file.
+    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
 
     def __init__(self,
                  debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
@@ -158,6 +161,9 @@ class StashPluginHelper(StashInterface):
         if self.STASH_INTERFACE_INIT:
             self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
             self.STASH_CONFIGURATION = self.get_configuration()["general"]
+            self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
+            for item in self.STASHPATHSCONFIG:
+                self.STASH_PATHS.append(item["path"])
             if settings:
                 self.pluginSettings = settings
                 if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
@@ -322,10 +328,13 @@ class StashPluginHelper(StashInterface):
         }
         result = self.call_GQL(query, {"input": clean_metadata_input})
         return result
-    # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None, duration_diff=0):
+
+    def rename_generated_files(self):
+        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
+
+    # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
     #     query = """
     #     query FindDuplicateScenes($distance: Int) {
-    #         findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
+    #         findDuplicateScenes(distance: $distance) {
     #             ...SceneSlim
     #         }
     #     }
@@ -335,13 +344,11 @@ class StashPluginHelper(StashInterface):
     #     else:
     #         query = """
     #         query FindDuplicateScenes($distance: Int) {
-    #             findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
-    #             }
+    #             findDuplicateScenes(distance: $distance)
     #         }
     #         """
     #     variables = {
-    #         "distance": distance,
-    #         "duration_diff": duration_diff
+    #         "distance": distance
     #     }
     #     result = self.call_GQL(query, variables)
     #     return result['findDuplicateScenes']
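
rename_generated_files is one of the two changes named in the commit summary. As a usage note: it simply wraps Stash's migrateHashNaming mutation, so once a StashPluginHelper instance exists it can be invoked directly (a sketch, assuming an initialized instance named stash):

    result = stash.rename_generated_files()  # issues: mutation MigrateHashNaming {migrateHashNaming}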
@@ -45,11 +45,12 @@ stash = StashPluginHelper(
 stash.Status()
 stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
 
+# stash.Log(f"{stash.find_duplicate_scenes()}")
+
 exitMsg = "Change success!!"
 mutex = Lock()
 signal = Condition(mutex)
 shouldUpdate = False
-TargetPaths = []
 
 SHAREDMEMORY_NAME = "DavidMaisonaveAxter_FileMonitor" # Unique name for shared memory
 RECURSIVE = stash.pluginSettings["recursiveDisabled"] == False
@@ -69,11 +70,11 @@ SPECIAL_FILE_NAME = f"{SPECIAL_FILE_DIR}{os.sep}trigger_to_kill_filemonitor_by_d
 if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME):
     os.remove(SPECIAL_FILE_NAME)
 
-STASHPATHSCONFIG = stash.STASH_CONFIGURATION['stashes']
-stashPaths = []
-for item in STASHPATHSCONFIG:
-    stashPaths.append(item["path"])
-stash.Trace(f"(stashPaths={stashPaths})")
+fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else []
+includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
+excludePathChanges = stash.pluginConfig['excludePathChanges']
+stash.Trace(f"(includePathChanges={includePathChanges})")
 
 if stash.DRY_RUN:
     stash.Log("Dry run mode is enabled.")
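
The three pluginConfig keys read above come from filemonitor_config.py. A hypothetical fragment showing their expected shapes (the paths and extensions are invented for illustration):

    # Hypothetical filemonitor_config.py values; shapes inferred from the code above.
    config = {
        "fileExtTypes": "mp4,mkv",              # empty string means monitor all file types
        "includePathChanges": [r"C:\Media"],    # empty list falls back to stash.STASH_PATHS
        "excludePathChanges": [r"C:\Media\tmp"],
    }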
@@ -81,6 +82,10 @@ stash.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EV
 
 StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin"
 StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service"
+StartFileMonitorAsAPluginTaskID = "start_library_monitor"
+StartFileMonitorAsAServiceTaskID = "start_library_monitor_service"
+
+
 FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN
 StopLibraryMonitorWaitingInTaskQueue = False
 JobIdInTheQue = 0
@@ -103,49 +108,80 @@ def isJobWaitingToRun():
|
|||||||
JobIdInTheQue = 0
|
JobIdInTheQue = 0
|
||||||
return jobIsWaiting
|
return jobIsWaiting
|
||||||
|
|
||||||
if stash.CALLED_AS_STASH_PLUGIN:
|
if stash.CALLED_AS_STASH_PLUGIN and stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
|
||||||
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")
|
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")
|
||||||
|
|
||||||
class StashScheduler: # Stash Scheduler
|
class StashScheduler: # Stash Scheduler
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
import schedule # pip install schedule # https://github.com/dbader/schedule
|
import schedule # pip install schedule # https://github.com/dbader/schedule
|
||||||
|
global SIGNAL_TIMEOUT
|
||||||
for task in stash.pluginConfig['task_scheduler']:
|
for task in stash.pluginConfig['task_scheduler']:
|
||||||
if 'hours' in task and task['hours'] > 0:
|
if 'task' not in task:
|
||||||
|
stash.Error(f"Task is missing required task field. task={task}")
|
||||||
|
elif 'hours' in task:
|
||||||
|
if task['hours'] > 0:
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['hours']} hours interval")
|
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['hours']} hours interval")
|
||||||
schedule.every(task['hours']).hours.do(self.runTask, task)
|
schedule.every(task['hours']).hours.do(self.runTask, task)
|
||||||
elif 'minutes' in task and task['minutes'] > 0:
|
if task['hours'] > 167: # Warn when using a week or more of hours
|
||||||
|
stash.Warn(f"Using {task['hours']} hours in task '{task['task']}'. Should use the weekday syntax instead.")
|
||||||
|
elif 'minutes' in task:
|
||||||
|
if task['minutes'] > 0:
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['minutes']} minutes interval")
|
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['minutes']} minutes interval")
|
||||||
schedule.every(task['minutes']).minutes.do(self.runTask, task)
|
schedule.every(task['minutes']).minutes.do(self.runTask, task)
|
||||||
elif 'days' in task and task['days'] > 0: # Left here for backward compatibility, but should use weekday logic instead.
|
if task['minutes'] > 10079: # Warn when using a week or more of minutes
|
||||||
|
stash.Warn(f"Using {task['minutes']} minutes in task '{task['task']}'. Should use the weekday syntax instead.")
|
||||||
|
elif 'days' in task: # Left here for backward compatibility, but should use weekday logic instead.
|
||||||
|
if task['days'] > 0:
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['days']} days interval")
|
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['days']} days interval")
|
||||||
schedule.every(task['days']).days.do(self.runTask, task)
|
schedule.every(task['days']).days.do(self.runTask, task)
|
||||||
elif 'seconds' in task and task['seconds'] > 0: # This is mainly here for test purposes only
|
if task['days'] > 6: # Warn when using a week or more of days
|
||||||
|
stash.Warn(f"Using {task['days']} days in task '{task['task']}'. Should use the weekday syntax instead.")
|
||||||
|
elif 'seconds' in task: # This is mainly here for test purposes only
|
||||||
|
if task['seconds'] > 0:
|
||||||
if SIGNAL_TIMEOUT > task['seconds']:
|
if SIGNAL_TIMEOUT > task['seconds']:
|
||||||
stash.Log(f"Changing SIGNAL_TIMEOUT from value {SIGNAL_TIMEOUT} to {task['seconds']} to allow '{task['task']}' to get triggered timely")
|
stash.Log(f"Changing SIGNAL_TIMEOUT from value {SIGNAL_TIMEOUT} to {task['seconds']} to allow '{task['task']}' to get triggered timely")
|
||||||
SIGNAL_TIMEOUT = task['seconds']
|
SIGNAL_TIMEOUT = task['seconds']
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['seconds']} seconds interval")
|
stash.Log(f"Adding to scheduler task '{task['task']}' at {task['seconds']} seconds interval")
|
||||||
schedule.every(task['seconds']).seconds.do(self.runTask, task)
|
schedule.every(task['seconds']).seconds.do(self.runTask, task)
|
||||||
elif 'weekday' in task and 'time' in task:
|
elif 'weekday' in task and 'time' in task:
|
||||||
|
if task['time'].upper() == "DISABLED":
|
||||||
|
stash.Trace(f"Skipping task '{task['task']}', because it's disabled. To enable this task, change the time field to a valid time. Example: '07:00'")
|
||||||
|
elif len(task['time']) != 5 or task['time'][2] != ":":
|
||||||
|
stash.Error(f"Skipping task '{task['task']}', because time ({task['time']}) is invalid. Change the time field to a valid time. Example: '07:00'")
|
||||||
|
else:
|
||||||
weekDays = task['weekday'].lower()
|
weekDays = task['weekday'].lower()
|
||||||
if 'monthly' in task:
|
if 'monthly' in task:
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
|
stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
|
||||||
else:
|
else:
|
||||||
stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")
|
stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")
|
||||||
|
|
||||||
|
hasValidDay = False
|
||||||
if "monday" in weekDays:
|
if "monday" in weekDays:
|
||||||
schedule.every().monday.at(task['time']).do(self.runTask, task)
|
schedule.every().monday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "tuesday" in weekDays:
|
if "tuesday" in weekDays:
|
||||||
schedule.every().tuesday.at(task['time']).do(self.runTask, task)
|
schedule.every().tuesday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "wednesday" in weekDays:
|
if "wednesday" in weekDays:
|
||||||
schedule.every().wednesday.at(task['time']).do(self.runTask, task)
|
schedule.every().wednesday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "thursday" in weekDays:
|
if "thursday" in weekDays:
|
||||||
schedule.every().thursday.at(task['time']).do(self.runTask, task)
|
schedule.every().thursday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "friday" in weekDays:
|
if "friday" in weekDays:
|
||||||
schedule.every().friday.at(task['time']).do(self.runTask, task)
|
schedule.every().friday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "saturday" in weekDays:
|
if "saturday" in weekDays:
|
||||||
schedule.every().saturday.at(task['time']).do(self.runTask, task)
|
schedule.every().saturday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
if "sunday" in weekDays:
|
if "sunday" in weekDays:
|
||||||
schedule.every().sunday.at(task['time']).do(self.runTask, task)
|
schedule.every().sunday.at(task['time']).do(self.runTask, task)
|
||||||
|
hasValidDay = True
|
||||||
|
|
||||||
|
if not hasValidDay:
|
||||||
|
stash.Error(f"Task '{task['task']}' is missing valid day(s) in weekday field. weekday = '{task['weekday']}'")
|
||||||
|
else:
|
||||||
|
stash.Error(f"Task '{task['task']}' is missing fields.")
|
||||||
self.checkSchedulePending()
|
self.checkSchedulePending()
|
||||||
|
|
||||||
def runTask(self, task):
|
def runTask(self, task):
|
||||||
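The weekday registration above is plain usage of the `schedule` library. A minimal, self-contained sketch of the same pattern outside FileMonitor (the task dict and the runTask stub here are illustrative only, not plugin code):

import time
import schedule  # pip install schedule

def runTask(task):
    print(f"Running task {task['task']}")

task = {"task": "Backup", "weekday": "monday,sunday", "time": "01:00"}
weekDays = task["weekday"].lower()
if "monday" in weekDays:
    schedule.every().monday.at(task["time"]).do(runTask, task)
if "sunday" in weekDays:
    schedule.every().sunday.at(task["time"]).do(runTask, task)

while True:
    schedule.run_pending()  # fires any job whose scheduled time has passed
    time.sleep(60)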
@@ -158,15 +194,21 @@ class StashScheduler: # Stash Scheduler
         if dayOfTheMonth < FirstAllowedDate or dayOfTheMonth > LastAllowedDate:
             stash.Log(f"Skipping task {task['task']} because today is not the right {task['weekday']} of the month. Target range is between {FirstAllowedDate} and {LastAllowedDate}.")
             return
 
+        targetPaths = includePathChanges
+        if 'paths' in task:
+            targetPaths = task['paths']
+
+        result = None
         if task['task'] == "Clean":
-            stash.metadata_clean(paths=stashPaths, dry_run=stash.DRY_RUN)
+            result = stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN)
         elif task['task'] == "Clean Generated Files":
-            stash.metadata_clean_generated()
+            result = stash.metadata_clean_generated()
         elif task['task'] == "Generate":
-            stash.metadata_generate()
+            result = stash.metadata_generate()
         elif task['task'] == "Backup":
             stash.LogOnce("Note: Backup task does not get listed in the Task Queue, but user can verify that it started by looking in the Stash log file as an INFO level log line.")
-            stash.backup_database()
+            result = stash.backup_database()
             if stash.pluginSettings['zmaximumBackups'] < 2:
                 stash.TraceOnce(f"Skipping DB backup file trim because zmaximumBackups={stash.pluginSettings['zmaximumBackups']}. Value has to be greater than 1.")
             elif 'backupDirectoryPath' in stash.STASH_CONFIGURATION:
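The monthly check above compares the day of the month against a window; the computation of FirstAllowedDate/LastAllowedDate is outside this hunk. A hedged sketch of the likely arithmetic: the Nth occurrence of any weekday always falls in the day-of-month range [7*(N-1)+1, 7*N]:

import datetime

def nth_weekday_window(n):
    # 1st occurrence: days 1-7; 2nd: days 8-14; and so on.
    return 7 * (n - 1) + 1, 7 * n

dayOfTheMonth = datetime.datetime.today().day
FirstAllowedDate, LastAllowedDate = nth_weekday_window(2)  # "monthly" : 2
print(dayOfTheMonth < FirstAllowedDate or dayOfTheMonth > LastAllowedDate)  # True -> skip the task today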
@@ -178,27 +220,27 @@ class StashScheduler: # Stash Scheduler
             else:
                 stash.TraceOnce(f"Skipping DB backup file trim because backupDirectoryPath does NOT exist. backupDirectoryPath={stash.STASH_CONFIGURATION['backupDirectoryPath']}")
         elif task['task'] == "Scan":
-            stash.metadata_scan(paths=stashPaths)
+            result = stash.metadata_scan(paths=targetPaths)
         elif task['task'] == "Auto Tag":
-            stash.metadata_autotag(paths=stashPaths)
+            result = stash.metadata_autotag(paths=targetPaths)
         elif task['task'] == "Optimise Database":
-            stash.optimise_database()
+            result = stash.optimise_database()
         elif task['task'] == "GQL":
-            stash.call_GQL(task['input'])
+            result = stash.call_GQL(task['input'])
         elif task['task'] == "python":
             script = task['script'].replace("<plugin_path>", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
             stash.Log(f"Executing python script {script}.")
             args = [script]
-            if len(task['args']) > 0:
+            if 'args' in task and len(task['args']) > 0:
                 args = args + [task['args']]
-            stash.ExecutePythonScript(args)
+            result = f"Python process PID = {stash.ExecutePythonScript(args)}"
         elif task['task'] == "execute":
             cmd = task['command'].replace("<plugin_path>", f"{pathlib.Path(__file__).resolve().parent}{os.sep}")
             stash.Log(f"Executing command {cmd}.")
             args = [cmd]
-            if len(task['args']) > 0:
+            if 'args' in task and len(task['args']) > 0:
                 args = args + [task['args']]
-            stash.ExecuteProcess(args)
+            result = f"Execute process PID = {stash.ExecuteProcess(args)}"
         else:
             # ToDo: Add code to check if plugin is installed.
             stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}")
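StashPluginHelper's ExecutePythonScript and ExecuteProcess are not shown in this diff; judging by the new result strings, they return the spawned process PID. A rough stand-in (an assumption, not the helper's source) that spawns a process without blocking and returns its PID:

import subprocess, sys

def execute_process(args):
    proc = subprocess.Popen(args)  # returns immediately; the child keeps running
    return proc.pid

pid = execute_process([sys.executable, "test_script_hello_world.py", "--MyArguments", "Hello"])
print(f"Python process PID = {pid}")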
@@ -208,6 +250,9 @@ class StashScheduler: # Stash Scheduler
                 stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}")
                 pass
 
+        if result:
+            stash.Trace(f"Stash task '{task['task']}' result={result}")
+
     def trimDbFiles(self, dbPath, maxFiles):
         if not os.path.exists(dbPath):
             stash.LogOnce(f"Exiting trimDbFiles, because path {dbPath} does not exist.")
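The trimDbFiles body falls outside this hunk; a minimal sketch of the technique its name and maxFiles argument imply (an assumption: keep the newest maxFiles backup files, delete the rest; assumes maxFiles >= 1):

import os

def trim_db_files(dbPath, maxFiles):
    files = sorted(
        (os.path.join(dbPath, f) for f in os.listdir(dbPath)),
        key=os.path.getmtime,  # oldest first
    )
    for oldFile in files[:-maxFiles]:  # everything except the newest maxFiles
        os.remove(oldFile)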
@@ -228,8 +273,11 @@ class StashScheduler: # Stash Scheduler
 
     def checkSchedulePending(self):
         import schedule # pip install schedule # https://github.com/dbader/schedule
+        stash.Trace("Checking if task pending.")
         schedule.run_pending()
+        stash.Trace("Pending check complete.")
 
+TargetPaths = []
 def start_library_monitor():
     global shouldUpdate
     global TargetPaths
@@ -247,10 +295,25 @@ def start_library_monitor():
     RunCleanMetadata = False
     stashScheduler = StashScheduler() if stash.pluginSettings['turnOnScheduler'] else None
     event_handler = watchdog.events.FileSystemEventHandler()
+    def doIgnoreFileExt(chng_path, addToTargetPaths = False):
+        global TargetPaths
+        chng_path_lwr = chng_path.lower()
+        if len(fileExtTypes) > 0:
+            suffix = pathlib.Path(chng_path_lwr).suffix.lstrip(".")
+            if suffix not in fileExtTypes:
+                return True
+        if len(excludePathChanges) > 0:
+            for path in excludePathChanges:
+                if chng_path_lwr.startswith(path.lower()):
+                    return True
+        if addToTargetPaths:
+            TargetPaths.append(chng_path)
+        return False
+
     def on_created(event):
         global shouldUpdate
-        global TargetPaths
-        TargetPaths.append(event.src_path)
+        if doIgnoreFileExt(event.src_path, True):
+            return
         stash.Log(f"CREATE *** '{event.src_path}'")
         with mutex:
             shouldUpdate = True
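doIgnoreFileExt assumes fileExtTypes and excludePathChanges are already lowered/listified. A sketch of how those inputs can be derived from the config fields added later in this commit (the names mirror those fields; the exact wiring in filemonitor.py is an assumption):

import pathlib

fileExtTypes = [x.strip() for x in "mp4,mkv".split(",") if x.strip()]  # from "fileExtTypes" comma list
excludePathChanges = ["C:\\MyVideos\\SomeSubFolder\\"]                 # from "excludePathChanges"

chng_path = "C:\\MyVideos\\new_clip.MKV"
suffix = pathlib.Path(chng_path.lower()).suffix.lstrip(".")
print(suffix in fileExtTypes)  # True -> the event is processed, not ignored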
@@ -258,9 +321,9 @@ def start_library_monitor():
 
     def on_deleted(event):
         global shouldUpdate
-        global TargetPaths
         nonlocal RunCleanMetadata
-        TargetPaths.append(event.src_path)
+        if doIgnoreFileExt(event.src_path, True):
+            return
         stash.Log(f"DELETE *** '{event.src_path}'")
         with mutex:
             shouldUpdate = True
@@ -270,6 +333,8 @@ def start_library_monitor():
     def on_modified(event):
         global shouldUpdate
         global TargetPaths
+        if doIgnoreFileExt(event.src_path):
+            return
         if SCAN_MODIFIED:
             TargetPaths.append(event.src_path)
             stash.Log(f"MODIFIED *** '{event.src_path}'")
@@ -282,7 +347,8 @@ def start_library_monitor():
     def on_moved(event):
         global shouldUpdate
         global TargetPaths
-        TargetPaths.append(event.src_path)
+        if doIgnoreFileExt(event.src_path, True):
+            return
         TargetPaths.append(event.dest_path)
         stash.Log(f"MOVE *** from '{event.src_path}' to '{event.dest_path}'")
         with mutex:
@@ -292,6 +358,8 @@ def start_library_monitor():
     def on_any_event(event):
         global shouldUpdate
         global TargetPaths
+        if doIgnoreFileExt(event.src_path):
+            return
         if SCAN_ON_ANY_EVENT or event.src_path == SPECIAL_FILE_DIR:
             stash.Log(f"Any-Event *** '{event.src_path}'")
             TargetPaths.append(event.src_path)
@@ -309,8 +377,8 @@ def start_library_monitor():
 
     observer = Observer()
 
-    # Iterate through stashPaths
-    for path in stashPaths:
+    # Iterate through includePathChanges
+    for path in includePathChanges:
         observer.schedule(event_handler, path, recursive=RECURSIVE)
         stash.Log(f"Observing {path}")
     observer.schedule(event_handler, SPECIAL_FILE_DIR, recursive=RECURSIVE)
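A minimal, self-contained watchdog sketch of the Observer pattern used above: one handler whose callbacks are replaced, one Observer, one watched path (watching "." here is illustrative only):

import time
import watchdog.events
from watchdog.observers import Observer

event_handler = watchdog.events.FileSystemEventHandler()
event_handler.on_created = lambda event: print(f"CREATE *** '{event.src_path}'")

observer = Observer()
observer.schedule(event_handler, ".", recursive=True)
observer.start()
try:
    time.sleep(5)  # let a few events arrive
finally:
    observer.stop()
    observer.join()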
@@ -324,7 +392,7 @@ def start_library_monitor():
     TmpTargetPaths = []
     with mutex:
         while not shouldUpdate:
-            stash.Trace("While not shouldUpdate")
+            stash.TraceOnce("While not shouldUpdate")
             if stash.CALLED_AS_STASH_PLUGIN and isJobWaitingToRun():
                 if FileMonitorPluginIsOnTaskQue:
                     stash.Log(f"Another task (JobID={JobIdInTheQue}) is waiting on the queue. Will restart FileMonitor to allow other task to run.")
@@ -340,11 +408,11 @@ def start_library_monitor():
             stash.LogOnce("Waiting for a file change-trigger.")
             signal.wait(timeout=SIGNAL_TIMEOUT)
             if stash.pluginSettings['turnOnScheduler'] and not shouldUpdate:
-                stash.Trace("Checking the scheduler.")
+                stash.TraceOnce("Checking the scheduler.")
             elif shouldUpdate:
-                stash.Trace("File change trigger occurred.")
+                stash.LogOnce("File change trigger occurred.")
             else:
-                stash.Trace("Wait timeout occurred.")
+                stash.TraceOnce("Wait timeout occurred.")
         shouldUpdate = False
         TmpTargetPaths = []
         for TargetPath in TargetPaths:
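The mutex/signal pair above behaves like a threading Condition: the event handlers set shouldUpdate and notify, while the main loop waits with a timeout so the scheduler still gets polled. A sketch of that idiom (how mutex/signal are constructed is not shown in this diff, so this is an assumption):

import threading

mutex = threading.Lock()
signal = threading.Condition(mutex)
shouldUpdate = False
SIGNAL_TIMEOUT = 2

def file_event():
    global shouldUpdate
    with mutex:
        shouldUpdate = True
        signal.notify()

threading.Timer(1, file_event).start()  # simulate a file-change trigger
with mutex:
    while not shouldUpdate:
        signal.wait(timeout=SIGNAL_TIMEOUT)  # wakes on notify or on timeout
print("trigger received")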
@@ -367,6 +435,7 @@ def start_library_monitor():
         stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}")
         if len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
             if not stash.DRY_RUN:
+                # ToDo: Consider using create_scene, update_scene, and destroy_scene over general method metadata_scan
                 stash.metadata_scan(paths=TmpTargetPaths)
         if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
             stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN)
@@ -439,12 +508,12 @@ if parse_args.stop or parse_args.restart or stash.PLUGIN_TASK_NAME == "stop_libr
         stash.Trace(f"Restart FileMonitor EXIT")
     else:
         stash.Trace(f"Stop FileMonitor EXIT")
-elif stash.PLUGIN_TASK_NAME == "start_library_monitor_service":
+elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
     start_library_monitor_service()
-    stash.Trace(f"start_library_monitor_service EXIT")
+    stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT")
-elif stash.PLUGIN_TASK_NAME == "start_library_monitor" or not stash.CALLED_AS_STASH_PLUGIN:
+elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID or not stash.CALLED_AS_STASH_PLUGIN:
     start_library_monitor()
-    stash.Trace(f"start_library_monitor EXIT")
+    stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT")
 else:
     stash.Log(f"Nothing to do!!! (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})")
@@ -1,6 +1,6 @@
 name: FileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occur in the Stash library paths.
-version: 0.8.0
+version: 0.8.1
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 settings:
   recursiveDisabled:
@@ -31,33 +31,45 @@ config = {
     # The following task is scheduled monthly
     {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
 
+    # The following is the syntax used for a plugin task. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
+    # This task requires plugin [Path Parser], and it's disabled by default.
+    {"task" : "Create Tags", "pluginId" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # To enable this task, change time "DISABLED" to a valid time.
+
+    # Example#A1: Task to call the call_GQL API with custom input
+    {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "DISABLED"}, # To enable, change "DISABLED" to a valid time
+
+    # Example#A2: Task to call a python script. When this task is executed, the keyword <plugin_path> is replaced by the filemonitor.py current directory.
+    # The args field is NOT required.
+    {"task" : "python", "script" : "<plugin_path>test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to a valid time
+
+    # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
+    {"task" : "Scan", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+    {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+    {"task" : "Clean", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]
+
+    # Example#A4: Task which calls Migrations -> [Rename generated files]
+    {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (twice weekly) example
+
     # The above weekday method is the more reliable method to schedule tasks, because it doesn't rely on FileMonitor running continuously (non-stop).
 
     # The below examples use the frequency field method, which can work with minutes and hours. A zero frequency value disables the task.
     # Note: Both seconds and days are also supported for the frequency field.
     # However, seconds is mainly used for test purposes.
     # And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
-    # Note:
-    # The below example tasks are done using hours and minutes because the task is easily disabled (deactivated) by a zero value entry.
-    # Any of these task types can be converted to a daily, weekly, or monthly syntax.
+    # The below example tasks are done using hours and minutes; however, any of these task types can be converted to a daily, weekly, or monthly syntax.
 
-    # The following is the syntax used for plugins. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
-    {"task" : "Create Tags", "pluginId" : "pathParser", "hours" : 0}, # This task requires plugin [Path Parser]. To enable this task change the zero to a positive number.
 
-    # Example task for calling another Stash plugin, which needs plugin name and plugin ID.
+    # Example#B1: Task for calling another Stash plugin, which needs the plugin name and plugin ID.
     {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
 
-    # Example task to call call_GQL API with custom input
-    {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "minutes" : 0},
+    # Example#B2: Task to execute a command
+    {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},
 
-    # Example task to call a python script. When this task is executed, the keyword <plugin_path> is replaced by filemonitor.py current directory.
-    {"task" : "python", "script" : "<plugin_path>test_script_hello_world.py", "args" : "--MyArguments Hello", "minutes" : 0},
+    # Example#B3: Task to execute a command with an optional args field, using the keyword <plugin_path>, which gets replaced with the filemonitor.py current directory.
+    {"task" : "execute", "command" : "<plugin_path>HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
 
-    # Example task to execute a command
-    {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "args" : "", "hours" : 0},
+    # Commented out **test** tasks.
+    # {"task" : "Clean", "seconds" : 30},
+    # {"task" : "Scan", "paths" : [r"B:\_\SpecialSet", r"B:\_\Casting\Latina"], "seconds" : 30}
-    # Commented out test task.
-    # {"task" : "Backup", "seconds" : 30},
 ],
 
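A hedged sketch of how a frequency-field entry maps onto the `schedule` calls in filemonitor.py: a zero value is simply never registered, which is what "disabled" means here (runTask is a stand-in, not plugin code):

import schedule

def runTask(task):
    print(f"Would run {task['task']}")

task = {"task": "GQL", "input": "mutation OptimiseDatabase { optimiseDatabase }", "minutes": 0}
if task.get("minutes", 0) > 0:
    schedule.every(task["minutes"]).minutes.do(runTask, task)
else:
    print(f"Task '{task['task']}' is disabled (zero frequency)")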
     # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
@@ -66,6 +78,12 @@ config = {
     "runCleanAfterDelete": False,
     # Enable to run metadata_generate (Generate Content) after metadata scan.
     "runGenerateContent": False,
+    # When populated (comma separated list [lower-case]), only scan for changes for the specified file extensions
+    "fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"
+    # When populated, only include file changes in specified paths.
+    "includePathChanges" : [], # Example: ["C:\\MyVideos", "C:\\MyImages"]
+    # When populated, exclude file changes in paths that start with specified entries.
+    "excludePathChanges" : [], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"]
 
     # The following fields are ONLY used when running FileMonitor in script mode.
     "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
@@ -82,6 +100,9 @@ config = {
     # Enable to delete special file immediately after it's created in stop process.
     "deleteSpecFileInStop": False,
 
+    # Below are placeholders for **possible** future features.
+    # !!! Not yet implemented !!!
     # When enabled, if CREATE flag is triggered, DupFileManager task is called if the plugin is installed.
     "onCreateCallDupFileManager": False, # Not yet implemented!!!!
+    # !!! Not yet implemented !!!
 }