diff --git a/StashPluginHelper/StashPluginHelper.py b/StashPluginHelper/StashPluginHelper.py
index 06368da..a9be414 100644
--- a/StashPluginHelper/StashPluginHelper.py
+++ b/StashPluginHelper/StashPluginHelper.py
@@ -1,12 +1,3 @@
-from stashapi.stashapp import StashInterface
-from logging.handlers import RotatingFileHandler
-import re, inspect, sys, os, pathlib, logging, json
-import concurrent.futures
-from stashapi.stash_types import PhashDistance
-import __main__
-
-_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
-
# StashPluginHelper (By David Maisonave aka Axter)
# See end of this file for example usage
# Log Features:
@@ -24,6 +15,14 @@ _ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
# Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
# Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
# Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+from enum import Enum, IntEnum
+import __main__
+
class StashPluginHelper(StashInterface):
# Primary Members for external reference
PLUGIN_TASK_NAME = None
@@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface):
API_KEY = None
excludeMergeTags = None
+ # class EnumInt(IntEnum):
+ # def __repr__(self) -> str:
+ # return f"{self.__class__.__name__}.{self.name}"
+ # def __str__(self) -> str:
+ # return str(self.value)
+ # def serialize(self):
+ # return self.value
+
+ class EnumValue(Enum):
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}.{self.name}"
+ def __str__(self) -> str:
+ return str(self.value)
+ def __add__(self, other):
+ return self.value + other.value
+ def serialize(self):
+ return self.value
+
# printTo argument
- LOG_TO_FILE = 1
- LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
- LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
- LOG_TO_STASH = 8
- LOG_TO_WARN = 16
- LOG_TO_ERROR = 32
- LOG_TO_CRITICAL = 64
- LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+ class LogTo(IntEnum):
+ FILE = 1
+ CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+ STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+ STASH = 8
+ WARN = 16
+ ERROR = 32
+ CRITICAL = 64
+ ALL = FILE + CONSOLE + STDERR + STASH
+
+ class DbgLevel(IntEnum):
+ TRACE = 1
+ DBG = 2
+ INF = 3
+ WRN = 4
+ ERR = 5
+ CRITICAL = 6
+
+ DBG_LEVEL = DbgLevel.INF
# Misc class variables
MAIN_SCRIPT_NAME = None
@@ -61,6 +89,25 @@ class StashPluginHelper(StashInterface):
LOG_FILE_DIR = None
LOG_FILE_NAME = None
STDIN_READ = None
+ stopProcessBarSpin = True
+ updateProgressbarOnIter = 0
+ currentProgressbarIteration = 0
+
+ class OS_Type(IntEnum):
+ WINDOWS = 1
+ LINUX = 2
+ MAC_OS = 3
+ FREEBSD = 4
+ UNKNOWN_OS = 5
+
+ OS_TYPE = OS_Type.UNKNOWN_OS
+
+ IS_DOCKER = False
+ IS_WINDOWS = False
+ IS_LINUX = False
+ IS_FREEBSD = False
+ IS_MAC_OS = False
+
pluginLog = None
logLinePreviousHits = []
thredPool = None
@@ -68,45 +115,76 @@ class StashPluginHelper(StashInterface):
_mergeMetadata = None
encodeToUtf8 = False
convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+ progressBarIsEnabled = True
# Prefix message value
- LEV_TRACE = "TRACE: "
- LEV_DBG = "DBG: "
- LEV_INF = "INF: "
- LEV_WRN = "WRN: "
- LEV_ERR = "ERR: "
- LEV_CRITICAL = "CRITICAL: "
-
- # Default format
- LOG_FORMAT = "[%(asctime)s] %(message)s"
+ class Level(EnumValue):
+ TRACE = "TRACE: "
+ DBG = "DBG: "
+ INF = "INF: "
+ WRN = "WRN: "
+ ERR = "ERR: "
+ CRITICAL = "CRITICAL: "
+ class Constant(EnumValue):
+ # Default format
+ LOG_FORMAT = "[%(asctime)s] %(message)s"
+ ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_"
+ NOT_IN_LIST = 2147483646
+
# Externally modifiable variables
- log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
- log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging
+ log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages
+ log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be change so-as to set target output for normal logging
# Warn message goes to both plugin log file and stash when sent to Stash log file.
- log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+ log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages
def __init__(self,
- debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
- logFormat = LOG_FORMAT, # Plugin log line format
- dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
- maxbytes = 8*1024*1024, # Max size of plugin log file
- backupcount = 2, # Backup counts when log file size reaches max size
- logToWrnSet = 0, # Customize the target output set which will get warning logging
- logToErrSet = 0, # Customize the target output set which will get error logging
- logToNormSet = 0, # Customize the target output set which will get normal logging
- logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
- mainScriptName = "", # The main plugin script file name (full path)
- pluginID = "",
- settings = None, # Default settings for UI fields
- config = None, # From pluginName_config.py or pluginName_setting.py
- fragmentServer = None,
- stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
- apiKey = None, # API Key only needed when username and password set while running script via command line
+ debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
+ logFormat = Constant.LOG_FORMAT.value, # Plugin log line format
+ dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
+ maxbytes = 8*1024*1024, # Max size of plugin log file
+ backupcount = 2, # Backup counts when log file size reaches max size
+ logToWrnSet = 0, # Customize the target output set which will get warning logging
+ logToErrSet = 0, # Customize the target output set which will get error logging
+ logToNormSet = 0, # Customize the target output set which will get normal logging
+ logFilePath = "", # Plugin log file. If empty, the log file name will be set based on current python file name and path
+ mainScriptName = "", # The main plugin script file name (full path)
+ pluginID = "",
+ settings = None, # Default settings for UI fields
+ config = None, # From pluginName_config.py or pluginName_setting.py
+ fragmentServer = None,
+ stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
+ apiKey = None, # API Key only needed when username and password set while running script via command line
DebugTraceFieldName = "zzdebugTracing",
+ DebugFieldName = "zzDebug",
DryRunFieldName = "zzdryRun",
- setStashLoggerAsPluginLogger = False):
+ setStashLoggerAsPluginLogger = False,
+ DBG_LEVEL = DbgLevel.INF):
+ if DBG_LEVEL in list(self.DbgLevel):
+ self.DBG_LEVEL = DBG_LEVEL
+ if debugTracing:
+ self.DEBUG_TRACING = debugTracing
+ if self.DBG_LEVEL > self.DbgLevel.DBG:
+ self.DBG_LEVEL = self.DbgLevel.TRACE
+ elif self.DBG_LEVEL < self.DbgLevel.INF:
+ self.DEBUG_TRACING = True
self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+ if self.isWindows():
+ self.IS_WINDOWS = True
+ self.OS_TYPE = self.OS_Type.WINDOWS
+ elif self.isLinux():
+ self.IS_LINUX = True
+ self.OS_TYPE = self.OS_Type.LINUX
+ if self.isDocker():
+ self.IS_DOCKER = True
+ elif self.isFreeBSD():
+ self.IS_FREEBSD = True
+ self.OS_TYPE = self.OS_Type.FREEBSD
+ if self.isDocker():
+ self.IS_DOCKER = True
+ elif self.isMacOS():
+ self.IS_MAC_OS = True
+ self.OS_TYPE = self.OS_Type.MAC_OS
if logToWrnSet: self.log_to_wrn_set = logToWrnSet
if logToErrSet: self.log_to_err_set = logToErrSet
if logToNormSet: self.log_to_norm = logToNormSet
@@ -129,7 +207,6 @@ class StashPluginHelper(StashInterface):
else:
self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
- if debugTracing: self.DEBUG_TRACING = debugTracing
if config:
self.pluginConfig = config
if self.Setting('apiKey', "") != "":
@@ -191,8 +268,14 @@ class StashPluginHelper(StashInterface):
self.API_KEY = self.STASH_CONFIGURATION['apiKey']
self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
- self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
- if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+ if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING):
+ self.DEBUG_TRACING = True
+ self.LOG_LEVEL = logging.TRACE
+ self.DBG_LEVEL = self.DbgLevel.TRACE
+ elif self.Setting(DebugFieldName, self.DEBUG_TRACING):
+ self.DEBUG_TRACING = True
+ self.LOG_LEVEL = logging.DEBUG
+ self.DBG_LEVEL = self.DbgLevel.DBG
logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
@@ -202,74 +285,104 @@ class StashPluginHelper(StashInterface):
def __del__(self):
self.thredPool.shutdown(wait=False)
- def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+ def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False):
if self.pluginSettings != None and name in self.pluginSettings:
if notEmpty == False or self.pluginSettings[name] != "":
return self.pluginSettings[name]
if self.pluginConfig != None and name in self.pluginConfig:
if notEmpty == False or self.pluginConfig[name] != "":
return self.pluginConfig[name]
- if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+ if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx:
raise Exception(f"Missing {name} from both UI settings and config file settings.")
return default
- def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
- if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
- logMsg = self.asc2(logMsg)
- else:
- logMsg = logMsg
- if printTo == 0:
- printTo = self.log_to_norm
- elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
- logLevel = logging.ERROR
- printTo = self.log_to_err_set
- elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
- logLevel = logging.CRITICAL
- printTo = self.log_to_err_set
- elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
- logLevel = logging.WARN
- printTo = self.log_to_wrn_set
- if lineNo == -1:
- lineNo = inspect.currentframe().f_back.f_lineno
- LN_Str = f"[LN:{lineNo}]"
- # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
- if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
- if levelStr == "": levelStr = self.LEV_DBG
- if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.INFO or logLevel == logging.DEBUG:
- if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
- if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.WARN:
- if levelStr == "": levelStr = self.LEV_WRN
- if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.ERROR:
- if levelStr == "": levelStr = self.LEV_ERR
- if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
- elif logLevel == logging.CRITICAL:
- if levelStr == "": levelStr = self.LEV_CRITICAL
- if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
- if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
- if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
- print(f"{LN_Str} {levelStr}{logMsg}")
- if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
- print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+ def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False):
+ try:
+ if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
+ logMsg = self.asc2(logMsg)
+ else:
+ logMsg = logMsg
+ if printTo == 0:
+ printTo = self.log_to_norm
+ elif printTo == self.LogTo.ERROR and logLevel == logging.INFO:
+ logLevel = logging.ERROR
+ printTo = self.log_to_err_set
+ elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO:
+ logLevel = logging.CRITICAL
+ printTo = self.log_to_err_set
+ elif printTo == self.LogTo.WARN and logLevel == logging.INFO:
+ logLevel = logging.WARN
+ printTo = self.log_to_wrn_set
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ LN_Str = f"[LN:{lineNo}]"
+ # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
+ if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE):
+ if levelStr == "": levelStr = self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE):
+ if levelStr == "": levelStr = self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.INFO or logLevel == logging.DEBUG:
+ if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG
+ if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.WARN:
+ if levelStr == "": levelStr = self.Level.WRN
+ if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.ERROR:
+ if levelStr == "": levelStr = self.Level.ERR
+ if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ elif logLevel == logging.CRITICAL:
+ if levelStr == "": levelStr = self.Level.CRITICAL
+ if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+ if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"{LN_Str} {levelStr}{logMsg}")
+ if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+ print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+ except Exception as e:
+ if printLogException:
+ tb = traceback.format_exc()
+ print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}")
+ pass
def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
- if printTo == 0: printTo = self.LOG_TO_FILE
+ if printTo == 0: printTo = self.LogTo.FILE
+ if lineNo == -1:
+ lineNo = inspect.currentframe().f_back.f_lineno
+ logLev = logging.INFO if logAlways else logging.TRACE
+ if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+ if logMsg == "":
+ logMsg = f"Line number {lineNo}..."
+ self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii)
+
+ # Log once per session. Only logs the first time called from a particular line number in the code.
+ def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ lineNo = inspect.currentframe().f_back.f_lineno
+ if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+ FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+ if FuncAndLineNo in self.logLinePreviousHits:
+ return
+ self.logLinePreviousHits.append(FuncAndLineNo)
+ self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+ def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+ if printTo == 0: printTo = self.LogTo.FILE
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
logLev = logging.INFO if logAlways else logging.DEBUG
if self.DEBUG_TRACING or logAlways:
if logMsg == "":
logMsg = f"Line number {lineNo}..."
- self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+ self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii)
# Log once per session. Only logs the first time called from a particular line number in the code.
- def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+ def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
lineNo = inspect.currentframe().f_back.f_lineno
if self.DEBUG_TRACING or logAlways:
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
@@ -279,8 +392,8 @@ class StashPluginHelper(StashInterface):
self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
# Log INFO on first call, then do Trace on remaining calls.
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
- if printTo == 0: printTo = self.LOG_TO_FILE
+ def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False):
+ if printTo == 0: printTo = self.LogTo.FILE
lineNo = inspect.currentframe().f_back.f_lineno
FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
if FuncAndLineNo in self.logLinePreviousHits:
@@ -288,49 +401,97 @@ class StashPluginHelper(StashInterface):
self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
else:
self.logLinePreviousHits.append(FuncAndLineNo)
- self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+ self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException)
- def Warn(self, logMsg, printTo = 0, toAscii = None):
+ def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
if printTo == 0: printTo = self.log_to_wrn_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+ self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException)
- def Error(self, logMsg, printTo = 0, toAscii = None):
+ def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
if printTo == 0: printTo = self.log_to_err_set
lineNo = inspect.currentframe().f_back.f_lineno
- self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
-
- def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+ self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException)
+
+    # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
+    # The below non-logging functions use (lower) camelCase naming convention.
+ def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
printTo, logLevel, lineNo)
- def ExecuteProcess(self, args, ExecDetach=False):
- import platform, subprocess
- is_windows = any(platform.win32_ver())
+ # Replaces obsolete UI settings variable with new name. Only use this with strings and numbers.
+ # Example usage:
+ # obsoleteSettingsToConvert = {"OldVariableName" : "NewVariableName", "AnotherOldVarName" : "NewName2"}
+ # stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "ObsoleteSettingsCheckVer2")
+ def replaceObsoleteSettings(self, settingSet:dict, SettingToCheckFirst="", init_defaults=False):
+ if SettingToCheckFirst == "" or self.Setting(SettingToCheckFirst) == False:
+ for key in settingSet:
+ obsoleteVar = self.Setting(key)
+ if isinstance(obsoleteVar, bool):
+ if obsoleteVar:
+ if self.Setting(settingSet[key]) == False:
+ self.Log(f"Detected obsolete (bool) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : False}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (bool) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : False}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ elif isinstance(obsoleteVar, int): # Both int and bool type returns true here
+ if obsoleteVar > 0:
+ if self.Setting(settingSet[key]) > 0:
+ self.Log(f"Detected obsolete (int) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : 0}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (int) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : 0}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ elif obsoleteVar != "":
+ if self.Setting(settingSet[key]) == "":
+ self.Log(f"Detected obsolete (str) settings ({key}). Moving obsolete settings to new setting name {settingSet[key]}.")
+ results = self.configure_plugin(self.PLUGIN_ID, {settingSet[key]:self.Setting(key), key : ""}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ else:
+ self.Log(f"Detected obsolete (str) settings ({key}), and deleting it's content because new setting name ({settingSet[key]}) is already populated.")
+ results = self.configure_plugin(self.PLUGIN_ID, {key : ""}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+ if SettingToCheckFirst != "":
+ results = self.configure_plugin(self.PLUGIN_ID, {SettingToCheckFirst : True}, init_defaults)
+ self.Debug(f"configure_plugin = {results}")
+
+
+ def executeProcess(self, args, ExecDetach=False):
pid = None
- self.Trace(f"is_windows={is_windows} args={args}")
- if is_windows:
+ self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
+ if self.IS_WINDOWS:
if ExecDetach:
- self.Trace("Executing process using Windows DETACHED_PROCESS")
+ self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
DETACHED_PROCESS = 0x00000008
pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
else:
pid = subprocess.Popen(args, shell=True).pid
else:
- self.Trace("Executing process using normal Popen")
- pid = subprocess.Popen(args).pid
+ if ExecDetach:
+ # For linux detached, use nohup. I.E. subprocess.Popen(["nohup", "python", "test.py"])
+ if self.IS_LINUX:
+ args = ["nohup"] + args
+ self.Trace(f"Executing detached process using Popen({args})")
+ else:
+ self.Trace(f"Executing process using normal Popen({args})")
+ pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True)
self.Trace(f"pid={pid}")
return pid
- def ExecutePythonScript(self, args, ExecDetach=True):
+ def executePythonScript(self, args, ExecDetach=True):
PythonExe = f"{sys.executable}"
argsWithPython = [f"{PythonExe}"] + args
- return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
+ return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
- def Submit(self, *args, **kwargs):
+ def submit(self, *args, **kwargs):
return self.thredPool.submit(*args, **kwargs)
def asc2(self, data, convertToAscii=None):
@@ -340,18 +501,377 @@ class StashPluginHelper(StashInterface):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str'
- def init_mergeMetadata(self, excludeMergeTags=None):
+ def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
- # Must call init_mergeMetadata, before calling merge_metadata
- def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
- if type(SrcData) is int:
- SrcData = self.find_scene(SrcData)
- DestData = self.find_scene(DestData)
- return self._mergeMetadata.merge(SrcData, DestData)
+ def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata
+ import requests
+ if self._mergeMetadata == None:
+ self.initMergeMetadata(excludeMergeTags)
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ if type(SrcData) is int:
+ SrcData = self.find_scene(SrcData)
+ DestData = self.find_scene(DestData)
+ return self._mergeMetadata.merge(SrcData, DestData)
+ except (requests.exceptions.ConnectionError, ConnectionResetError):
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+    # Map a result-set size to a progress-bar update stride: the larger the
+    # result set, the more iterations are skipped between server updates
+    # (e.g. >40000 results -> update every 100th iteration; <=200 -> every one).
+    def getUpdateProgressBarIter(self, qtyResults):
+        if qtyResults > 40000:
+            return 100
+        if qtyResults > 20000:
+            return 80
+        if qtyResults > 10000:
+            return 40
+        if qtyResults > 5000:
+            return 20
+        if qtyResults > 2000:
+            return 10
+        if qtyResults > 1000:
+            return 5
+        if qtyResults > 500:
+            return 3
+        if qtyResults > 200:
+            return 2
+        return 1
+
+ def enableProgressBar(self, enable=True):
+ self.progressBarIsEnabled = enable
+
+ # Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
+ def setProgressBarIter(self, qtyResults):
+ if self.progressBarIsEnabled:
+ self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+ self.currentProgressbarIteration = 0
+
+    # Report progress to Stash as a fraction. When updateProgressbarOnIter is set
+    # (see setProgressBarIter), only every Nth call actually sends an update, to
+    # reduce traffic to the server.
+    def progressBar(self, currentIndex, maxCount):
+        if self.progressBarIsEnabled:
+            if self.updateProgressbarOnIter > 0:
+                self.currentProgressbarIteration+=1
+                if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+                    self.currentProgressbarIteration = 0
+                else:
+                    return
+            # Ratio is inverted when currentIndex exceeds maxCount so the value stays <= 1.0.
+            progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+            try:
+                self.log.progress(progress)
+            except Exception as e:
+                # Progress reporting is best-effort; failures are deliberately ignored.
+                pass
+
+ def isDocker(self):
+ cgroup = pathlib.Path('/proc/self/cgroup')
+ return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
+
+ def isWindows(self):
+ if any(platform.win32_ver()):
+ return True
+ return False
+
+ def isLinux(self):
+ if platform.system().lower().startswith("linux"):
+ return True
+ return False
+
+ def isFreeBSD(self):
+ if platform.system().lower().startswith("freebsd"):
+ return True
+ return False
+
+ def isMacOS(self):
+ if sys.platform == "darwin":
+ return True
+ return False
+
+ def isWindows(self):
+ if any(platform.win32_ver()):
+ return True
+ return False
+
+ def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
+ if trace:
+ self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
+ pos = 1
+ while self.stopProcessBarSpin == False:
+ if trace:
+ self.Trace(f"progressBar({pos}, {maxPos})")
+ self.progressBar(pos, maxPos)
+ pos +=1
+ if pos > maxPos:
+ pos = 1
+ time.sleep(sleepSeconds)
+
+ def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
+ self.stopProcessBarSpin = False
+ if trace:
+ self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
+ self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace)
+
+ def stopSpinningProcessBar(self, sleepSeconds = 1):
+ self.stopProcessBarSpin = True
+ time.sleep(sleepSeconds)
+
+ def startsWithInList(self, listToCk, itemToCk):
+ itemToCk = itemToCk.lower()
+ for listItem in listToCk:
+ if itemToCk.startswith(listItem.lower()):
+ return True
+ return False
+
+ def indexStartsWithInList(self, listToCk, itemToCk):
+ itemToCk = itemToCk.lower()
+ index = -1
+ lenItemMatch = 0
+ returnValue = self.Constant.NOT_IN_LIST.value
+ for listItem in listToCk:
+ index += 1
+ if itemToCk.startswith(listItem.lower()):
+ if len(listItem) > lenItemMatch: # Make sure the best match is selected by getting match with longest string.
+ lenItemMatch = len(listItem)
+ returnValue = index
+ return returnValue
+
+ def checkIfTagInlist(self, somelist, tagName, trace=False):
+ tagId = self.find_tags(q=tagName)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ tagId = tagId[0]['id']
+ else:
+ self.Warn(f"Could not find tag ID for tag '{tagName}'.")
+ return
+ somelist = somelist.split(",")
+ if trace:
+ self.Trace("#########################################################################")
+ scenes = self.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details')
+ qtyResults = len(scenes)
+ self.Log(f"Found {qtyResults} scenes with tag ({tagName})")
+ Qty = 0
+ for scene in scenes:
+ Qty+=1
+ if self.startsWithInList(somelist, scene['files'][0]['path']):
+ self.Log(f"Found scene part of list; {scene['files'][0]['path']}")
+ elif trace:
+ self.Trace(f"Not part of list; {scene['files'][0]['path']}")
+
+ def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
+ tagId = self.find_tags(q=tagName)
+ if len(tagId):
+ tagId = tagId[0]
+ if deleteIfExist:
+ self.destroy_tag(int(tagId['id']))
+ else:
+ return tagId['id']
+ tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
+ self.Log(f"Dup-tagId={tagId['id']}")
+ return tagId['id']
+
+ def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
+ scene_details = scene
+ if isinstance(scene, int) or 'id' not in scene:
+ scene_details = self.find_scene(scene)
+ tagIds = []
+ doesHaveTagName = False
+ for tag in scene_details['tags']:
+ if tag['name'] != tagName:
+ tagIds += [tag['id']]
+ else:
+ doesHaveTagName = True
+ if doesHaveTagName:
+ dataDict = {'id' : scene_details['id']}
+ dataDict.update({'tag_ids' : tagIds})
+ self.update_scene(dataDict)
+ return doesHaveTagName
+
+ def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ scene_details = scene
+ if isinstance(scene, int) or 'id' not in scene:
+ scene_details = self.find_scene(scene)
+ tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
+ for tag in scene_details['tags']:
+ if tag['name'] == tagName:
+ return False
+ else:
+ tagIds += [tag['id']]
+ dataDict = {'id' : scene_details['id']}
+ dataDict.update({'tag_ids' : tagIds})
+ self.update_scene(dataDict)
+ return True
+ except (ConnectionResetError):
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def copyFields(self, srcData, fieldsToCpy):
+ destData = {}
+ for key in srcData:
+ if key in fieldsToCpy:
+ destData.update({key : srcData[key]})
+ return destData
+
+ def renameTag(self,oldTagName, newTagName):
+ tagMetadata = self.find_tags(q=oldTagName)
+ if len(tagMetadata) > 0 and 'id' in tagMetadata[0]:
+ if tagMetadata[0]['name'] == newTagName:
+ return False
+ tagMetadata[0]['name'] = newTagName
+ fieldsToCpy = ["id", "name", "description", "aliases", "ignore_auto_tag", "favorite", "image", "parent_ids", "child_ids"]
+ tagUpdateInput = self.copyFields(tagMetadata[0], fieldsToCpy)
+ self.Trace(f"Renaming tag using tagUpdateInput = {tagUpdateInput}")
+ self.update_tag(tagUpdateInput)
+ return True
+ return False
+
+ def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5):
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ return self.update_scene(update_input, create)
+ except (ConnectionResetError):
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+ errMsg = None
+ for i in range(0, retryCount):
+ try:
+ if errMsg != None:
+ self.Warn(errMsg)
+ if i > 0:
+ # Check if file still exist
+ scene = self.find_scene(scene_id)
+ if scene == None or len(scene) == 0:
+ self.Warn(f"Scene {scene_id} not found in Stash.")
+ return False
+ return self.destroy_scene(scene_id, delete_file)
+ except (ConnectionResetError):
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ except Exception as e:
+ tb = traceback.format_exc()
+ errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+ time.sleep(sleepSecondsBetweenRetry)
+
+ def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
+ """Runs a plugin operation.
+ The operation is run immediately and does not use the job queue.
+ This is a blocking call, and does not return until plugin completes.
+ Args:
+ plugin_id (ID): plugin_id
+ task_name (str, optional): Plugin task to perform
+ args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
+ Returns:
+ A map of the result.
+ """
+ query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
+ runPluginOperation(plugin_id: $plugin_id, args: $args)
+ }"""
+ if task_mode != None:
+ args.update({"mode" : task_mode})
+ variables = {
+ "plugin_id": plugin_id,
+ "args": args,
+ }
+ if asyn:
+ self.submit(self.call_GQL, query, variables)
+ return f"Made asynchronous call for plugin {plugin_id}"
+ else:
+ return self.call_GQL(query, variables)
+
+ def stopJobs(self, startPos = 0, startsWith = ""):
+ taskQue = self.job_queue()
+ if taskQue != None:
+ count = 0
+ for jobDetails in taskQue:
+ count+=1
+ if count > startPos:
+ if startsWith == "" or jobDetails['description'].startswith(startsWith):
+ self.Log(f"Killing Job ID({jobDetails['id']}); description={jobDetails['description']}")
+ self.stop_job(jobDetails['id'])
+ else:
+ self.Log(f"Excluding Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})")
+ else:
+ self.Log(f"Skipping Job ID({jobDetails['id']}); description={jobDetails['description']}; {jobDetails})")
+
+ def toJson(self, data, replaceSingleQuote=False):
+ if replaceSingleQuote:
+ data = data.replace("'", '"')
+ data = data.replace("\\", "\\\\")
+ data = data.replace("\\\\\\\\", "\\\\")
+ return json.loads(data)
+
+ def isCorrectDbVersion(self, verNumber = 68):
+ results = self.sql_query("select version from schema_migrations")
+ # self.Log(results)
+ if len(results['rows']) == 0 or len(results['rows'][0]) == 0:
+ return False
+ return int(results['rows'][0][0]) == verNumber
+
+ def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False):
+ if self.isCorrectDbVersion():
+ query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};'
+ if UpdateUsingIdOnly:
+ query = f'update files set basename = "{newName}" where id = {fileId};'
+ self.Trace(f"Executing query ({query})")
+ results = self.sql_commit(query)
+ if 'rows_affected' in results and results['rows_affected'] == 1:
+ return True
+ return False
+
+ def getFileNameFromDB(self, id):
+ results = self.sql_query(f'select basename from files where id = {id};')
+ self.Trace(f"results = ({results})")
+ if len(results['rows']) == 0 or len(results['rows'][0]) == 0:
+ return None
+ return results['rows'][0][0]
+
+ # ############################################################################################################
+ # Functions which are candidates to be added to parent class use snake_case naming convention.
+ # ############################################################################################################
+ # The below functions extends class StashInterface with functions which are not yet in the class or
+ # fixes for functions which have not yet made it into official class.
+ def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
+ query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
+ scan_metadata_input = {"paths": paths}
+ if flags:
+ scan_metadata_input.update(flags)
+ elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
+ scan_metadata_input.update(scan_config)
+ result = self.call_GQL(query, {"input": scan_metadata_input})
+ return result["metadataScan"]
+
+ def get_all_scenes(self):
+ query_all_scenes = """
+ query AllScenes {
+ allScenes {
+ id
+ updated_at
+ }
+ }
+ """
+ return self.call_GQL(query_all_scenes)
- # Extends class StashInterface with functions which are not yet in the class
def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
query = """
mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
@@ -410,6 +930,26 @@ class StashPluginHelper(StashInterface):
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
+
+ # +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ # Direct SQL associated functions
+ def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
+ results = None
+ if data == None:
+ return results
+ if 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
+ results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}")
+ else:
+ results = self.sql_query(f"select * from files where id = {data}")
+ if raw_data:
+ return results
+ if 'rows' in results:
+ return results['rows'][0]
+ self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
+ return None
+
+ def set_file_basename(self, id, basename):
+ return self.sql_commit(f"update files set basename = '{basename}' where id = {id}")
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None
@@ -430,7 +970,8 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
self.mergeItems('performers', 'performer_ids', [])
self.mergeItems('galleries', 'gallery_ids', [])
- self.mergeItems('movies', 'movies', [])
+ # Looks like movies has been removed from new Stash version
+ # self.mergeItems('movies', 'movies', [])
self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
self.mergeItem('studio', 'studio_id', 'id')
self.mergeItem('title')
@@ -483,3 +1024,54 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
+
class taskQueue:
    """Helper queries over a snapshot of the Stash job queue (a list of job dicts)."""
    # Snapshot of the job queue; each entry is expected to carry a 'description' key.
    taskqueue = None
    def __init__(self, taskqueue):
        self.taskqueue = taskqueue

    def tooManyScanOnTaskQueue(self, tooManyQty = 5):
        """Return True when at least tooManyQty 'Scanning...' jobs are queued."""
        if self.taskqueue == None:
            return False
        count = 0
        for jobDetails in self.taskqueue:
            if jobDetails['description'] == "Scanning...":
                count += 1
        # Bug fix: the original tested the threshold inside the loop, returning
        # False after the first job; the comparison must happen after counting.
        return count >= tooManyQty

    def _anyJobWithDescription(self, description):
        """Return True when any queued job's description equals the given string."""
        for jobDetails in self.taskqueue:
            if jobDetails['description'] == description:
                return True
        return False

    def cleanJobOnTaskQueue(self):
        """True when a 'Cleaning...' job is queued."""
        return self._anyJobWithDescription("Cleaning...")

    def cleanGeneratedJobOnTaskQueue(self):
        """True when a 'Cleaning generated files...' job is queued."""
        return self._anyJobWithDescription("Cleaning generated files...")

    def isRunningPluginTaskJobOnTaskQueue(self, taskName):
        """True when plugin task taskName is on the queue."""
        # Bug fix: the original compared against the literal string
        # "Running plugin task: {taskName}" (missing f-prefix), so it never matched.
        return self._anyJobWithDescription(f"Running plugin task: {taskName}")

    def tagDuplicatesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")

    def clearDupTagsJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")

    def generatePhashMatchingJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")

    def deleteDuplicatesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")

    def deleteTaggedScenesJobOnTaskQueue(self):
        return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")
+
+
diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
index 4710d48..1662553 100644
--- a/plugins/DupFileManager/DupFileManager.py
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -15,6 +15,10 @@ from datetime import datetime
from StashPluginHelper import StashPluginHelper
from stashapi.stash_types import PhashDistance
from DupFileManager_config import config # Import config from DupFileManager_config.py
+from DupFileManager_report_config import report_config
+
+# ToDo: make sure the following line of code works
+config |= report_config
parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
@@ -73,7 +77,7 @@ advanceMenuOptions = [ "applyCombo", "applyComboBlacklist", "pathToDelete", "pa
doJsonReturnModeTypes = ["tag_duplicates_task", "removeDupTag", "addExcludeTag", "removeExcludeTag", "mergeTags", "getLocalDupReportPath",
"createDuplicateReportWithoutTagging", "deleteLocalDupReportHtmlFiles", "clear_duplicate_tags_task",
"deleteAllDupFileManagerTags", "deleteBlackListTaggedDuplicatesTask", "deleteTaggedDuplicatesLwrResOrLwrDuration",
- "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration"]
+ "deleteBlackListTaggedDuplicatesLwrResOrLwrDuration", "create_duplicate_report_task"]
doJsonReturnModeTypes += [advanceMenuOptions]
doJsonReturn = False
if len(sys.argv) < 2 and stash.PLUGIN_TASK_NAME in doJsonReturnModeTypes:
@@ -92,22 +96,6 @@ else:
stash.Debug(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}; PLUGIN_TASK_NAME = {stash.PLUGIN_TASK_NAME}; argv = {sys.argv}")
stash.status(logLevel=logging.DEBUG)
-# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
-# stash.encodeToUtf8 = True
-
-# ToDo: Remove below commented out lines of code
-# Test code that should be deleted after testing is complete
-# stash.configure_plugin(stash.PLUGIN_ID, {"zSwapHighRes" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zSwapLongLength" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zSwapBetterFrameRate" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zzObsoleteSettingsCheckVer2" : False})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zSwapBetterBitRate" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zSwapCodec" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zyMatchDupDistance" : 1})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zySwapCodec" : True})
-# stash.configure_plugin(stash.PLUGIN_ID, {"zxGraylist" : "B:\\_\\"})
-# exit(0)
-
obsoleteSettingsToConvert = {"zWhitelist" : "zvWhitelist", "zxGraylist" : "zwGraylist", "zyBlacklist" : "zxBlacklist", "zyMatchDupDistance" : "matchDupDistance", "zSwapHighRes" : "zySwapHighRes", "zSwapLongLength" : "zySwapLongLength", "zSwapBetterBitRate" : "zySwapBetterBitRate", "zSwapCodec" : "zySwapCodec", "zSwapBetterFrameRate" : "zySwapBetterFrameRate"}
stash.replaceObsoleteSettings(obsoleteSettingsToConvert, "zzObsoleteSettingsCheckVer2")
@@ -159,7 +147,8 @@ excludeFromReportIfSignificantTimeDiff = False
matchDupDistance = int(stash.Setting('matchDupDistance'))
matchPhaseDistance = PhashDistance.EXACT
matchPhaseDistanceText = "Exact Match"
-if stash.PLUGIN_TASK_NAME == "tag_duplicates_task" and 'Target' in stash.JSON_INPUT['args']:
+if (stash.PLUGIN_TASK_NAME == "tag_duplicates_task" or stash.PLUGIN_TASK_NAME == "create_duplicate_report_task") and 'Target' in stash.JSON_INPUT['args']:
+ stash.enableProgressBar(False)
if stash.JSON_INPUT['args']['Target'].startswith("0"):
matchDupDistance = 0
elif stash.JSON_INPUT['args']['Target'].startswith("1"):
@@ -402,8 +391,6 @@ def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better t
# Prioritize higher reslution over codec, bit rate, and frame rate
if int(scene1['files'][0]['width']) * int(scene1['files'][0]['height']) > int(scene2['files'][0]['width']) * int(scene2['files'][0]['height']):
return False
- # if int(scene1['files'][0]['width']) > int(scene2['files'][0]['width']) or int(scene1['files'][0]['height']) > int(scene2['files'][0]['height']):
- # return False
if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
@@ -453,7 +440,6 @@ def isSwapCandidate(DupFileToKeep, DupFile):
# Don't move if both are in whitelist
if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
return False
- # if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
if swapHighRes and int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
if not significantTimeDiffCheck(DupFileToKeep, DupFile):
return True
@@ -532,6 +518,7 @@ def getHtmlReportTableRow(qtyResults, tagDuplicates):
htmlReportPrefix = htmlReportPrefix.replace('(DateCreatedPlaceHolder)', datetime.now().strftime("%d-%b-%Y, %H:%M:%S"))
return htmlReportPrefix
+htmlReportTableData = stash.Setting('htmlReportTableData')
htmlDetailDiffTextColor = stash.Setting('htmlDetailDiffTextColor')
htmlSupperHighlight = stash.Setting('htmlSupperHighlight')
htmlLowerHighlight = stash.Setting('htmlLowerHighlight')
@@ -555,8 +542,100 @@ def logReason(DupFileToKeep, Scene, reason):
reasonDict[DupFileToKeep['id']] = reason
stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason={reason}")
-htmlReportNameFolder = f"{stash.PLUGINS_PATH}{os.sep}DupFileManager{os.sep}report"
-htmlReportName = f"{htmlReportNameFolder}{os.sep}{stash.Setting('htmlReportName')}"
+
+def getSceneID(scene):
+ return htmlReportTableData.replace("
"
+ if htmlIncludeImagePreview:
+ imagePreview = f""
+ fileHtmlReport.write(f"{getSceneID(DupFile)}{videoPreview} {imagePreview}
")
+ else:
+ fileHtmlReport.write(f"{getSceneID(DupFile)}{videoPreview}")
+ fileHtmlReport.write(f"{getSceneID(DupFile)}{getPath(DupFile)} ")
+ fileHtmlReport.write(f"
Res Durration BitRate Codec FrameRate size ID index ")
+ fileHtmlReport.write(f"{DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} {DupFile['files'][0]['duration']} {DupFile['files'][0]['bit_rate']} {DupFile['files'][0]['video_codec']} {DupFile['files'][0]['frame_rate']} {DupFile['files'][0]['size']} {DupFile['id']} {QtyTagForDel} ")
+
+ if DupFile['id'] in reasonDict:
+ fileHtmlReport.write(f"Reason: {reasonDict[DupFile['id']]} ")
+ # elif DupFileToKeep['id'] in reasonDict:
+ # fileHtmlReport.write(f"Reason: {reasonDict[DupFileToKeep['id']]} ")
+ elif int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
+ fileHtmlReport.write(f"Reason: Resolution {DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} < {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} ")
+ elif significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep):
+ if significantTimeDiffCheck(DupFile, DupFileToKeep):
+ theReason = f"Significant-Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']} "
+ else:
+ theReason = f"Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}"
+ fileHtmlReport.write(f"Reason: {theReason} ")
+ elif isBetterVideo(DupFile, DupFileToKeep):
+ fileHtmlReport.write(f"Reason: Better Video ")
+ elif stash.startsWithInList(DupFileToKeep, DupFile['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
+ fileHtmlReport.write(f"Reason: not whitelist vs whitelist ")
+ elif isTaggedExcluded(DupFileToKeep) and not isTaggedExcluded(DupFile):
+ fileHtmlReport.write(f"Reason: not ExcludeTag vs ExcludeTag ")
+
+ fileHtmlReport.write("
")
+ fileHtmlReport.write(f"[Delete] ")
+ fileHtmlReport.write(f"[Remove] ")
+ fileHtmlReport.write(f"[Copy] ")
+ fileHtmlReport.write(f"[Move] ")
+ fileHtmlReport.write(f"[CpyName] ")
+ fileHtmlReport.write(f"[Flag] ")
+ # ToDo: Add following buttons:
+ # rename file
+ if dupFileExist and tagDuplicates:
+ fileHtmlReport.write(f"[-Tag] ")
+ fileHtmlReport.write(f"[+Exclude] ")
+ fileHtmlReport.write(f"[Merge Tags] ")
+ if dupFileExist:
+ fileHtmlReport.write(f"[Folder] ")
+ fileHtmlReport.write(f"[Play] ")
+ else:
+ fileHtmlReport.write("[File NOT Exist]")
+ fileHtmlReport.write("")
+
+ videoPreview = f" "
+ if htmlIncludeImagePreview:
+ imagePreview = f""
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{videoPreview} {imagePreview}
")
+ else:
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{videoPreview}")
+ fileHtmlReport.write(f"{getSceneID(DupFileToKeep)}{getPath(DupFileToKeep)} ")
+ fileHtmlReport.write(f"
Res Durration BitRate Codec FrameRate size ID ")
+ fileHtmlReport.write(f"{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} {DupFileToKeep['files'][0]['duration']} {DupFileToKeep['files'][0]['bit_rate']} {DupFileToKeep['files'][0]['video_codec']} {DupFileToKeep['files'][0]['frame_rate']} {DupFileToKeep['files'][0]['size']} {DupFileToKeep['id']}
")
+ fileHtmlReport.write(f"[Delete] ")
+ fileHtmlReport.write(f"[Remove] ")
+ fileHtmlReport.write(f"[Rename] ")
+ if isTaggedExcluded(DupFileToKeep):
+ fileHtmlReport.write(f"[-Exclude] ")
+ fileHtmlReport.write(f"[Folder] ")
+ if toKeepFileExist:
+ fileHtmlReport.write(f"[Play] ")
+ else:
+ fileHtmlReport.write("[File NOT Exist]")
+ fileHtmlReport.write(f"[Flag] ")
+ # ToDo: Add following buttons:
+ # rename file
+ fileHtmlReport.write(f"")
+
+ fileHtmlReport.write("\n")
def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlacklistOnly=False, deleteLowerResAndDuration=False):
global reasonDict
@@ -565,16 +644,10 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
dupTagId = stash.createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp, ignoreAutoTag=True)
stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
createHtmlReport = stash.Setting('createHtmlReport')
- previewOrStream = "stream" if stash.Setting('streamOverPreview') else "preview"
- htmlIncludeImagePreview = stash.Setting('htmlIncludeImagePreview')
- htmlImagePreviewPopupSize = stash.Setting('htmlImagePreviewPopupSize')
htmlReportNameHomePage = htmlReportName
- htmlReportTableRow = stash.Setting('htmlReportTableRow')
- htmlReportTableData = stash.Setting('htmlReportTableData')
- htmlReportVideoPreview = stash.Setting('htmlReportVideoPreview')
- htmlHighlightTimeDiff = stash.Setting('htmlHighlightTimeDiff')
htmlReportPaginate = stash.Setting('htmlReportPaginate')
+
addDupWhitelistTag()
addExcludeDupTag()
@@ -596,7 +669,7 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
stash.Trace("#########################################################################")
stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; matchDupDistance={matchPhaseDistanceText}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
stash.startSpinningProcessBar()
- htmlFileData = " paths {screenshot sprite " + previewOrStream + "} " if createHtmlReport else ""
+ htmlFileData = " paths {screenshot sprite " + htmlPreviewOrStream + "} " if createHtmlReport else ""
mergeFieldData = " code director title rating100 date studio {id} movies {movie {id} } galleries {id} performers {id} urls " if merge else ""
DupFileSets = stash.find_duplicate_scenes(matchPhaseDistance, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details ' + mergeFieldData + htmlFileData)
stash.stopSpinningProcessBar()
@@ -628,7 +701,6 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
DupFileDetailList = []
for DupFile in DupFileSet:
QtyDup+=1
- # Scene = stash.find_scene(DupFile['id'])
Scene = DupFile
if skipIfTagged and createHtmlReport == False and duplicateMarkForDeletion in Scene['tags']:
stash.Trace(f"Skipping scene '{Scene['files'][0]['path']}' because already tagged with {duplicateMarkForDeletion}")
@@ -645,7 +717,6 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
if significantTimeDiffCheck(DupFileToKeep, Scene):
QtyRealTimeDiff += 1
- # if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
if int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['width']) * int(Scene['files'][0]['height']):
logReason(DupFileToKeep, Scene, f"resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}")
DupFileToKeep = Scene
@@ -756,77 +827,14 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
if tagDuplicates:
didAddTag = setTagId_withRetry(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True)
if fileHtmlReport != None:
+ # ToDo: Add icons using github path
+ # add copy button with copy icon
+ # add move button with r-sqr icon
+ # repace delete button with trashcan icon
+ # add rename file code and button
+ # add delete only from stash db code and button using DB delete icon
stash.Debug(f"Adding scene {DupFile['id']} to HTML report.")
- dupFileExist = True if os.path.isfile(DupFile['files'][0]['path']) else False
- toKeepFileExist = True if os.path.isfile(DupFileToKeep['files'][0]['path']) else False
-
- fileHtmlReport.write(f"{htmlReportTableRow}")
- videoPreview = f" "
- if htmlIncludeImagePreview:
- imagePreview = f""
- fileHtmlReport.write(f"{htmlReportTableData}{videoPreview} {imagePreview}
")
- else:
- fileHtmlReport.write(f"{htmlReportTableData}{videoPreview}")
- fileHtmlReport.write(f"{htmlReportTableData}{getPath(DupFile)} ")
- fileHtmlReport.write(f"
Res Durration BitRate Codec FrameRate size ID index ")
- fileHtmlReport.write(f"{DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} {DupFile['files'][0]['duration']} {DupFile['files'][0]['bit_rate']} {DupFile['files'][0]['video_codec']} {DupFile['files'][0]['frame_rate']} {DupFile['files'][0]['size']} {DupFile['id']} {QtyTagForDel} ")
-
- if DupFile['id'] in reasonDict:
- fileHtmlReport.write(f"Reason: {reasonDict[DupFile['id']]} ")
- # elif DupFileToKeep['id'] in reasonDict:
- # fileHtmlReport.write(f"Reason: {reasonDict[DupFileToKeep['id']]} ")
- elif int(DupFileToKeep['files'][0]['width']) * int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['width']) * int(DupFile['files'][0]['height']):
- fileHtmlReport.write(f"Reason: Resolution {DupFile['files'][0]['width']}x{DupFile['files'][0]['height']} < {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} ")
- elif significantMoreTimeCompareToBetterVideo(DupFile, DupFileToKeep):
- if significantTimeDiffCheck(DupFile, DupFileToKeep):
- theReason = f"Significant-Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']} "
- else:
- theReason = f"Duration: {DupFile['files'][0]['duration']} < {DupFileToKeep['files'][0]['duration']}"
- fileHtmlReport.write(f"Reason: {theReason} ")
- elif isBetterVideo(DupFile, DupFileToKeep):
- fileHtmlReport.write(f"Reason: Better Video ")
- elif stash.startsWithInList(DupFileToKeep, DupFile['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
- fileHtmlReport.write(f"Reason: not whitelist vs whitelist ")
- elif isTaggedExcluded(DupFileToKeep) and not isTaggedExcluded(DupFile):
- fileHtmlReport.write(f"Reason: not ExcludeTag vs ExcludeTag ")
-
- fileHtmlReport.write("
")
- fileHtmlReport.write(f"[Delete] ")
- if dupFileExist and tagDuplicates:
- fileHtmlReport.write(f"[Remove Tag] ")
- fileHtmlReport.write(f"[Add Exclude Tag] ")
- fileHtmlReport.write(f"[Merge Tags] ")
- if dupFileExist:
- fileHtmlReport.write(f"[Folder] ")
- fileHtmlReport.write(f"[Play] ")
- else:
- fileHtmlReport.write("[File NOT Exist]")
- # ToDo: Add following buttons:
- # Copy file name from duplicate to ToKeep
- # Copy *file* from duplicate to ToKeep
- fileHtmlReport.write("")
-
- videoPreview = f" "
- if htmlIncludeImagePreview:
- imagePreview = f""
- fileHtmlReport.write(f"{htmlReportTableData}{videoPreview} {imagePreview}
")
- else:
- fileHtmlReport.write(f"{htmlReportTableData}{videoPreview}")
- fileHtmlReport.write(f"{htmlReportTableData}{getPath(DupFileToKeep)} ")
- fileHtmlReport.write(f"
Res Durration BitRate Codec FrameRate size ID ")
- fileHtmlReport.write(f"{DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} {DupFileToKeep['files'][0]['duration']} {DupFileToKeep['files'][0]['bit_rate']} {DupFileToKeep['files'][0]['video_codec']} {DupFileToKeep['files'][0]['frame_rate']} {DupFileToKeep['files'][0]['size']} {DupFileToKeep['id']}
")
- fileHtmlReport.write(f"[Delete] ")
- if isTaggedExcluded(DupFileToKeep):
- fileHtmlReport.write(f"[Remove Exclude Tag] ")
- fileHtmlReport.write(f"[Folder] ")
- if toKeepFileExist:
- fileHtmlReport.write(f"[Play] ")
- else:
- fileHtmlReport.write("[File NOT Exist]")
- fileHtmlReport.write(f"")
-
- fileHtmlReport.write("\n")
-
+ writeRowToHtmlReport(fileHtmlReport, DupFile, DupFileToKeep, QtyTagForDel, tagDuplicates)
if QtyTagForDelPaginate >= htmlReportPaginate:
QtyTagForDelPaginate = 0
fileHtmlReport.write("\n")
@@ -885,27 +893,6 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False, deleteBlack
prevHtmReportLink = f"[Prev] "
fileHtmlReport.write(f" {homeHtmReportLink} {prevHtmReportLink}
")
fileHtmlReport.write(f"Total Tagged for Deletion {QtyTagForDel} \n")
- # ToDo: Add a menu after the report with the following options:
- # Delete all Dup tagged files (any match)
- # Remove all Dup tagged files (Just remove from stash, and leave file)
- # Delete Blacklist Dup tagged files
- # Remove Blacklist Dup tagged files
- # Delete all Dup tagged files (Exact Match)
- # Remove all Dup tagged files (Exact Match)
- # Delete all Dup tagged files (High Match)
- # Remove all Dup tagged files (High Match)
- # Delete all Dup tagged files (Medium Match)
- # Remove all Dup tagged files (Medium Match)
- # Delete all Dup tagged files (Low Match)
- # Remove all Dup tagged files (Low Match)
- # Clear dup tag from all scenes
- # Delete dup tag
- # Clear ExcludeDup tag
- # Delete ExcludeDup tag
- # Clear GraylistMarkForDel tag
- # Delete GraylistMarkForDel tag
- # Clear all DupFileManager created tags
- # Delete all DupFileManager created tags
fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}")
fileHtmlReport.close()
stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
@@ -988,6 +975,7 @@ def getAdvanceMenuOptionSelected(advanceMenuOptionSelected):
compareToLess = False
compareToGreater = False
if advanceMenuOptionSelected:
+ stash.enableProgressBar(False)
if 'Target' in stash.JSON_INPUT['args']:
if "applyCombo" in stash.PLUGIN_TASK_NAME:
jsonObject = toJson(stash.JSON_INPUT['args']['Target'])
@@ -997,6 +985,8 @@ def getAdvanceMenuOptionSelected(advanceMenuOptionSelected):
return getAnAdvanceMenuOptionSelected(stash.PLUGIN_TASK_NAME, stash.JSON_INPUT['args']['Target'], isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, compareToLess, compareToGreater)
return isBlackList, pathToDelete, sizeToDelete, durationToDelete, resolutionToDelete, ratingToDelete, tagToDelete, titleToDelete, pathStrToDelete, fileNotExistToDelete, compareToLess, compareToGreater
+# //////////////////////////////////////////////////////////////////////////////
+# //////////////////////////////////////////////////////////////////////////////
def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False, tagId=-1, advanceMenuOptionSelected=False):
if tagId == -1:
tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateMarkForDeletion, base2_duplicateMarkForDeletion, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion'])
@@ -1198,25 +1188,38 @@ def removeExcludeTag():
stash.Log(f"Done removing exclude tag from scene {scene}.")
sys.stdout.write("{" + f"removeExcludeTag : 'complete', id: '{scene}'" + "}")
-def mergeTags():
+def getParseData(getSceneDetails1=True, getSceneDetails2=True):
if 'Target' not in stash.JSON_INPUT['args']:
stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return None, None
+ targetsSrc = stash.JSON_INPUT['args']['Target']
+ targets = targetsSrc.split(":")
+ if len(targets) < 2:
+ stash.Error(f"Could not get both targets from string {targetsSrc}")
+ return None, None
+ stash.Log(f"Parsed targets {targets[0]} and {targets[1]}")
+ target1 = targets[0]
+ target2 = targets[1]
+ if getSceneDetails1:
+ target1 = stash.find_scene(int(target1))
+ if getSceneDetails2:
+ target2 = stash.find_scene(int(target2))
+ elif len(targets) > 2:
+ target2 = target2 + targets[2]
+ return target1, target2
+
+
+def mergeTags():
+ scene1, scene2 = getParseData()
+ if scene1 == None or scene2 == None:
+ sys.stdout.write("{" + f"mergeTags : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
return
- mergeScenes = stash.JSON_INPUT['args']['Target']
- scenes = mergeScenes.split(":")
- if len(scenes) < 2:
- stash.Error(f"Could not get both scenes from string {mergeScenes}")
- return
- stash.Log(f"Merging tags for scene {scenes[0]} and scene {scenes[1]}")
- scene1 = stash.find_scene(int(scenes[0]))
- scene2 = stash.find_scene(int(scenes[1]))
stash.mergeMetadata(scene1, scene2)
- stash.Log(f"Done merging scenes for scene {scenes[0]} and scene {scenes[1]}")
- sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1}', id2: '{scene2}'" + "}")
+ stash.Log(f"Done merging scenes for scene {scene1['id']} and scene {scene2['id']}")
+ sys.stdout.write("{" + f"mergeTags : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}'" + "}")
def getLocalDupReportPath():
htmlReportExist = "true" if os.path.isfile(htmlReportName) else "false"
- # htmlReportExist = "false"
localPath = htmlReportName.replace("\\", "\\\\")
jsonReturn = "{'LocalDupReportExist' : " + f"{htmlReportExist}" + ", 'Path': '" + f"{localPath}" + "'}"
stash.Log(f"Sending json value {jsonReturn}")
@@ -1242,6 +1245,7 @@ def deleteLocalDupReportHtmlFiles(doJsonOutput = True):
sys.stdout.write(jsonReturn)
def removeTagFromAllScenes(tagName, deleteTags):
+ # ToDo: Replace code with SQL code if DB version 68
tagId = stash.find_tags(q=tagName)
if len(tagId) > 0 and 'id' in tagId[0]:
if deleteTags:
@@ -1271,15 +1275,184 @@ def removeAllDupTagsFromAllScenes(deleteTags=False):
else:
stash.Log(f"Clear tags {tagsToClear}")
+def updateScenesInReport(fileName, scene):
+ stash.Log(f"Updating table rows with scene {scene} in file {fileName}")
+ scene1 = -1
+ scene2 = -1
+ strToFind = "class=\"ID_"
+ lines = None
+ with open(fileName, 'r') as file:
+ lines = file.readlines()
+ stash.Log(f"line count = {len(lines)}")
+ with open(fileName, 'w') as file:
+ for line in lines:
+ # stash.Debug(f"line = {line}")
+ if f"class=\"ID_{scene}\"" in line:
+ idx = 0
+ while line.find(strToFind, idx) > -1:
+ idx = line.find(strToFind, idx) + len(strToFind)
+ id = line[idx:]
+ stash.Debug(f"id = {id}, idx = {idx}")
+ id = id[:id.find('"')]
+ stash.Debug(f"id = {id}")
+ if scene1 == -1:
+ scene1 = int(id)
+ elif scene1 != int(id) and scene2 == -1:
+ scene2 = int(id)
+ elif scene1 != -1 and scene2 != -1:
+ break
+ if scene1 != -1 and scene2 != -1:
+ sceneDetail1 = stash.find_scene(scene1)
+ sceneDetail2 = stash.find_scene(scene2)
+ if sceneDetail1 == None or sceneDetail2 == None:
+ stash.Error("Could not get scene details for both scene1 ({scene1}) and scene2 ({scene2}); sceneDetail1={sceneDetail1}; sceneDetail2={sceneDetail2};")
+ else:
+ writeRowToHtmlReport(file, sceneDetail1, sceneDetail2)
+ else:
+ stash.Error(f"Could not get both scene ID associated with scene {scene}; scene1 = {scene1}; scene2 = {scene2}")
+ file.write(line)
+ else:
+ file.write(line)
-# ToDo: Add additional menu items option only for bottom of report:
-# Remove from stash all files no longer part of stash library
-# Remove from stash all files in the Exclusion list (Not supporting regexps)
+def updateScenesInReports(scene, ReportName = htmlReportName):
+ if os.path.isfile(ReportName):
+ updateScenesInReport(ReportName, scene)
+ for x in range(2, 9999):
+ fileName = ReportName.replace(".html", f"_{x-1}.html")
+ stash.Debug(f"Checking if file '{fileName}' exist.")
+ if not os.path.isfile(fileName):
+ break
+ updateScenesInReport(fileName, scene)
+ else:
+ stash.Log(f"Report file does not exist: {ReportName}")
+
+def addPropertyToSceneClass(fileName, scene, property):
+ stash.Log(f"Inserting property {property} for scene {scene} in file {fileName}")
+ doStyleEndTagCheck = True
+ lines = None
+ with open(fileName, 'r') as file:
+ lines = file.readlines()
+ stash.Log(f"line count = {len(lines)}")
+ with open(fileName, 'w') as file:
+ for line in lines:
+ # stash.Debug(f"line = {line}")
+ if doStyleEndTagCheck:
+ if property == "" and line.startswith(f".ID_{scene}" + "{"):
+ continue
+ if line.startswith(""):
+ if property != "":
+ styleSetting = f".ID_{scene}{property}\n"
+ stash.Log(f"styleSetting = {styleSetting}")
+ file.write(styleSetting)
+ doStyleEndTagCheck = False
+ file.write(line)
+
+def addPropertyToSceneClassToAllFiles(scene, property, ReportName = htmlReportName):
+ if os.path.isfile(ReportName):
+ addPropertyToSceneClass(ReportName, scene, property)
+ for x in range(2, 9999):
+ fileName = ReportName.replace(".html", f"_{x-1}.html")
+ stash.Debug(f"Checking if file '{fileName}' exist.")
+ if not os.path.isfile(fileName):
+ break
+ addPropertyToSceneClass(fileName, scene, property)
+ else:
+ stash.Log(f"Report file does not exist: {ReportName}")
+
+def deleteScene(disableInReport=True, deleteFile=True):
+ if 'Target' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find Target in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['Target']
+ stash.Log(f"Processing scene ID# {scene}")
+ result = None
+ result = stash.destroyScene(scene, delete_file=deleteFile)
+ if disableInReport:
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene} with results = {result}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id: '{scene}', result: '{result}'" + "}")
+
+def copyScene(moveScene=False):
+ scene1, scene2 = getParseData()
+ if scene1 == None or scene2 == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', id1: '{scene1}', id2: '{scene2}'" + "}")
+ return
+ if moveScene:
+ stash.mergeMetadata(scene1, scene2)
+ result = shutil.copy(scene1['files'][0]['path'], scene2['files'][0]['path'])
+ if moveScene:
+ result = stash.destroyScene(scene1['id'], delete_file=True)
+ stash.Log(f"destroyScene for scene {scene1['id']} results = {result}")
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene1['id']} and {scene2['id']}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', id1: '{scene1['id']}', id2: '{scene2['id']}', result: '{result}'" + "}")
+
+def renameFile():
+ scene, newName = getParseData(getSceneDetails2=False)
+ if scene == None or newName == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', newName: '{newName}'" + "}")
+ return
+ newName = newName.strip("'")
+ ext = pathlib.Path(scene['files'][0]['path']).suffix
+ newNameFull = f"{pathlib.Path(scene['files'][0]['path']).resolve().parent}{os.sep}{newName}{ext}"
+ newNameFull = newNameFull.strip("'")
+ newNameFull = newNameFull.replace("\\\\", "\\")
+ oldNameFull = scene['files'][0]['path']
+ oldNameFull = oldNameFull.strip("'")
+ oldNameFull = oldNameFull.replace("\\\\", "\\")
+ stash.Log(f"renaming file '{stash.asc2(oldNameFull)}' to '{stash.asc2(newNameFull)}'")
+ result = os.rename(oldNameFull, newNameFull)
+ stash.renameFileNameInDB(scene['files'][0]['id'], pathlib.Path(oldNameFull).stem, f"{newName}{ext}", UpdateUsingIdOnly = True)
+ updateScenesInReports(scene['id'])
+ stash.Log(f"{stash.PLUGIN_TASK_NAME} complete for scene {scene['id']} ;renamed to {newName}; result={result}")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene['id']}', newName: '{newName}', result: '{result}'" + "}")
+
+def flagScene():
+ scene, flagType = getParseData(False, False)
+ if scene == None or flagType == None:
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+ return
+ if flagType == "disable-scene":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:gray;pointer-events:none;}")
+ elif flagType == "strike-through":
+ addPropertyToSceneClassToAllFiles(scene, "{text-decoration: line-through;}")
+ elif flagType == "yellow highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:yellow;}")
+ elif flagType == "green highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:#00FF00;}")
+ elif flagType == "orange highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:orange;}")
+ elif flagType == "cyan highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:cyan;}")
+ elif flagType == "pink highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:pink;}")
+ elif flagType == "red highlight":
+ addPropertyToSceneClassToAllFiles(scene, "{background-color:red;}")
+ elif flagType == "remove all flags":
+ addPropertyToSceneClassToAllFiles(scene, "")
+ else:
+ stash.Log(f"Invalid flagType ({flagType})")
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'failed', scene: '{scene}', flagType: '{flagType}'" + "}")
+ return
+ sys.stdout.write("{" + f"{stash.PLUGIN_TASK_NAME} : 'complete', scene: '{scene}', flagType: '{flagType}'" + "}")
+
+# ToDo: Add to UI menu
+# Remove all Dup tagged files (Just remove from stash, and leave file)
+# Clear GraylistMarkForDel tag
+# Delete GraylistMarkForDel tag
+# Remove from stash all files no longer part of stash library
+# Remove from stash all files in the Exclusion list (Not supporting regexps)
+# ToDo: Add to advance menu
+# Remove only graylist dup
+# Exclude graylist from delete
+# Include graylist in delete
try:
if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "create_duplicate_report_task":
+ mangeDupFiles(tagDuplicates=False, merge=mergeDupFilename)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
manageTagggedDuplicates(deleteScenes=True)
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
@@ -1295,6 +1468,24 @@ try:
elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
stash.metadata_generate({"phashes": True})
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "deleteScene":
+ deleteScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "removeScene":
+ deleteScene(deleteFile=False)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "renameFile":
+ renameFile()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "flagScene":
+ flagScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "copyScene":
+ copyScene()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "moveScene":
+ copyScene(moveScene=True)
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "removeDupTag":
removeDupTag()
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index 7a9389c..caa7279 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -1,6 +1,6 @@
name: DupFileManager
description: Manages duplicate files.
-version: 0.1.6
+version: 0.1.8
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
ui:
javascript:
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
index 9f6676c..65ee067 100644
--- a/plugins/DupFileManager/DupFileManager_config.py
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -77,153 +77,6 @@ config = {
# Determines which codecRankingSet to use when ranking codec. Default is 1 for codecRankingSet1
"codecRankingSetToUse" : 1,
- # HTML Report **************************************************
- # If enabled, create an HTML report when tagging duplicate files
- "createHtmlReport" : True,
- # If enabled, report displays stream instead of preview for video
- "streamOverPreview" : False, # This option works in Chrome, but does not work very well on firefox.
- # If enabled, report displays an image preview similar to sceneDuplicateChecker
- "htmlIncludeImagePreview" : False,
- "htmlImagePreviewPopupSize" : 600,
- # Name of the HTML file to create
- "htmlReportName" : "DuplicateTagScenes.html",
- # HTML report prefix, before table listing
- "htmlReportPrefix" : """
-
-
-Stash Duplicate Report
-
-
-
-
-
-
-Report Info Report Options
-
-
-Found (QtyPlaceHolder) duplice sets
-Date Created: (DateCreatedPlaceHolder)
-
-
-
-Stash Duplicate Scenes Report (MatchTypePlaceHolder) \n""",
- # HTML report postfiox, after table listing
- "htmlReportPostfix" : "\n",
- # HTML report table
- "htmlReportTable" : "",
- # HTML report table row
- "htmlReportTableRow" : "",
- # HTML report table header
- "htmlReportTableHeader" : "",
- # HTML report table data
- "htmlReportTableData" : " ",
- # HTML report video preview
- "htmlReportVideoPreview" : "width='160' height='120' controls", # Alternative option "autoplay loop controls" or "autoplay controls"
- # The number off seconds in time difference for supper highlight on htmlReport
- "htmlHighlightTimeDiff" : 3,
- # Supper highlight for details with higher resolution or duration
- "htmlSupperHighlight" : "yellow",
- # Lower highlight for details with slightly higher duration
- "htmlLowerHighlight" : "nyanza",
- # Text color for details with different resolution, duration, size, bitrate,codec, or framerate
- "htmlDetailDiffTextColor" : "red",
- # Paginate HTML report. Maximum number of results to display on one page, before adding (paginating) an additional page.
- "htmlReportPaginate" : 100,
-
# The following fields are ONLY used when running DupFileManager in script mode
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
"endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
diff --git a/plugins/DupFileManager/DupFileManager_report_config.py b/plugins/DupFileManager/DupFileManager_report_config.py
index b1f01ff..9f776a3 100644
--- a/plugins/DupFileManager/DupFileManager_report_config.py
+++ b/plugins/DupFileManager/DupFileManager_report_config.py
@@ -73,63 +73,102 @@ li:hover .large {
}
+
+
@@ -141,7 +180,7 @@ $(document).ready(function(){ $("button").click(function(){RunPluginDupFileManag
Date Created: (DateCreatedPlaceHolder)
diff --git a/plugins/DupFileManager/README.md b/plugins/DupFileManager/README.md
index 21651bc..f46a0a9 100644
--- a/plugins/DupFileManager/README.md
+++ b/plugins/DupFileManager/README.md
@@ -1,11 +1,24 @@
-# DupFileManager: Ver 0.1.7 (By David Maisonave)
+# DupFileManager: Ver 0.1.8 (By David Maisonave)
DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate files in the Stash system.
It has both **task** and **tools-UI** components.
### Features
-- Creates a report which can be accessed from the settings->tools menu.
+- Creates a duplicate file report which can be accessed from the settings->tools menu options. The report is created as an HTML file and stored in a local path under plugins\DupFileManager\report\DuplicateTagScenes.html.
+ - Items on the left side of the report are the primary duplicates designated for deletion. By default, these duplicates are given a special _duplicate tag.
+ - Items on the right side of the report are designated as primary duplicates to keep. They usually have higher resolution, duration and/or preferred paths.
+ - The report has the following options:
+ - Delete: Delete file and remove from Stash library.
+ - Remove: Remove from Stash library.
+ - Rename: Rename file.
+ - Copy: Copy file from left (source) to right (to-keep).
+ - Move: Copy file and metadata left to right.
+ - Cpy-Name: Copy file name left to right.
+ - Add-Exclude: Add exclude tag to scene, so that the scene is excluded from deletion.
+ - Remove-Tag: Remove duplicate tag from scene.
+ - *Flag-Scene: Flag (mark) scene in report as reviewed (or as requiring further review). Optional flags (red, blue, green, black, & hide-item)
+ - Merge: Copy Metadata (tags, performers, & studios) from left to right.
- Can merge potential source in the duplicate file names for tag names, performers, and studios.
- Normally when Stash searches the file name for tag names, performers, and studios, it only does so using the primary file.
- Advance menu (for specially tagged duplicates)
diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py
index 09620d2..a9be414 100644
--- a/plugins/DupFileManager/StashPluginHelper.py
+++ b/plugins/DupFileManager/StashPluginHelper.py
@@ -115,6 +115,7 @@ class StashPluginHelper(StashInterface):
_mergeMetadata = None
encodeToUtf8 = False
convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+ progressBarIsEnabled = True
# Prefix message value
class Level(EnumValue):
@@ -544,23 +545,28 @@ class StashPluginHelper(StashInterface):
return 2
return 1
+ def enableProgressBar(self, enable=True):
+ self.progressBarIsEnabled = enable
+
# Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
def setProgressBarIter(self, qtyResults):
- self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
- self.currentProgressbarIteration = 0
+ if self.progressBarIsEnabled:
+ self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+ self.currentProgressbarIteration = 0
def progressBar(self, currentIndex, maxCount):
- if self.updateProgressbarOnIter > 0:
- self.currentProgressbarIteration+=1
- if self.currentProgressbarIteration > self.updateProgressbarOnIter:
- self.currentProgressbarIteration = 0
- else:
- return
- progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
- try:
- self.log.progress(progress)
- except Exception as e:
- pass
+ if self.progressBarIsEnabled:
+ if self.updateProgressbarOnIter > 0:
+ self.currentProgressbarIteration+=1
+ if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+ self.currentProgressbarIteration = 0
+ else:
+ return
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ try:
+ self.log.progress(progress)
+ except Exception as e:
+ pass
def isDocker(self):
cgroup = pathlib.Path('/proc/self/cgroup')
@@ -752,6 +758,12 @@ class StashPluginHelper(StashInterface):
try:
if errMsg != None:
self.Warn(errMsg)
+ if i > 0:
+ # Check if file still exist
+ scene = self.find_scene(scene_id)
+ if scene == None or len(scene) == 0:
+ self.Warn(f"Scene {scene_id} not found in Stash.")
+ return False
return self.destroy_scene(scene_id, delete_file)
except (ConnectionResetError):
tb = traceback.format_exc()
@@ -816,9 +828,11 @@ class StashPluginHelper(StashInterface):
return False
return int(results['rows'][0][0]) == verNumber
- def renameFileNameInDB(self, fileId, oldName, newName):
+ def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False):
if self.isCorrectDbVersion():
query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};'
+ if UpdateUsingIdOnly:
+ query = f'update files set basename = "{newName}" where id = {fileId};'
self.Trace(f"Executing query ({query})")
results = self.sql_commit(query)
if 'rows_affected' in results and results['rows_affected'] == 1:
diff --git a/plugins/DupFileManager/advance_options.html b/plugins/DupFileManager/advance_options.html
index dda375c..262c078 100644
--- a/plugins/DupFileManager/advance_options.html
+++ b/plugins/DupFileManager/advance_options.html
@@ -75,6 +75,10 @@ $(document).ready(function(){
{
RunPluginDupFileManager("tag_duplicates_task", this.value + ":" + $("#significantTimeDiff").val(), true);
}
+ else if (this.id.startsWith("create_duplicate_report_task"))
+ {
+ RunPluginDupFileManager("create_duplicate_report_task", this.value + ":" + $("#significantTimeDiff").val(), true);
+ }
else if (this.id === "viewreport")
{
var reportUrl = window.location.href;
@@ -1794,26 +1798,46 @@ function DeleteDupInPath(){
- Create report with different [Match Duplicate Distance] options
+ Create report with different [Match Duplicate Distance] options
Overrides user [Match Duplicate Distance] and [significantTimeDiff] settings
-
- Create Duplicate Tagging Report [Exact Match]
-
-
- Create Duplicate Tagging Report [High Match]
-
-
- Create Duplicate Tagging Report [Medium Match]
-
-
- Create Duplicate Tagging Report [Low Match]
-
-
+
+
+ Create Report with Tagging
+
+ Create Duplicate Tagging Report [Exact Match]
+
+
+ Create Duplicate Tagging Report [High Match]
+
+
+ Create Duplicate Tagging Report [Medium Match]
+
+
+ Create Duplicate Tagging Report [Low Match]
+
+
+
+ Create Report without Tagging
+
+ Create Duplicate Report [Exact Match]
+
+
+ Create Duplicate Report [High Match]
+
+
+ Create Duplicate Report [Medium Match]
+
+
+ Create Duplicate Report [Low Match]
+
+
+
+
Details:
Match Duplicate Distance Number Details
@@ -1822,7 +1846,7 @@ function DeleteDupInPath(){
Safest and most reliable option
Uses tag name _DuplicateMarkForDeletion_0
- Has the fewest results
+ Has the fewest results, and it's very rare to have false matches.
High Match
@@ -1844,9 +1868,31 @@ function DeleteDupInPath(){
To reduce false matches use a time difference of .98 or higher.
Uses tag name _DuplicateMarkForDeletion_3
Scenes tagged by 0, 1, and 2 will have four tags.
- Has the most results, but with many false matches
+ Has the most results, but with many false matches.
+ Time Difference
+
+ Significant time difference setting, where 1 equals 100% and (.9) equals 90%.
+ This setting overrides the setting in DupFileManager_config.py.
+
+ See setting significantTimeDiff in DupFileManager_config.py
+
+ This setting is generally not useful for [Exact Match] reports.
+ This is an important setting when creating Low or Medium match reports. It will reduce false matches.
+
+ Report with tagging
+
+ Reports with tagging will work with the above DupFileManager Advance Menu.
+ The report can take several minutes to complete.
+ It takes much more time to produce a report with tagging compared to creating a report without tagging.
+
+ Report WITHOUT tagging
+
+ Reports with no tagging can NOT be used with the above DupFileManager Advance Menu.
+ The report is created much faster. It usually takes a few seconds to complete.
+ This is the recommended report type to create if the DupFileManager Advance Menu is not needed or desired.
+
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py
index 09620d2..a9be414 100644
--- a/plugins/FileMonitor/StashPluginHelper.py
+++ b/plugins/FileMonitor/StashPluginHelper.py
@@ -115,6 +115,7 @@ class StashPluginHelper(StashInterface):
_mergeMetadata = None
encodeToUtf8 = False
convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+ progressBarIsEnabled = True
# Prefix message value
class Level(EnumValue):
@@ -544,23 +545,28 @@ class StashPluginHelper(StashInterface):
return 2
return 1
+ def enableProgressBar(self, enable=True):
+ self.progressBarIsEnabled = enable
+
# Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
def setProgressBarIter(self, qtyResults):
- self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
- self.currentProgressbarIteration = 0
+ if self.progressBarIsEnabled:
+ self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+ self.currentProgressbarIteration = 0
def progressBar(self, currentIndex, maxCount):
- if self.updateProgressbarOnIter > 0:
- self.currentProgressbarIteration+=1
- if self.currentProgressbarIteration > self.updateProgressbarOnIter:
- self.currentProgressbarIteration = 0
- else:
- return
- progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
- try:
- self.log.progress(progress)
- except Exception as e:
- pass
+ if self.progressBarIsEnabled:
+ if self.updateProgressbarOnIter > 0:
+ self.currentProgressbarIteration+=1
+ if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+ self.currentProgressbarIteration = 0
+ else:
+ return
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ try:
+ self.log.progress(progress)
+ except Exception as e:
+ pass
def isDocker(self):
cgroup = pathlib.Path('/proc/self/cgroup')
@@ -752,6 +758,12 @@ class StashPluginHelper(StashInterface):
try:
if errMsg != None:
self.Warn(errMsg)
+ if i > 0:
+ # Check if file still exist
+ scene = self.find_scene(scene_id)
+ if scene == None or len(scene) == 0:
+ self.Warn(f"Scene {scene_id} not found in Stash.")
+ return False
return self.destroy_scene(scene_id, delete_file)
except (ConnectionResetError):
tb = traceback.format_exc()
@@ -816,9 +828,11 @@ class StashPluginHelper(StashInterface):
return False
return int(results['rows'][0][0]) == verNumber
- def renameFileNameInDB(self, fileId, oldName, newName):
+ def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False):
if self.isCorrectDbVersion():
query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};'
+ if UpdateUsingIdOnly:
+ query = f'update files set basename = "{newName}" where id = {fileId};'
self.Trace(f"Executing query ({query})")
results = self.sql_commit(query)
if 'rows_affected' in results and results['rows_affected'] == 1:
diff --git a/plugins/RenameFile/StashPluginHelper.py b/plugins/RenameFile/StashPluginHelper.py
index 09620d2..a9be414 100644
--- a/plugins/RenameFile/StashPluginHelper.py
+++ b/plugins/RenameFile/StashPluginHelper.py
@@ -115,6 +115,7 @@ class StashPluginHelper(StashInterface):
_mergeMetadata = None
encodeToUtf8 = False
convertToAscii = False # If set True, it takes precedence over encodeToUtf8
+ progressBarIsEnabled = True
# Prefix message value
class Level(EnumValue):
@@ -544,23 +545,28 @@ class StashPluginHelper(StashInterface):
return 2
return 1
+ def enableProgressBar(self, enable=True):
+ self.progressBarIsEnabled = enable
+
# Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
def setProgressBarIter(self, qtyResults):
- self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
- self.currentProgressbarIteration = 0
+ if self.progressBarIsEnabled:
+ self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+ self.currentProgressbarIteration = 0
def progressBar(self, currentIndex, maxCount):
- if self.updateProgressbarOnIter > 0:
- self.currentProgressbarIteration+=1
- if self.currentProgressbarIteration > self.updateProgressbarOnIter:
- self.currentProgressbarIteration = 0
- else:
- return
- progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
- try:
- self.log.progress(progress)
- except Exception as e:
- pass
+ if self.progressBarIsEnabled:
+ if self.updateProgressbarOnIter > 0:
+ self.currentProgressbarIteration+=1
+ if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+ self.currentProgressbarIteration = 0
+ else:
+ return
+ progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+ try:
+ self.log.progress(progress)
+ except Exception as e:
+ pass
def isDocker(self):
cgroup = pathlib.Path('/proc/self/cgroup')
@@ -752,6 +758,12 @@ class StashPluginHelper(StashInterface):
try:
if errMsg != None:
self.Warn(errMsg)
+ if i > 0:
+ # Check if file still exist
+ scene = self.find_scene(scene_id)
+ if scene == None or len(scene) == 0:
+ self.Warn(f"Scene {scene_id} not found in Stash.")
+ return False
return self.destroy_scene(scene_id, delete_file)
except (ConnectionResetError):
tb = traceback.format_exc()
@@ -816,9 +828,11 @@ class StashPluginHelper(StashInterface):
return False
return int(results['rows'][0][0]) == verNumber
- def renameFileNameInDB(self, fileId, oldName, newName):
+ def renameFileNameInDB(self, fileId, oldName, newName, UpdateUsingIdOnly = False):
if self.isCorrectDbVersion():
query = f'update files set basename = "{newName}" where basename = "{oldName}" and id = {fileId};'
+ if UpdateUsingIdOnly:
+ query = f'update files set basename = "{newName}" where id = {fileId};'
self.Trace(f"Executing query ({query})")
results = self.sql_commit(query)
if 'rows_affected' in results and results['rows_affected'] == 1: