Forked from GitHub/Axter-Stash
Moved module logic to a separate module.
StashPluginHelper/ModulesValidate.py (new file, 126 lines)
@@ -0,0 +1,126 @@
# ModulesValidate (By David Maisonave aka Axter)
# Description:
# Checks if packages are installed, and optionally installs any that are missing.
# The example usage code below should be placed at the very top of the source code, before any other imports.
# Example Usage:
# import ModulesValidate
# ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"])
# Testing:
# To test, uninstall packages via command line: pip uninstall -y watchdog schedule requests
import sys, os, pathlib, platform, traceback
# ToDo: Add logic to optionally pull package requirements from a requirements.txt file.

def modulesInstalled(moduleNames, install=True, silent=False):
    retrnValue = True
    for moduleName in moduleNames:
        try: # Try Python 3.3 > way
            import importlib
            import importlib.util
            if moduleName in sys.modules:
                if not silent: print(f"{moduleName!r} already in sys.modules")
            elif isModuleInstalled(moduleName):
                if not silent: print(f"Module {moduleName!r} is available.")
            else:
                if install and (results:=installModule(moduleName)) > 0:
                    if results == 1:
                        print(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: print(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        print(f"Can't find the {moduleName!r} module")
                    retrnValue = False
        except Exception as e:
            try:
                i = importlib.import_module(moduleName)
            except ImportError as e:
                if install and (results:=installModule(moduleName)) > 0:
                    if results == 1:
                        print(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: print(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        tb = traceback.format_exc()
                        print(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
                    retrnValue = False
    return retrnValue

def isModuleInstalled(moduleName):
    try:
        __import__(moduleName)
        return True
    except Exception as e:
        pass
    return False

def installModule(moduleName):
    try:
        if isLinux():
            # Note: Linux may first need: sudo apt install python3-pip
            # if error starts with "Command 'pip' not found"
            # or includes "No module named pip"
            results = os.popen(f"pip --version").read()
            if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                results = os.popen(f"sudo apt install python3-pip").read()
                results = os.popen(f"pip --version").read()
                if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                    return -1
        if isFreeBSD():
            print("Warning: installModule may NOT work on freebsd")
        pipArg = ""
        if isDocker():
            pipArg = " --break-system-packages"
        results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
        results = results.strip("\n")
        if results.find("Requirement already satisfied:") > -1:
            return 2
        elif results.find("Successfully installed") > -1:
            return 1
        elif modulesInstalled(moduleNames=[moduleName], install=False):
            return 1
    except Exception as e:
        pass
    return 0

def installPackage(package): # Should delete this. It doesn't work consistently
    try:
        import pip
        if hasattr(pip, 'main'):
            pip.main(['install', package])
        else:
            pip._internal.main(['install', package])
    except Exception as e:
        return False
    return True

def isDocker():
    cgroup = pathlib.Path('/proc/self/cgroup')
    return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()

def isWindows():
    if any(platform.win32_ver()):
        return True
    return False

def isLinux():
    if platform.system().lower().startswith("linux"):
        return True
    return False

def isFreeBSD():
    if platform.system().lower().startswith("freebsd"):
        return True
    return False

def isMacOS():
    if sys.platform == "darwin":
        return True
    return False

def isWindows():
    if any(platform.win32_ver()):
        return True
    return False
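As a usage illustration only (not part of this commit): a plugin script would call the new module before any third-party imports, along the lines of the module's own example-usage comment above. The package names below are the ones DupFileManager installs in the change that follows; silent=True is optional.

# Hypothetical usage sketch -- placed at the very top of a plugin script.
import ModulesValidate
ModulesValidate.modulesInstalled(["send2trash", "requests"], silent=True)
# If either package was missing, modulesInstalled() attempted a
# "<python> -m pip install <package>" for it, so the imports below are now safe.
import requests
from send2trash import send2trash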
DupFileManager.py

@@ -3,7 +3,9 @@
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
# Note: To call this script outside of Stash, pass argument --url
# Example: python DupFileManager.py --url http://localhost:9999 -a
-import os, sys, time, pathlib, argparse, platform, shutil, logging
+import ModulesValidate
+ModulesValidate.modulesInstalled(["send2trash", "requests"])
+import os, sys, time, pathlib, argparse, platform, shutil, traceback, logging, requests
from StashPluginHelper import StashPluginHelper
from stashapi.stash_types import PhashDistance
from DupFileManager_config import config # Import config from DupFileManager_config.py

@@ -18,11 +20,13 @@ parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', he
parse_args = parser.parse_args()

settings = {
+    "clearAllDupfileManagerTags": False,
    "doNotGeneratePhash": False,
    "mergeDupFilename": False,
    "permanentlyDelete": False,
    "whitelistDelDupInSameFolder": False,
    "whitelistDoTagLowResDup": False,
+    "xGrayListTagging": False,
    "zCleanAfterDel": False,
    "zSwapHighRes": False,
    "zSwapLongLength": False,
@@ -34,7 +38,8 @@ settings = {
    "zyBlacklist": "",
    "zyMatchDupDistance": 0,
    "zyMaxDupToProcess": 0,
-    "zzdebugTracing": False,
+    "zzDebug": False,
+    "zzTracing": False,
}
stash = StashPluginHelper(
        stash_url=parse_args.stash_url,
@@ -42,29 +47,33 @@ stash = StashPluginHelper(
        settings=settings,
        config=config,
        maxbytes=10*1024*1024,
+        DebugTraceFieldName="zzTracing",
+        DebugFieldName="zzDebug",
        )
stash.convertToAscii = True
+stash.Log("******************* Starting *******************")
if len(sys.argv) > 1:
    stash.Log(f"argv = {sys.argv}")
else:
    stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
stash.status(logLevel=logging.DEBUG)

-stash.modulesInstalled(["send2trash", "requests"])

# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
# stash.encodeToUtf8 = True


-LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
+LOG_STASH_N_PLUGIN = stash.LogTo.STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LogTo.CONSOLE + stash.LogTo.FILE
listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
+clearAllDupfileManagerTags = stash.Setting('clearAllDupfileManagerTags')
doNotGeneratePhash = stash.Setting('doNotGeneratePhash')
mergeDupFilename = stash.Setting('mergeDupFilename')
moveToTrashCan = False if stash.Setting('permanentlyDelete') else True
alternateTrashCanPath = stash.Setting('dup_path')
whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
+grayListTagging = stash.Setting('xGrayListTagging')
maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
significantTimeDiff = stash.Setting('significantTimeDiff')
toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
@@ -93,6 +102,9 @@ else:
    codecRanking = stash.Setting('codecRankingSet1')
skipIfTagged = stash.Setting('skipIfTagged')
killScanningPostProcess = stash.Setting('killScanningPostProcess')
+tagLongDurationLowRes = stash.Setting('tagLongDurationLowRes')
+bitRateIsImporantComp = stash.Setting('bitRateIsImporantComp')
+codecIsImporantComp = stash.Setting('codecIsImporantComp')

matchDupDistance = int(stash.Setting('zyMatchDupDistance'))
matchPhaseDistance = PhashDistance.EXACT
@@ -103,6 +115,9 @@ if matchDupDistance == 1:
elif matchDupDistance == 2:
    matchPhaseDistance = PhashDistance.MEDIUM
    matchPhaseDistanceText = "Medium Match"
+elif matchDupDistance == 3:
+    matchPhaseDistance = PhashDistance.LOW
+    matchPhaseDistanceText = "Low Match"

# significantTimeDiff can not be higher than 1 and shouldn't be lower than .5
if significantTimeDiff > 1:
@@ -123,6 +138,14 @@ excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
if excludeDupFileDeleteTag == "":
    excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'

+graylistMarkForDeletion = stash.Setting('graylistMarkForDeletion')
+if graylistMarkForDeletion == "":
+    graylistMarkForDeletion = '_GraylistMarkForDeletion'
+
+longerDurationLowerResolution = stash.Setting('longerDurationLowerResolution')
+if longerDurationLowerResolution == "":
+    longerDurationLowerResolution = '_LongerDurationLowerResolution'
+
excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
stash.initMergeMetadata(excludeMergeTags)

@@ -234,12 +257,27 @@ def setTagId(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=F
    if details != "":
        dataDict.update({'details' : details})
    if dataDict != ORG_DATA_DICT:
-        stash.update_scene(dataDict)
+        stash.updateScene(dataDict)
-        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
+        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict} and tag {tagName}", toAscii=True)
    else:
-        stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
+        stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']} already has tag {tagName}.", toAscii=True)
    return doAddTag

+def setTagId_withRetry(tagName, sceneDetails, DupFileToKeep, TagReason="", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+    errMsg = None
+    for i in range(0, retryCount):
+        try:
+            if errMsg != None:
+                stash.Warn(errMsg)
+            return setTagId(tagName, sceneDetails, DupFileToKeep, TagReason, ignoreAutoTag)
+        except (requests.exceptions.ConnectionError, ConnectionResetError):
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        except Exception as e:
+            tb = traceback.format_exc()
+            errMsg = f"[setTagId] Unknown exception calling setTagId. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+        time.sleep(sleepSecondsBetweenRetry)
+
def hasSameDir(path1, path2):
    if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
        return True
@@ -263,13 +301,19 @@ def sendToTrash(path):
        stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True)
        return False

-def significantLessTime(durrationToKeep, durrationOther):
+def significantLessTime(durration1, durration2): # Where durration1 is ecpected to be smaller than durration2 IE(45/60=.75)
-    timeDiff = durrationToKeep / durrationOther
+    if 'files' in durration1:
+        durration1 = int(durration1['files'][0]['duration'])
+        durration2 = int(durration2['files'][0]['duration'])
+    timeDiff = getTimeDif(durration1, durration2)
    if timeDiff < significantTimeDiff:
        return True
    return False

-def isBetterVideo(scene1, scene2, swapCandidateCk = False):
+def getTimeDif(durration1, durration2): # Where durration1 is ecpected to be smaller than durration2 IE(45/60=.75)
+    return durration1 / durration2
+
+def isBetterVideo(scene1, scene2, swapCandidateCk = False): # is scene2 better than scene1
    # Prioritize higher reslution over codec, bit rate, and frame rate
    if int(scene1['files'][0]['width']) > int(scene2['files'][0]['width']) or int(scene1['files'][0]['height']) > int(scene2['files'][0]['height']):
        return False
@@ -289,12 +333,38 @@ def isBetterVideo(scene1, scene2, swapCandidateCk = False):
            return True
    return False

+def significantMoreTimeCompareToBetterVideo(scene1, scene2): # is scene2 better than scene1
+    if int(scene1['files'][0]['duration']) >= int(scene2['files'][0]['duration']):
+        return False
+    if int(scene1['files'][0]['width']) > int(scene2['files'][0]['width']) or int(scene1['files'][0]['height']) > int(scene2['files'][0]['height']):
+        if significantLessTime(scene1, scene2):
+            if tagLongDurationLowRes:
+                didAddTag = setTagId_withRetry(longerDurationLowerResolution, scene2, scene1, ignoreAutoTag=True)
+                stash.Log(f"Tagged sene2 with tag {longerDurationLowerResolution}, because scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']}); didAddTag={didAddTag}")
+            else:
+                stash.Warn(f"Scene1 is better video, but it has significant less time ({getTimeDif(int(scene1['files'][0]['duration']), int(scene2['files'][0]['duration']))}%) compare to scene2; Scene1={scene1['files'][0]['path']} (ID={scene1['id']})(duration={scene1['files'][0]['duration']}); Scene2={scene2['files'][0]['path']} (ID={scene2['id']}) (duration={scene1['files'][0]['duration']})")
+            return False
+    return True
+
+def allThingsEqual(scene1, scene2): # If all important things are equal, return true
+    if int(scene1['files'][0]['duration']) != int(scene2['files'][0]['duration']):
+        return False
+    if scene1['files'][0]['width'] != scene2['files'][0]['width']:
+        return False
+    if scene1['files'][0]['height'] != scene2['files'][0]['height']:
+        return False
+    if bitRateIsImporantComp and scene1['files'][0]['bit_rate'] != scene2['files'][0]['bit_rate']:
+        return False
+    if codecIsImporantComp and scene1['files'][0]['video_codec'] != scene2['files'][0]['video_codec']:
+        return False
+    return True
+
def isSwapCandidate(DupFileToKeep, DupFile):
    # Don't move if both are in whitelist
    if stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(whitelist, DupFile['files'][0]['path']):
        return False
    if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
-        if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+        if not significantLessTime(DupFileToKeep, DupFile):
            return True
        else:
            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
@@ -302,10 +372,10 @@ def isSwapCandidate(DupFileToKeep, DupFile):
        if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
            return True
    if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
-        if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+        if not significantLessTime(DupFileToKeep, DupFile):
            return True
        else:
-            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter; DupFileToKeep-ID={DupFileToKeep['id']};DupFile-ID={DupFile['id']};BitRate {DupFileToKeep['files'][0]['bit_rate']} vs {DupFile['files'][0]['bit_rate']};Codec {DupFileToKeep['files'][0]['video_codec']} vs {DupFile['files'][0]['video_codec']};FrameRate {DupFileToKeep['files'][0]['frame_rate']} vs {DupFile['files'][0]['frame_rate']};", toAscii=True)
    return False

dupWhitelistTagId = None
@@ -344,6 +414,14 @@ def isWorseKeepCandidate(DupFileToKeep, Scene):
            return True
    return False

+def killScanningJobs():
+    try:
+        if killScanningPostProcess:
+            stash.stopJobs(0, "Scanning...")
+    except Exception as e:
+        tb = traceback.format_exc()
+        stash.Error(f"Exception while trying to kill scan jobs; Error: {e}\nTraceBack={tb}")
+
def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
    duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
    stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
@@ -373,6 +451,7 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
    DupFileSets = stash.find_duplicate_scenes(matchPhaseDistance, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details ' + mergeFieldData)
    stash.stopSpinningProcessBar()
    qtyResults = len(DupFileSets)
+    stash.setProgressBarIter(qtyResults)
    stash.Trace("#########################################################################")
    stash.Log(f"Found {qtyResults} duplicate sets...")
    for DupFileSet in DupFileSets:
@@ -380,7 +459,7 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
        QtyDupSet+=1
        stash.progressBar(QtyDupSet, qtyResults)
        SepLine = "---------------------------"
-        DupFileToKeep = ""
+        DupFileToKeep = None
        DupToCopyFrom = ""
        DupFileDetailList = []
        for DupFile in DupFileSet:
@@ -390,63 +469,68 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
            if skipIfTagged and duplicateMarkForDeletion in Scene['tags']:
                stash.Trace(f"Skipping scene '{Scene['files'][0]['path']}' because already tagged with {duplicateMarkForDeletion}")
                continue
-            stash.Trace(f"Scene = {Scene}", toAscii=True)
+            stash.TraceOnce(f"Scene = {Scene}", toAscii=True)
            DupFileDetailList = DupFileDetailList + [Scene]
-            if DupFileToKeep != "":
+            if os.path.isfile(Scene['files'][0]['path']):
+                if DupFileToKeep != None:
                    if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference
                        QtyExactDup+=1
                    else:
                        QtyAlmostDup+=1
                        SepLine = "***************************"
-                        if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])):
+                        if significantLessTime(DupFileToKeep, Scene):
                            QtyRealTimeDiff += 1

                    if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}")
                        DupFileToKeep = Scene
-                    elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']):
+                    elif significantMoreTimeCompareToBetterVideo(DupFileToKeep, Scene):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}")
                        DupFileToKeep = Scene
                    elif isBetterVideo(DupFileToKeep, Scene):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}")
                        DupFileToKeep = Scene
                    elif stash.startsWithInList(whitelist, Scene['files'][0]['path']) and not stash.startsWithInList(whitelist, DupFileToKeep['files'][0]['path']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not whitelist vs whitelist")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not whitelist vs whitelist")
                        DupFileToKeep = Scene
                    elif isTaggedExcluded(Scene) and not isTaggedExcluded(DupFileToKeep):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not ExcludeTag vs ExcludeTag")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not ExcludeTag vs ExcludeTag")
                        DupFileToKeep = Scene
                    elif stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and not stash.startsWithInList(blacklist, Scene['files'][0]['path']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist vs not blacklist")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist vs not blacklist")
                        DupFileToKeep = Scene
                    elif stash.startsWithInList(blacklist, DupFileToKeep['files'][0]['path']) and stash.startsWithInList(blacklist, Scene['files'][0]['path']) and stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(blacklist, Scene['files'][0]['path']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist-index {stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(blacklist, Scene['files'][0]['path'])}")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist-index {stash.indexStartsWithInList(blacklist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(blacklist, Scene['files'][0]['path'])}")
                        DupFileToKeep = Scene
                    elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and not stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not graylist vs graylist")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not graylist vs graylist")
                        DupFileToKeep = Scene
                    elif stash.startsWithInList(graylist, Scene['files'][0]['path']) and stash.startsWithInList(graylist, DupFileToKeep['files'][0]['path']) and stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path']) > stash.indexStartsWithInList(graylist, Scene['files'][0]['path']):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=graylist-index {stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(graylist, Scene['files'][0]['path'])}")
+                        stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=graylist-index {stash.indexStartsWithInList(graylist, DupFileToKeep['files'][0]['path'])} > {stash.indexStartsWithInList(graylist, Scene['files'][0]['path'])}")
                        DupFileToKeep = Scene
-                    elif favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                    elif allThingsEqual(DupFileToKeep, Scene):
-                        stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}")
+                        # Only do below checks if all imporant things are equal.
+                        if favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                            stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}")
                            DupFileToKeep = Scene
                        elif favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
-                            stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}")
+                            stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}")
                            DupFileToKeep = Scene
                        elif not favorLongerFileName and len(DupFileToKeep['files'][0]['path']) > len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
-                            stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}")
+                            stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}")
                            DupFileToKeep = Scene
                        elif not favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) > int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
-                            stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}")
+                            stash.Debug(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}")
                            DupFileToKeep = Scene
                else:
                    DupFileToKeep = Scene
                # stash.Trace(f"DupFileToKeep = {DupFileToKeep}")
-                stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True)
+                stash.Debug(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True)
+            else:
+                stash.Error(f"Scene does NOT exist; path={Scene['files'][0]['path']}; ID={Scene['id']}")

        for DupFile in DupFileDetailList:
-            if DupFile['id'] != DupFileToKeep['id']:
+            if DupFileToKeep != None and DupFile['id'] != DupFileToKeep['id']:
                if merge:
                    result = stash.mergeMetadata(DupFile, DupFileToKeep)
                    if result != "Nothing To Merge":
@@ -459,24 +543,28 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                        stash.mergeMetadata(DupFileToKeep, DupFile)
                    if toRecycleBeforeSwap:
                        sendToTrash(DupFile['files'][0]['path'])
+                    stash.Log(f"Moving better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'; SrcID={DupFileToKeep['id']};DescID={DupFile['id']};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySwap={QtySwap};QtySkipForDel={QtySkipForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                    try:
                        shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
                        QtySwap+=1
-                        stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}';QtyDup={QtyDup};QtySwap={QtySwap};QtySkipForDel={QtySkipForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                    except Exception as e:
+                        tb = traceback.format_exc()
+                        stash.Error(f"Exception while moving file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}; SrcID={DupFileToKeep['id']};DescID={DupFile['id']}'; Error: {e}\nTraceBack={tb}")
                    DupFileToKeep = DupFile
                else:
                    if dupWhitelistTagId and tagDuplicates:
-                        didAddTag = setTagId(duplicateWhitelistTag, DupFile, DupFileToKeep, ignoreAutoTag=True)
+                        didAddTag = setTagId_withRetry(duplicateWhitelistTag, DupFile, DupFileToKeep, ignoreAutoTag=True)
-                    stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}';didAddWhiteTag={didAddTag};QtyDup={QtyDup};QtySkipForDel={QtySkipForDel}", toAscii=True)
+                    stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}';AddTagW={didAddTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtySkipForDel={QtySkipForDel}", toAscii=True)
            else:
                if isTaggedExcluded(DupFile):
                    QtyExcludeForDel+=1
-                    stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag};QtyDup={QtyDup}")
+                    stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag};QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults}")
                else:
                    if deleteDup:
                        QtyDeleted += 1
                        DupFileName = DupFile['files'][0]['path']
                        DupFileNameOnly = pathlib.Path(DupFileName).stem
-                        stash.Warn(f"Deleting duplicate '{DupFileName}';QtyDup={QtyDup};QtyDeleted={QtyDeleted}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                        stash.Warn(f"Deleting duplicate '{DupFileName}';QtyDup={QtyDup};Set={QtyDupSet} of {qtyResults};QtyDeleted={QtyDeleted}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
                        if alternateTrashCanPath != "":
                            destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
                            if os.path.isfile(destPath):
@@ -484,51 +572,60 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                            shutil.move(DupFileName, destPath)
                        elif moveToTrashCan:
                            sendToTrash(DupFileName)
-                        stash.destroy_scene(DupFile['id'], delete_file=True)
+                        stash.destroyScene(DupFile['id'], delete_file=True)
                    elif tagDuplicates:
                        QtyTagForDel+=1
-                        didAddTag = setTagId(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True)
+                        didAddTag = setTagId_withRetry(duplicateMarkForDeletion, DupFile, DupFileToKeep, ignoreAutoTag=True)
+                        if grayListTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']):
+                            stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True)
                        if didAddTag:
                            QtyNewlyTag+=1
                        if QtyTagForDel == 1:
-                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion};didAddTag={didAddTag};QtyDup={QtyDup};QtyNewlyTag={QtyNewlyTag};QtyTagForDel={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
                        else:
-                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;didAddTag={didAddTag};QtyDup={QtyDup};QtyNewlyTag={QtyNewlyTag};QtyTagForDel={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                            didAddTag = 1 if didAddTag else 0
+                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion;AddTag={didAddTag};Qty={QtyDup};Set={QtyDupSet} of {qtyResults};NewlyTag={QtyNewlyTag};isTag={QtyTagForDel}", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
        stash.Trace(SepLine)
        if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
            break

+    stash.Debug("#####################################################")
    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
-    if killScanningPostProcess:
+    killScanningJobs()
-        stash.stopJobs(0, "Scanning...")
-    if doNotGeneratePhash == False:
-        stash.metadata_generate({"phashes": True})
    if cleanAfterDel:
        stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
-        stash.metadata_clean(paths=stash.STASH_PATHS)
+        stash.metadata_clean()
        stash.metadata_clean_generated()
        stash.optimise_database()
+    if doNotGeneratePhash == False:
+        stash.metadata_generate({"phashes": True})

-def manageTagggedDuplicates(clearTag=False):
+def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False):
    tagId = stash.find_tags(q=duplicateMarkForDeletion)
    if len(tagId) > 0 and 'id' in tagId[0]:
        tagId = tagId[0]['id']
    else:
        stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.")
        return

+    excludedTags = [duplicateMarkForDeletion]
+    if clearAllDupfileManagerTags:
+        excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution]

    QtyDup = 0
    QtyDeleted = 0
    QtyClearedTags = 0
+    QtySetGraylistTag = 0
    QtyFailedQuery = 0
-    stash.Trace("#########################################################################")
+    stash.Debug("#########################################################################")
    stash.startSpinningProcessBar()
    scenes = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id tags {id name} files {path width height duration size video_codec bit_rate frame_rate} details')
    stash.stopSpinningProcessBar()
    qtyResults = len(scenes)
-    stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})")
+    stash.Log(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion})")
+    stash.setProgressBarIter(qtyResults)
    for scene in scenes:
        QtyDup += 1
-        prgs = QtyDup / qtyResults
        stash.progressBar(QtyDup, qtyResults)
        # scene = stash.find_scene(sceneID['id'])
        # if scene == None or len(scene) == 0:
@@ -538,7 +635,14 @@ def manageTagggedDuplicates(clearTag=False):
        # stash.Trace(f"scene={scene}")
        if clearTag:
            QtyClearedTags += 1
-            tags = [int(item['id']) for item in scene["tags"] if item['id'] != tagId]
+            # ToDo: Add logic to exclude graylistMarkForDeletion
+            tags = [int(item['id']) for item in scene["tags"] if item['name'] not in excludedTags]
+            # if clearAllDupfileManagerTags:
+            #     tags = []
+            #     for tag in scene["tags"]:
+            #         if tag['name'] in excludedTags:
+            #             continue
+            #         tags += [int(tag['id'])]
            stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
            dataDict = {'id' : scene['id']}
            if addPrimaryDupPathToDetails:
@@ -549,10 +653,17 @@ def manageTagggedDuplicates(clearTag=False):
                    sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):]
                dataDict.update({'details' : sceneDetails})
            dataDict.update({'tag_ids' : tags})
-            stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags}")
+            stash.Log(f"Updating scene with {dataDict};QtyClearedTags={QtyClearedTags};Count={QtyDup} of {qtyResults}")
-            stash.update_scene(dataDict)
+            stash.updateScene(dataDict)
            # stash.removeTag(scene, duplicateMarkForDeletion)
+        elif setGrayListTag:
+            if stash.startsWithInList(graylist, scene['files'][0]['path']):
+                QtySetGraylistTag+=1
+                if stash.addTag(scene, graylistMarkForDeletion, ignoreAutoTag=True):
+                    stash.Log(f"Added tag {graylistMarkForDeletion} to scene {scene['files'][0]['path']};QtySetGraylistTag={QtySetGraylistTag};Count={QtyDup} of {qtyResults}")
                else:
+                    stash.Trace(f"Scene already had tag {graylistMarkForDeletion}; {scene['files'][0]['path']}")
+        elif deleteScenes:
            DupFileName = scene['files'][0]['path']
            DupFileNameOnly = pathlib.Path(DupFileName).stem
            stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
@@ -563,45 +674,65 @@ def manageTagggedDuplicates(clearTag=False):
|
|||||||
shutil.move(DupFileName, destPath)
|
shutil.move(DupFileName, destPath)
|
||||||
elif moveToTrashCan:
|
elif moveToTrashCan:
|
||||||
sendToTrash(DupFileName)
|
sendToTrash(DupFileName)
|
||||||
result = stash.destroy_scene(scene['id'], delete_file=True)
|
result = stash.destroyScene(scene['id'], delete_file=True)
|
||||||
QtyDeleted += 1
|
QtyDeleted += 1
|
||||||
stash.Trace(f"destroy_scene result={result} for file {DupFileName};QtyDeleted={QtyDeleted}", toAscii=True)
|
stash.Debug(f"destroyScene result={result} for file {DupFileName};QtyDeleted={QtyDeleted};Count={QtyDup} of {qtyResults}", toAscii=True)
|
||||||
stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
|
else:
|
||||||
if doNotGeneratePhash == False and clearTag == False:
|
stash.Error("manageTagggedDuplicates called with invlaid input arguments. Doing early exit.")
|
||||||
stash.metadata_generate({"phashes": True})
|
return
|
||||||
|
stash.Debug("#####################################################")
|
||||||
|
stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
|
||||||
|
killScanningJobs()
|
||||||
|
+    # if doNotGeneratePhash == False and clearTag == False:
+    #     stash.metadata_generate({"phashes": True})

-if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+try:
+    if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
         mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
-        stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
     elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
-        manageTagggedDuplicates()
-        stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+        manageTagggedDuplicates(deleteScenes=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
     elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
         mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
-        stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
     elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
         manageTagggedDuplicates(clearTag=True)
-        stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
-    elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
+    elif stash.PLUGIN_TASK_NAME == "graylist_tag_task":
+        manageTagggedDuplicates(setGrayListTag=True)
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+    elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
         stash.metadata_generate({"phashes": True})
-        stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
+        stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
     elif parse_args.dup_tag:
+        stash.PLUGIN_TASK_NAME = "dup_tag"
         mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
-        stash.Trace(f"Tag duplicate EXIT")
+        stash.Debug(f"Tag duplicate EXIT")
     elif parse_args.del_tag:
-        manageTagggedDuplicates()
-        stash.Trace(f"Delete Tagged duplicates EXIT")
+        stash.PLUGIN_TASK_NAME = "del_tag"
+        manageTagggedDuplicates(deleteScenes=True)
+        stash.Debug(f"Delete Tagged duplicates EXIT")
     elif parse_args.clear_tag:
+        stash.PLUGIN_TASK_NAME = "clear_tag"
         manageTagggedDuplicates(clearTag=True)
-        stash.Trace(f"Clear duplicate tags EXIT")
+        stash.Debug(f"Clear duplicate tags EXIT")
     elif parse_args.remove:
+        stash.PLUGIN_TASK_NAME = "remove"
         mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
-        stash.Trace(f"Delete duplicate EXIT")
+        stash.Debug(f"Delete duplicate EXIT")
     else:
         stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
+except Exception as e:
+    tb = traceback.format_exc()
+    stash.Error(f"Exception while running DupFileManager Task({stash.PLUGIN_TASK_NAME}); Error: {e}\nTraceBack={tb}")
+    killScanningJobs()
+    stash.convertToAscii = False
+    stash.Error(f"Error: {e}\nTraceBack={tb}")

-stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
+stash.Log("\n*********************************\nEXITING ***********************\n*********************************")
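The dispatch above pairs each PLUGIN_TASK_NAME (or command line flag) with one handler call followed by a Debug trace, and the new try/except wrapper makes sure killScanningJobs() still runs when a task throws. Purely as a reading aid, the same mapping can be sketched as a dictionary lookup; this is not code from the commit, and it only assumes the function signatures already shown in the hunk above:

taskHandlers = {
    "tag_duplicates_task":           lambda: mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename),
    "delete_tagged_duplicates_task": lambda: manageTagggedDuplicates(deleteScenes=True),
    "delete_duplicates_task":        lambda: mangeDupFiles(deleteDup=True, merge=mergeDupFilename),
    "clear_duplicate_tags_task":     lambda: manageTagggedDuplicates(clearTag=True),
    "graylist_tag_task":             lambda: manageTagggedDuplicates(setGrayListTag=True),
    "generate_phash_task":           lambda: stash.metadata_generate({"phashes": True}),
}
handler = taskHandlers.get(stash.PLUGIN_TASK_NAME)
if handler:
    handler()
    stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")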
@@ -3,6 +3,10 @@ description: Manages duplicate files.
 version: 0.1.6
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 settings:
+  clearAllDupfileManagerTags:
+    displayName: Clear All Tags
+    description: Clear Tags task clears scenes of all tags (DuplicateMarkForDeletion, _DuplicateWhite..., _ExcludeDup..., _Graylist..., _LongerDur...)
+    type: BOOLEAN
   doNotGeneratePhash:
     displayName: Do Not Generate PHASH
     description: Do not generate PHASH after tag or delete task.
@@ -23,6 +27,10 @@ settings:
     displayName: Whitelist Duplicate Tagging
     description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
     type: BOOLEAN
+  xGrayListTagging:
+    displayName: Tag Graylist
+    description: When adding tag DuplicateMarkForDeletion to graylist scene, also add tag _GraylistMarkForDeletion.
+    type: BOOLEAN
   zCleanAfterDel:
     displayName: Run Clean After Delete
     description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
@@ -61,15 +69,19 @@ settings:
     type: STRING
   zyMatchDupDistance:
     displayName: Match Duplicate Distance
-    description: (Default=0) Where 0 = Exact Match, 1 = High Match, and 2 = Medium Match.
+    description: (Default=0) Where 0 = Exact Match, 1 = High Match, 2 = Medium Match, and 3 = Low Match.
     type: NUMBER
   zyMaxDupToProcess:
     displayName: Max Dup Process
     description: Maximum number of duplicates to process. If 0, infinity
     type: NUMBER
-  zzdebugTracing:
-    displayName: Debug Tracing
-    description: Enable debug tracing so-as to add additional debug logging in Stash\plugins\DupFileManager\DupFileManager.log
+  zzDebug:
+    displayName: Debug
+    description: Enable debug so-as to add additional debug logging in Stash\plugins\DupFileManager\DupFileManager.log
+    type: BOOLEAN
+  zzTracing:
+    displayName: Tracing
+    description: Enable tracing and debug so-as to add additional tracing and debug logging in Stash\plugins\DupFileManager\DupFileManager.log
     type: BOOLEAN
 exec:
   - python
@@ -96,3 +108,7 @@ tasks:
     description: Generate PHASH file matching. Used for file comparisons.
     defaultArgs:
       mode: generate_phash_task
+  - name: Tag Graylist
+    description: Set tag _GraylistMarkForDeletion to scenes having DuplicateMarkForDeletion tag and that are in the Graylist.
+    defaultArgs:
+      mode: graylist_tag_task
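At run time the plugin looks these fields up by key name rather than by display name. A hedged sketch of how the renamed zzDebug field and the new zzTracing and xGrayListTagging fields could be read, using the Setting() accessor that StashPluginHelper defines later in this commit (illustrative only, not code from the commit):

debugEnabled   = stash.Setting("zzDebug", False)
tracingEnabled = stash.Setting("zzTracing", False)
tagGraylist    = stash.Setting("xGrayListTagging", False)
if tracingEnabled:
    stash.Trace("Tracing enabled for DupFileManager")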
@@ -12,16 +12,32 @@ config = {
     "toRecycleBeforeSwap" : True,
     # Character used to seperate items on the whitelist, blacklist, and graylist
     "listSeparator" : ",",
+
+    # Tag names **************************************************
     # Tag used to tag duplicates with lower resolution, duration, and file name length.
     "DupFileTag" : "DuplicateMarkForDeletion",
     # Tag name used to tag duplicates in the whitelist. E.g. _DuplicateWhitelistFile
     "DupWhiteListTag" : "_DuplicateWhitelistFile",
-    # Tags used to exclude duplicate from deletion
+    # Tag name used to exclude duplicate from deletion
     "excludeDupFileDeleteTag" : "_ExcludeDuplicateMarkForDeletion",
+    # Tag name used to tag scenes with existing tag DuplicateMarkForDeletion, and that are in the graylist
+    "graylistMarkForDeletion" : "_GraylistMarkForDeletion",
+    # Tag name for scenes with significant longer duration but lower resolution
+    "longerDurationLowerResolution" : "_LongerDurationLowerResolution",
+
+    # Favor setings *********************************************
     # If enabled, favor longer file name over shorter. If disabled, favor shorter file name.
     "favorLongerFileName" : True,
     # If enabled, favor larger file size over smaller. If disabled, favor smaller file size.
     "favorLargerFileSize" : True,
+    # If enabled, favor videos with a different bit rate value. If favorHighBitRate is true, favor higher rate. If favorHighBitRate is false, favor lower rate
+    "favorBitRateChange" : True,
+    # If enabled, favor videos with higher bit rate. Used with either favorBitRateChange option or UI [Swap Bit Rate Change] option.
+    "favorHighBitRate" : True,
+    # If enabled, favor videos with a different frame rate value. If favorHigherFrameRate is true, favor higher rate. If favorHigherFrameRate is false, favor lower rate
+    "favorFrameRateChange" : True,
+    # If enabled, favor videos with higher frame rate. Used with either favorFrameRateChange option or UI [Swap Better Frame Rate] option.
+    "favorHigherFrameRate" : True,
     # If enabled, favor videos with better codec according to codecRanking
     "favorCodecRanking" : True,
     # Codec Ranking in order of preference (default (codecRankingSet1) is order of ranking based on maximum potential efficiency)
@@ -34,19 +50,17 @@ config = {
     "codecRankingSet4" : ["h264", "vp8", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "h266", "vp9", "av1", "h265", "h263", "h263i", "vp6f", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"],
     # Determines which codecRankingSet to use when ranking codec. Default is 1 for codecRankingSet1
     "codecRankingSetToUse" : 1,
-    # If enabled, favor videos with a different bit rate value. If favorHighBitRate is true, favor higher rate. If favorHighBitRate is false, favor lower rate
-    "favorBitRateChange" : True,
-    # If enabled, favor videos with higher bit rate. Used with either favorBitRateChange option or UI [Swap Bit Rate Change] option.
-    "favorHighBitRate" : True,
-    # If enabled, favor videos with a different frame rate value. If favorHigherFrameRate is true, favor higher rate. If favorHigherFrameRate is false, favor lower rate
-    "favorFrameRateChange" : True,
-    # If enabled, favor videos with higher frame rate. Used with either favorFrameRateChange option or UI [Swap Better Frame Rate] option.
-    "favorHigherFrameRate" : True,
     # If enabled, skip processing tagged scenes
     "skipIfTagged" : True,
     # If enabled, stop multiple scanning jobs after processing duplicates
     "killScanningPostProcess" : True,
+    # If enabled, tag scenes which have longer duration, but lower resolution
+    "tagLongDurationLowRes" : True,
+    # If enabled, bit-rate is used in important comparisons for function allThingsEqual
+    "bitRateIsImporantComp" : True,
+    # If enabled, codec is used in important comparisons for function allThingsEqual
+    "codecIsImporantComp" : True,
+
     # The following fields are ONLY used when running DupFileManager in script mode
     "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
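codecRankingSetToUse picks which of the codecRankingSet lists is consulted, and a codec's position in the chosen list is its rank, with earlier entries preferred. A hypothetical lookup helper is sketched below; the key construction and the codecRank name are illustrative assumptions, not part of DupFileManager:

def codecRank(config, codecName):
    # Select the active ranking list, e.g. "codecRankingSet1" when codecRankingSetToUse is 1.
    rankingSet = config[f"codecRankingSet{config['codecRankingSetToUse']}"]
    codecName = codecName.lower()
    for idx, codec in enumerate(rankingSet):
        if codec.lower() == codecName:
            return idx          # lower index = preferred codec
    return len(rankingSet)      # unknown codecs rank last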
126
plugins/DupFileManager/ModulesValidate.py
Normal file
@@ -0,0 +1,126 @@
(New file; its 126 lines are identical to the StashPluginHelper/ModulesValidate.py listing at the top of this commit.)
@@ -1,12 +1,3 @@
-from stashapi.stashapp import StashInterface
-from logging.handlers import RotatingFileHandler
-import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
-import concurrent.futures
-from stashapi.stash_types import PhashDistance
-import __main__
-
-_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
-
 # StashPluginHelper (By David Maisonave aka Axter)
 # See end of this file for example usage
 # Log Features:
@@ -24,6 +15,14 @@ _ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
 # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
 # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
 # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+from enum import Enum, IntEnum
+import __main__
+
 class StashPluginHelper(StashInterface):
     # Primary Members for external reference
     PLUGIN_TASK_NAME = None
@@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface):
     API_KEY = None
     excludeMergeTags = None

+    # class EnumInt(IntEnum):
+    #     def __repr__(self) -> str:
+    #         return f"{self.__class__.__name__}.{self.name}"
+    #     def __str__(self) -> str:
+    #         return str(self.value)
+    #     def serialize(self):
+    #         return self.value

+    class EnumValue(Enum):
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}.{self.name}"
+        def __str__(self) -> str:
+            return str(self.value)
+        def __add__(self, other):
+            return self.value + other.value
+        def serialize(self):
+            return self.value

     # printTo argument
-    LOG_TO_FILE = 1
-    LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
-    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
-    LOG_TO_STASH = 8
-    LOG_TO_WARN = 16
-    LOG_TO_ERROR = 32
-    LOG_TO_CRITICAL = 64
-    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+    class LogTo(IntEnum):
+        FILE = 1
+        CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+        STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+        STASH = 8
+        WARN = 16
+        ERROR = 32
+        CRITICAL = 64
+        ALL = FILE + CONSOLE + STDERR + STASH

+    class DbgLevel(IntEnum):
+        TRACE = 1
+        DBG = 2
+        INF = 3
+        WRN = 4
+        ERR = 5
+        CRITICAL = 6

+    DBG_LEVEL = DbgLevel.INF

     # Misc class variables
     MAIN_SCRIPT_NAME = None
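The LogTo members are powers of two, so several targets can be combined by addition and later tested with a bitwise AND, which is how the reworked Log() method below filters its output. A small illustration, not taken from the commit:

targets = StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.STASH   # 1 + 8 == 9
if targets & StashPluginHelper.LogTo.FILE:
    print("messages will also be written to the plugin log file")
if not (targets & StashPluginHelper.LogTo.CONSOLE):
    print("console output stays off")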
@@ -62,7 +90,17 @@ class StashPluginHelper(StashInterface):
     LOG_FILE_NAME = None
     STDIN_READ = None
     stopProcessBarSpin = True
-    NOT_IN_LIST = 2147483646
+    updateProgressbarOnIter = 0
+    currentProgressbarIteration = 0
+
+    class OS_Type(IntEnum):
+        WINDOWS = 1
+        LINUX = 2
+        MAC_OS = 3
+        FREEBSD = 4
+        UNKNOWN_OS = 5
+
+    OS_TYPE = OS_Type.UNKNOWN_OS
+
     IS_DOCKER = False
     IS_WINDOWS = False
@@ -79,25 +117,29 @@ class StashPluginHelper(StashInterface):
     convertToAscii = False # If set True, it takes precedence over encodeToUtf8

     # Prefix message value
-    LEV_TRACE = "TRACE: "
-    LEV_DBG = "DBG: "
-    LEV_INF = "INF: "
-    LEV_WRN = "WRN: "
-    LEV_ERR = "ERR: "
-    LEV_CRITICAL = "CRITICAL: "
+    class Level(EnumValue):
+        TRACE = "TRACE: "
+        DBG = "DBG: "
+        INF = "INF: "
+        WRN = "WRN: "
+        ERR = "ERR: "
+        CRITICAL = "CRITICAL: "
+
+    class Constant(EnumValue):
         # Default format
         LOG_FORMAT = "[%(asctime)s] %(message)s"
+        ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_"
+        NOT_IN_LIST = 2147483646
+
     # Externally modifiable variables
-    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages
-    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging
+    log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be change so-as to set target output for normal logging
     # Warn message goes to both plugin log file and stash when sent to Stash log file.
-    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+    log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages

     def __init__(self,
                 debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
-                logFormat = LOG_FORMAT, # Plugin log line format
+                logFormat = Constant.LOG_FORMAT.value, # Plugin log line format
                 dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
                 maxbytes = 8*1024*1024, # Max size of plugin log file
                 backupcount = 2, # Backup counts when log file size reaches max size
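Because log_to_err_set, log_to_norm, and log_to_wrn_set are ordinary class attributes, a calling plugin can retarget them after construction. A minimal sketch using only names visible above (illustrative, not from the commit):

stash.log_to_norm = StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.CONSOLE + StashPluginHelper.LogTo.STASH
stash.Log("normal log messages now go to the plugin file, the console, and Stash")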
@@ -113,19 +155,35 @@ class StashPluginHelper(StashInterface):
                 stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
                 apiKey = None, # API Key only needed when username and password set while running script via command line
                 DebugTraceFieldName = "zzdebugTracing",
+                DebugFieldName = "zzDebug",
                 DryRunFieldName = "zzdryRun",
-                setStashLoggerAsPluginLogger = False):
+                setStashLoggerAsPluginLogger = False,
+                DBG_LEVEL = DbgLevel.INF):
+        if DBG_LEVEL in list(self.DbgLevel):
+            self.DBG_LEVEL = DBG_LEVEL
+        if debugTracing:
+            self.DEBUG_TRACING = debugTracing
+            if self.DBG_LEVEL > self.DbgLevel.DBG:
+                self.DBG_LEVEL = self.DbgLevel.TRACE
+        elif self.DBG_LEVEL < self.DbgLevel.INF:
+            self.DEBUG_TRACING = True
         self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
-        if any(platform.win32_ver()):
+        if self.isWindows():
             self.IS_WINDOWS = True
-        elif platform.system().lower().startswith("linux"):
+            self.OS_TYPE = self.OS_Type.WINDOWS
+        elif self.isLinux():
             self.IS_LINUX = True
+            self.OS_TYPE = self.OS_Type.LINUX
             if self.isDocker():
                 self.IS_DOCKER = True
-        elif platform.system().lower().startswith("freebsd"):
+        elif self.isFreeBSD():
             self.IS_FREEBSD = True
-        elif sys.platform == "darwin":
+            self.OS_TYPE = self.OS_Type.FREEBSD
+            if self.isDocker():
+                self.IS_DOCKER = True
+        elif self.isMacOS():
             self.IS_MAC_OS = True
+            self.OS_TYPE = self.OS_Type.MAC_OS
         if logToWrnSet: self.log_to_wrn_set = logToWrnSet
         if logToErrSet: self.log_to_err_set = logToErrSet
         if logToNormSet: self.log_to_norm = logToNormSet
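With the signature change above, callers can hand the constructor a DbgLevel value in addition to (or instead of) the older debugTracing flag. A rough sketch of such a call, with every other argument omitted; the settings and config wiring is not shown in this hunk, so treat this strictly as an assumption:

stash = StashPluginHelper(config=config, DBG_LEVEL=StashPluginHelper.DbgLevel.DBG)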
@@ -148,7 +206,6 @@ class StashPluginHelper(StashInterface):
         else:
             self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}

-        if debugTracing: self.DEBUG_TRACING = debugTracing
         if config:
             self.pluginConfig = config
             if self.Setting('apiKey', "") != "":
@@ -210,8 +267,14 @@ class StashPluginHelper(StashInterface):
                 self.API_KEY = self.STASH_CONFIGURATION['apiKey']

         self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
-        self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
-        if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+        if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING):
+            self.DEBUG_TRACING = True
+            self.LOG_LEVEL = logging.TRACE
+            self.DBG_LEVEL = self.DbgLevel.TRACE
+        elif self.Setting(DebugFieldName, self.DEBUG_TRACING):
+            self.DEBUG_TRACING = True
+            self.LOG_LEVEL = logging.DEBUG
+            self.DBG_LEVEL = self.DbgLevel.DBG

         logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
         self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
@@ -221,74 +284,104 @@ class StashPluginHelper(StashInterface):
     def __del__(self):
         self.thredPool.shutdown(wait=False)

-    def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+    def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False):
         if self.pluginSettings != None and name in self.pluginSettings:
             if notEmpty == False or self.pluginSettings[name] != "":
                 return self.pluginSettings[name]
         if self.pluginConfig != None and name in self.pluginConfig:
             if notEmpty == False or self.pluginConfig[name] != "":
                 return self.pluginConfig[name]
-        if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+        if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx:
             raise Exception(f"Missing {name} from both UI settings and config file settings.")
         return default

-    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False):
+        try:
             if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
                 logMsg = self.asc2(logMsg)
             else:
                 logMsg = logMsg
             if printTo == 0:
                 printTo = self.log_to_norm
-            elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+            elif printTo == self.LogTo.ERROR and logLevel == logging.INFO:
                 logLevel = logging.ERROR
                 printTo = self.log_to_err_set
-            elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+            elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO:
                 logLevel = logging.CRITICAL
                 printTo = self.log_to_err_set
-            elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+            elif printTo == self.LogTo.WARN and logLevel == logging.INFO:
                 logLevel = logging.WARN
                 printTo = self.log_to_wrn_set
             if lineNo == -1:
                 lineNo = inspect.currentframe().f_back.f_lineno
             LN_Str = f"[LN:{lineNo}]"
             # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
-            if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
+            if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE):
-                if levelStr == "": levelStr = self.LEV_DBG
+                if levelStr == "": levelStr = self.Level.DBG
-                if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}")
+            elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE):
+                if levelStr == "": levelStr = self.Level.DBG
+                if printTo & self.LogTo.FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.INFO or logLevel == logging.DEBUG:
-                if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+                if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG
-                if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.WARN:
-                if levelStr == "": levelStr = self.LEV_WRN
+                if levelStr == "": levelStr = self.Level.WRN
-                if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.ERROR:
-                if levelStr == "": levelStr = self.LEV_ERR
+                if levelStr == "": levelStr = self.Level.ERR
-                if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.CRITICAL:
-                if levelStr == "": levelStr = self.LEV_CRITICAL
+                if levelStr == "": levelStr = self.Level.CRITICAL
-                if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
-            if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+            if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                 print(f"{LN_Str} {levelStr}{logMsg}")
-            if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+            if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                 print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+        except Exception as e:
+            if printLogException:
+                tb = traceback.format_exc()
+                print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}")
+            pass

     def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
-        if printTo == 0: printTo = self.LOG_TO_FILE
+        if printTo == 0: printTo = self.LogTo.FILE
+        if lineNo == -1:
+            lineNo = inspect.currentframe().f_back.f_lineno
+        logLev = logging.INFO if logAlways else logging.TRACE
+        if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+            if logMsg == "":
+                logMsg = f"Line number {lineNo}..."
+            self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii)
+
+    # Log once per session. Only logs the first time called from a particular line number in the code.
+    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+        lineNo = inspect.currentframe().f_back.f_lineno
+        if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+            FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+            if FuncAndLineNo in self.logLinePreviousHits:
+                return
+            self.logLinePreviousHits.append(FuncAndLineNo)
+            self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+    def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+        if printTo == 0: printTo = self.LogTo.FILE
         if lineNo == -1:
             lineNo = inspect.currentframe().f_back.f_lineno
         logLev = logging.INFO if logAlways else logging.DEBUG
         if self.DEBUG_TRACING or logAlways:
             if logMsg == "":
                 logMsg = f"Line number {lineNo}..."
-            self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+            self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii)

     # Log once per session. Only logs the first time called from a particular line number in the code.
-    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+    def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
         lineNo = inspect.currentframe().f_back.f_lineno
         if self.DEBUG_TRACING or logAlways:
             FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
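Taken together, the reworked methods give three verbosity tiers: Trace() only fires when DBG_LEVEL is TRACE, Debug() when DEBUG_TRACING is on, and Log() always. A short, purely illustrative usage sketch built from the methods and flags shown above:

stash.Trace()                                      # file-only marker with the caller's line number
stash.Debug("evaluating duplicate candidates")
stash.Log("tagged 3 scenes", printTo=StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.STASH)
stash.Log("unexpected state", printTo=StashPluginHelper.LogTo.ERROR, printLogException=True)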
@@ -298,8 +391,8 @@ class StashPluginHelper(StashInterface):
             self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)

     # Log INFO on first call, then do Trace on remaining calls.
-    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
+    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False):
-        if printTo == 0: printTo = self.LOG_TO_FILE
+        if printTo == 0: printTo = self.LogTo.FILE
         lineNo = inspect.currentframe().f_back.f_lineno
         FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
         if FuncAndLineNo in self.logLinePreviousHits:
@@ -307,17 +400,17 @@ class StashPluginHelper(StashInterface):
                 self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
         else:
             self.logLinePreviousHits.append(FuncAndLineNo)
-            self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+            self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException)

-    def Warn(self, logMsg, printTo = 0, toAscii = None):
+    def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
         if printTo == 0: printTo = self.log_to_wrn_set
         lineNo = inspect.currentframe().f_back.f_lineno
-        self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+        self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException)

-    def Error(self, logMsg, printTo = 0, toAscii = None):
+    def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
         if printTo == 0: printTo = self.log_to_err_set
         lineNo = inspect.currentframe().f_back.f_lineno
-        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException)

     # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
     # The below non-loggging functions use (lower) camelCase naming convention.
@@ -369,142 +462,93 @@ class StashPluginHelper(StashInterface):
         self.excludeMergeTags = excludeMergeTags
         self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)

-    # Must call initMergeMetadata, before calling mergeMetadata
-    def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+    def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata
+        import requests
+        if self._mergeMetadata == None:
+            self.initMergeMetadata(excludeMergeTags)
+        errMsg = None
+        for i in range(0, retryCount):
+            try:
+                if errMsg != None:
+                    self.Warn(errMsg)
                 if type(SrcData) is int:
                     SrcData = self.find_scene(SrcData)
                     DestData = self.find_scene(DestData)
                 return self._mergeMetadata.merge(SrcData, DestData)
+            except (requests.exceptions.ConnectionError, ConnectionResetError):
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            except Exception as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            time.sleep(sleepSecondsBetweenRetry)
+
+    def getUpdateProgressBarIter(self, qtyResults):
+        if qtyResults > 40000:
+            return 100
+        if qtyResults > 20000:
+            return 80
+        if qtyResults > 10000:
+            return 40
+        if qtyResults > 5000:
+            return 20
+        if qtyResults > 2000:
+            return 10
+        if qtyResults > 1000:
+            return 5
+        if qtyResults > 500:
+            return 3
+        if qtyResults > 200:
+            return 2
+        return 1
+
+    # Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
+    def setProgressBarIter(self, qtyResults):
+        self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+        self.currentProgressbarIteration = 0

     def progressBar(self, currentIndex, maxCount):
+        if self.updateProgressbarOnIter > 0:
+            self.currentProgressbarIteration+=1
+            if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+                self.currentProgressbarIteration = 0
+            else:
+                return
         progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+        try:
             self.log.progress(progress)

-    # Test via command line: pip uninstall -y pyYAML watchdog schedule requests
-    def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
-        retrnValue = True
-        for moduleName in moduleNames:
-            try: # Try Python 3.3 > way
-                import importlib
-                import importlib.util
-                if moduleName in sys.modules:
-                    if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
-                elif self.isModuleInstalled(moduleName):
-                    if not silent: self.Trace(f"Module {moduleName!r} is available.")
-                else:
-                    if install and (results:=self.installModule(moduleName)) > 0:
-                        if results == 1:
-                            self.Log(f"Module {moduleName!r} has been installed")
-                        else:
-                            if not silent: self.Trace(f"Module {moduleName!r} is already installed")
-                        continue
-                    else:
-                        if install:
-                            self.Error(f"Can't find the {moduleName!r} module")
-                        retrnValue = False
         except Exception as e:
-            try:
-                i = importlib.import_module(moduleName)
-            except ImportError as e:
-                if install and (results:=self.installModule(moduleName)) > 0:
-                    if results == 1:
-                        self.Log(f"Module {moduleName!r} has been installed")
-                    else:
-                        if not silent: self.Trace(f"Module {moduleName!r} is already installed")
-                    continue
-                else:
-                    if install:
-                        tb = traceback.format_exc()
-                        self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
-                    retrnValue = False
-        return retrnValue
-
-    def isModuleInstalled(self, moduleName):
-        try:
-            __import__(moduleName)
-            # self.Trace(f"Module {moduleName!r} is installed")
-            return True
-        except Exception as e:
-            tb = traceback.format_exc()
-            self.Warn(f"Module {moduleName!r} is NOT installed!")
-            self.Trace(f"Error: {e}\nTraceBack={tb}")
             pass
-        return False
-
-    def installModule(self,moduleName):
-        # if not self.IS_DOCKER:
-            # try:
-                # self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
-                # First try pip import method. (This may fail in a future version of pip.)
-                # self.installPackage(moduleName)
-                # self.Trace(f"installPackage called for module {moduleName!r}")
-                # if self.modulesInstalled(moduleNames=[moduleName], install=False):
-                    # self.Trace(f"Module {moduleName!r} installed")
-                    # return 1
-                # self.Trace(f"Module {moduleName!r} still not installed.")
-            # except Exception as e:
-                # tb = traceback.format_exc()
-                # self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
-                # pass
-        # else:
-            # self.Trace("Running in Docker, so skipping pip import method.")
-        try:
-            if self.IS_LINUX:
-                # Note: Linux may first need : sudo apt install python3-pip
-                # if error starts with "Command 'pip' not found"
-                # or includes "No module named pip"
-                self.Log("Checking if pip installed.")
-                results = os.popen(f"pip --version").read()
-                if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
-                    results = os.popen(f"sudo apt install python3-pip").read()
-                    results = os.popen(f"pip --version").read()
-                    if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
-                        self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
-                        return -1
-                self.Trace("pip good.")
-            if self.IS_FREEBSD:
-                self.Warn("installModule may NOT work on freebsd")
-            pipArg = ""
-            if self.IS_DOCKER:
-                pipArg = " --break-system-packages"
-            self.Log(f"Attempting to install package {moduleName!r} via popen.")
-            results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
-            results = results.strip("\n")
-            self.Trace(f"pip results = {results}")
-            if results.find("Requirement already satisfied:") > -1:
-                self.Trace(f"Requirement already satisfied for module {moduleName!r}")
-                return 2
-            elif results.find("Successfully installed") > -1:
-                self.Trace(f"Successfully installed module {moduleName!r}")
-                return 1
-            elif self.modulesInstalled(moduleNames=[moduleName], install=False):
-                self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
-                return 1
-            self.Error(f"Failed to install module {moduleName!r}")
-        except Exception as e:
-            tb = traceback.format_exc()
-            self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
-        return 0
-
-    def installPackage(self,package): # Should delete this. It doesn't work consistently
-        try:
-            import pip
-            if hasattr(pip, 'main'):
-                pip.main(['install', package])
-                self.Trace()
-            else:
-                pip._internal.main(['install', package])
-                self.Trace()
-        except Exception as e:
-            tb = traceback.format_exc()
-            self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
-            return False
-        return True

     def isDocker(self):
         cgroup = pathlib.Path('/proc/self/cgroup')
         return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()

+    def isWindows(self):
+        if any(platform.win32_ver()):
+            return True
+        return False
+
+    def isLinux(self):
+        if platform.system().lower().startswith("linux"):
+            return True
+        return False
+
+    def isFreeBSD(self):
+        if platform.system().lower().startswith("freebsd"):
+            return True
+        return False
+
+    def isMacOS(self):
+        if sys.platform == "darwin":
+            return True
+        return False
+
+    def isWindows(self):
+        if any(platform.win32_ver()):
+            return True
+        return False
+
     def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
         if trace:
             self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
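mergeMetadata now retries transient connection failures instead of failing the whole task. The same pattern, reduced to a generic helper for clarity (a sketch only, not part of StashPluginHelper; it binds the exception as e in every handler so the message can always reference it):

import time, traceback

def withRetry(action, retryCount=12, sleepSecondsBetweenRetry=5, warn=print):
    errMsg = None
    for i in range(0, retryCount):
        try:
            if errMsg != None:
                warn(errMsg)
            return action()           # success: return the wrapped call's result
        except Exception as e:
            errMsg = f"Exception calling action. Will retry; count({i}); Error: {e}\nTraceBack={traceback.format_exc()}"
        time.sleep(sleepSecondsBetweenRetry)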
@@ -539,7 +583,7 @@ class StashPluginHelper(StashInterface):
         itemToCk = itemToCk.lower()
         index = -1
         lenItemMatch = 0
-        returnValue = self.NOT_IN_LIST
+        returnValue = self.Constant.NOT_IN_LIST.value
         for listItem in listToCk:
             index += 1
             if itemToCk.startswith(listItem.lower()):
@@ -598,17 +642,62 @@ class StashPluginHelper(StashInterface):
         self.update_scene(dataDict)
         return doesHaveTagName

-    def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False): # scene can be scene ID or scene metadata
+    def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata
+        errMsg = None
+        for i in range(0, retryCount):
+            try:
+                if errMsg != None:
+                    self.Warn(errMsg)
                 scene_details = scene
                 if 'id' not in scene:
                     scene_details = self.find_scene(scene)
                 tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
                 for tag in scene_details['tags']:
-                    if tag['name'] != tagName:
+                    if tag['name'] == tagName:
+                        return False
+                    else:
                         tagIds += [tag['id']]
                 dataDict = {'id' : scene_details['id']}
                 dataDict.update({'tag_ids' : tagIds})
                 self.update_scene(dataDict)
+                return True
+            except (ConnectionResetError):
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            except Exception as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            time.sleep(sleepSecondsBetweenRetry)
+
+    def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5):
+        errMsg = None
+        for i in range(0, retryCount):
+            try:
+                if errMsg != None:
+                    self.Warn(errMsg)
+                return self.update_scene(update_input, create)
+            except (ConnectionResetError):
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            except Exception as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            time.sleep(sleepSecondsBetweenRetry)
+
+    def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+        errMsg = None
+        for i in range(0, retryCount):
+            try:
+                if errMsg != None:
+                    self.Warn(errMsg)
+                return self.destroy_scene(scene_id, delete_file)
+            except (ConnectionResetError):
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            except Exception as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            time.sleep(sleepSecondsBetweenRetry)
+
     def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
         """Runs a plugin operation.
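The retry-aware addTag() returns False when the scene already carries the tag and True once the updated tag list has been written back. A hedged usage sketch; sceneID is a placeholder value, not something defined in this commit:

if stash.addTag(sceneID, "DuplicateMarkForDeletion", ignoreAutoTag=True):
    stash.Log(f"Scene {sceneID} newly tagged for deletion")
else:
    stash.Debug(f"Scene {sceneID} was already tagged")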
126
plugins/FileMonitor/ModulesValidate.py
Normal file
@@ -0,0 +1,126 @@
(New file; same 126-line ModulesValidate.py as the copies added above for StashPluginHelper and DupFileManager.)
pip._internal.main(['install', package])
|
||||||
|
except Exception as e:
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def isDocker():
|
||||||
|
cgroup = pathlib.Path('/proc/self/cgroup')
|
||||||
|
return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
|
||||||
|
|
||||||
|
def isWindows():
|
||||||
|
if any(platform.win32_ver()):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isLinux():
|
||||||
|
if platform.system().lower().startswith("linux"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isFreeBSD():
|
||||||
|
if platform.system().lower().startswith("freebsd"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isMacOS():
|
||||||
|
if sys.platform == "darwin":
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isWindows():
|
||||||
|
if any(platform.win32_ver()):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
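As read from the listing above, installModule signals its outcome through its return value: 1 for a fresh install, 2 when pip reports the requirement already satisfied, 0 on failure, and -1 when pip itself is unavailable. A small hedged sketch of driving the module directly (normally only modulesInstalled is called, as in the Example Usage comment):

# Sketch only: exercising ModulesValidate by hand.
import ModulesValidate

if not ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"], install=True):
    print("One or more required packages could not be installed.")

code = ModulesValidate.installModule("requests")
# 1 = newly installed, 2 = already satisfied, 0 = failed, -1 = pip unavailable
# (on Linux, -1 usually means: sudo apt install python3-pip)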
@@ -1,12 +1,3 @@
-from stashapi.stashapp import StashInterface
-from logging.handlers import RotatingFileHandler
-import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
-import concurrent.futures
-from stashapi.stash_types import PhashDistance
-import __main__
-
-_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
-
 # StashPluginHelper (By David Maisonave aka Axter)
 # See end of this file for example usage
 # Log Features:
@@ -24,6 +15,14 @@ _ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
 # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
 # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
 # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+from stashapi.stashapp import StashInterface
+from logging.handlers import RotatingFileHandler
+import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
+import concurrent.futures
+from stashapi.stash_types import PhashDistance
+from enum import Enum, IntEnum
+import __main__
+
 class StashPluginHelper(StashInterface):
     # Primary Members for external reference
     PLUGIN_TASK_NAME = None
@@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface):
     API_KEY = None
     excludeMergeTags = None
+
+    # class EnumInt(IntEnum):
+    #     def __repr__(self) -> str:
+    #         return f"{self.__class__.__name__}.{self.name}"
+    #     def __str__(self) -> str:
+    #         return str(self.value)
+    #     def serialize(self):
+    #         return self.value
+
+    class EnumValue(Enum):
+        def __repr__(self) -> str:
+            return f"{self.__class__.__name__}.{self.name}"
+        def __str__(self) -> str:
+            return str(self.value)
+        def __add__(self, other):
+            return self.value + other.value
+        def serialize(self):
+            return self.value
+
     # printTo argument
-    LOG_TO_FILE = 1
-    LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
-    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
-    LOG_TO_STASH = 8
-    LOG_TO_WARN = 16
-    LOG_TO_ERROR = 32
-    LOG_TO_CRITICAL = 64
-    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+    class LogTo(IntEnum):
+        FILE = 1
+        CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+        STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+        STASH = 8
+        WARN = 16
+        ERROR = 32
+        CRITICAL = 64
+        ALL = FILE + CONSOLE + STDERR + STASH
+
+    class DbgLevel(IntEnum):
+        TRACE = 1
+        DBG = 2
+        INF = 3
+        WRN = 4
+        ERR = 5
+        CRITICAL = 6
+
+    DBG_LEVEL = DbgLevel.INF
+
     # Misc class variables
     MAIN_SCRIPT_NAME = None
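Because LogTo and DbgLevel derive from IntEnum, their members behave like the old integer constants: LogTo values add up to a bitmask and DbgLevel values order with < and >. A short sketch:

# Sketch: the IntEnum members act like the former integer flags.
printTo = StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.CONSOLE   # == 3
assert printTo & StashPluginHelper.LogTo.FILE
assert StashPluginHelper.DbgLevel.TRACE < StashPluginHelper.DbgLevel.INF
# stash.Log("hello", printTo=printTo)   # `stash` is a hypothetical initialized helper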
@@ -62,7 +90,17 @@ class StashPluginHelper(StashInterface):
     LOG_FILE_NAME = None
     STDIN_READ = None
     stopProcessBarSpin = True
-    NOT_IN_LIST = 2147483646
+    updateProgressbarOnIter = 0
+    currentProgressbarIteration = 0
+
+    class OS_Type(IntEnum):
+        WINDOWS = 1
+        LINUX = 2
+        MAC_OS = 3
+        FREEBSD = 4
+        UNKNOWN_OS = 5
+
+    OS_TYPE = OS_Type.UNKNOWN_OS
+
     IS_DOCKER = False
     IS_WINDOWS = False
@@ -79,25 +117,29 @@ class StashPluginHelper(StashInterface):
     convertToAscii = False # If set True, it takes precedence over encodeToUtf8
 
     # Prefix message value
-    LEV_TRACE = "TRACE: "
-    LEV_DBG = "DBG: "
-    LEV_INF = "INF: "
-    LEV_WRN = "WRN: "
-    LEV_ERR = "ERR: "
-    LEV_CRITICAL = "CRITICAL: "
+    class Level(EnumValue):
+        TRACE = "TRACE: "
+        DBG = "DBG: "
+        INF = "INF: "
+        WRN = "WRN: "
+        ERR = "ERR: "
+        CRITICAL = "CRITICAL: "
+
+    class Constant(EnumValue):
         # Default format
         LOG_FORMAT = "[%(asctime)s] %(message)s"
+        ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_"
+        NOT_IN_LIST = 2147483646
 
     # Externally modifiable variables
-    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
-    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be change so-as to set target output for normal logging
+    log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages
+    log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be changed so as to set the target output for normal logging
     # Warn message goes to both plugin log file and stash when sent to Stash log file.
-    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+    log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages
 
     def __init__(self,
                 debugTracing = None, # Set debugTracing to True so as to output debug and trace logging
-                logFormat = LOG_FORMAT, # Plugin log line format
+                logFormat = Constant.LOG_FORMAT.value, # Plugin log line format
                 dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file
                 maxbytes = 8*1024*1024, # Max size of plugin log file
                 backupcount = 2, # Backup counts when log file size reaches max size
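The defaults above can still be overridden per plugin after construction; only the member names change from the old LOG_TO_* constants to LogTo. A hedged sketch:

# Sketch: customizing the routing defaults on a hypothetical initialized helper `stash`.
stash.log_to_wrn_set = stash.LogTo.FILE + stash.LogTo.STASH                       # warnings to plugin log and Stash
stash.log_to_err_set = stash.LogTo.FILE + stash.LogTo.STDERR + stash.LogTo.STASH  # errors everywhere except the console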
@@ -113,19 +155,35 @@ class StashPluginHelper(StashInterface):
                stash_url = None, # Stash URL (endpoint URL) Example: http://localhost:9999
                apiKey = None, # API Key only needed when username and password set while running script via command line
                DebugTraceFieldName = "zzdebugTracing",
+               DebugFieldName = "zzDebug",
                DryRunFieldName = "zzdryRun",
-               setStashLoggerAsPluginLogger = False):
+               setStashLoggerAsPluginLogger = False,
+               DBG_LEVEL = DbgLevel.INF):
+        if DBG_LEVEL in list(self.DbgLevel):
+            self.DBG_LEVEL = DBG_LEVEL
+        if debugTracing:
+            self.DEBUG_TRACING = debugTracing
+            if self.DBG_LEVEL > self.DbgLevel.DBG:
+                self.DBG_LEVEL = self.DbgLevel.TRACE
+        elif self.DBG_LEVEL < self.DbgLevel.INF:
+            self.DEBUG_TRACING = True
         self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
-        if any(platform.win32_ver()):
+        if self.isWindows():
             self.IS_WINDOWS = True
+            self.OS_TYPE = self.OS_Type.WINDOWS
-        elif platform.system().lower().startswith("linux"):
+        elif self.isLinux():
             self.IS_LINUX = True
+            self.OS_TYPE = self.OS_Type.LINUX
             if self.isDocker():
                 self.IS_DOCKER = True
-        elif platform.system().lower().startswith("freebsd"):
+        elif self.isFreeBSD():
             self.IS_FREEBSD = True
+            self.OS_TYPE = self.OS_Type.FREEBSD
+            if self.isDocker():
+                self.IS_DOCKER = True
-        elif sys.platform == "darwin":
+        elif self.isMacOS():
             self.IS_MAC_OS = True
+            self.OS_TYPE = self.OS_Type.MAC_OS
         if logToWrnSet: self.log_to_wrn_set = logToWrnSet
         if logToErrSet: self.log_to_err_set = logToErrSet
         if logToNormSet: self.log_to_norm = logToNormSet
@@ -148,7 +206,6 @@ class StashPluginHelper(StashInterface):
         else:
             self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}
 
-        if debugTracing: self.DEBUG_TRACING = debugTracing
         if config:
             self.pluginConfig = config
             if self.Setting('apiKey', "") != "":
@@ -210,8 +267,14 @@ class StashPluginHelper(StashInterface):
             self.API_KEY = self.STASH_CONFIGURATION['apiKey']
 
         self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
-        self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
-        if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+        if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING):
+            self.DEBUG_TRACING = True
+            self.LOG_LEVEL = logging.TRACE
+            self.DBG_LEVEL = self.DbgLevel.TRACE
+        elif self.Setting(DebugFieldName, self.DEBUG_TRACING):
+            self.DEBUG_TRACING = True
+            self.LOG_LEVEL = logging.DEBUG
+            self.DBG_LEVEL = self.DbgLevel.DBG
 
         logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
         self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
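Note that the standard logging module defines no TRACE level, so logging.TRACE and pluginLog.trace(...) in this diff only work if a custom level is registered before this code runs; the diff itself does not show where that happens. A hedged sketch of one conventional way to register such a level:

# Assumption: a TRACE level below DEBUG has to be registered somewhere for
# logging.TRACE and Logger.trace() to exist. This is one common recipe, not
# necessarily what the plugin actually does.
import logging

logging.TRACE = 5                                  # hypothetical value below DEBUG (10)
logging.addLevelName(logging.TRACE, "TRACE")

def _trace(self, msg, *args, **kwargs):
    if self.isEnabledFor(logging.TRACE):
        self._log(logging.TRACE, msg, args, **kwargs)

logging.Logger.trace = _trace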
@@ -221,74 +284,104 @@ class StashPluginHelper(StashInterface):
     def __del__(self):
         self.thredPool.shutdown(wait=False)
 
-    def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+    def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False):
         if self.pluginSettings != None and name in self.pluginSettings:
             if notEmpty == False or self.pluginSettings[name] != "":
                 return self.pluginSettings[name]
         if self.pluginConfig != None and name in self.pluginConfig:
             if notEmpty == False or self.pluginConfig[name] != "":
                 return self.pluginConfig[name]
-        if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+        if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx:
             raise Exception(f"Missing {name} from both UI settings and config file settings.")
         return default
 
-    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False):
+        try:
             if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
                 logMsg = self.asc2(logMsg)
             else:
                 logMsg = logMsg
             if printTo == 0:
                 printTo = self.log_to_norm
-            elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+            elif printTo == self.LogTo.ERROR and logLevel == logging.INFO:
                 logLevel = logging.ERROR
                 printTo = self.log_to_err_set
-            elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+            elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO:
                 logLevel = logging.CRITICAL
                 printTo = self.log_to_err_set
-            elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+            elif printTo == self.LogTo.WARN and logLevel == logging.INFO:
                 logLevel = logging.WARN
                 printTo = self.log_to_wrn_set
             if lineNo == -1:
                 lineNo = inspect.currentframe().f_back.f_lineno
             LN_Str = f"[LN:{lineNo}]"
             # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
-            if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
-                if levelStr == "": levelStr = self.LEV_DBG
-                if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+            if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE):
+                if levelStr == "": levelStr = self.Level.DBG
+                if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}")
+            elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE):
+                if levelStr == "": levelStr = self.Level.DBG
+                if printTo & self.LogTo.FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.INFO or logLevel == logging.DEBUG:
-                if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
-                if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+                if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG
+                if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.WARN:
-                if levelStr == "": levelStr = self.LEV_WRN
-                if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+                if levelStr == "": levelStr = self.Level.WRN
+                if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.ERROR:
-                if levelStr == "": levelStr = self.LEV_ERR
-                if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+                if levelStr == "": levelStr = self.Level.ERR
+                if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
             elif logLevel == logging.CRITICAL:
-                if levelStr == "": levelStr = self.LEV_CRITICAL
-                if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
-                if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
-            if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+                if levelStr == "": levelStr = self.Level.CRITICAL
+                if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+                if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+            if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                 print(f"{LN_Str} {levelStr}{logMsg}")
-            if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+            if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                 print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+        except Exception as e:
+            if printLogException:
+                tb = traceback.format_exc()
+                print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}")
+            pass
 
     def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
-        if printTo == 0: printTo = self.LOG_TO_FILE
+        if printTo == 0: printTo = self.LogTo.FILE
+        if lineNo == -1:
+            lineNo = inspect.currentframe().f_back.f_lineno
+        logLev = logging.INFO if logAlways else logging.TRACE
+        if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+            if logMsg == "":
+                logMsg = f"Line number {lineNo}..."
+            self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii)
+
+    # Log once per session. Only logs the first time called from a particular line number in the code.
+    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+        lineNo = inspect.currentframe().f_back.f_lineno
+        if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+            FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+            if FuncAndLineNo in self.logLinePreviousHits:
+                return
+            self.logLinePreviousHits.append(FuncAndLineNo)
+            self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+    def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+        if printTo == 0: printTo = self.LogTo.FILE
         if lineNo == -1:
             lineNo = inspect.currentframe().f_back.f_lineno
         logLev = logging.INFO if logAlways else logging.DEBUG
         if self.DEBUG_TRACING or logAlways:
             if logMsg == "":
                 logMsg = f"Line number {lineNo}..."
-            self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+            self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii)
 
     # Log once per session. Only logs the first time called from a particular line number in the code.
-    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+    def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
         lineNo = inspect.currentframe().f_back.f_lineno
         if self.DEBUG_TRACING or logAlways:
             FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
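With this split, Trace is gated on DBG_LEVEL == DbgLevel.TRACE while Debug is gated on DEBUG_TRACING, and plugins normally call the wrappers rather than Log directly. A short usage sketch against a hypothetical initialized helper named `stash`:

# Sketch: typical calls into the reworked logging wrappers.
stash.Trace()                                              # logs "Line number N..." only at TRACE level
stash.Debug("parsed plugin settings", printTo=stash.LogTo.FILE)
stash.TraceOnce("only logged the first time this line runs")
stash.Warn("low disk space", printLogException=True)       # failures inside Log get printed instead of swallowed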
@@ -298,8 +391,8 @@ class StashPluginHelper(StashInterface):
             self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
 
     # Log INFO on first call, then do Trace on remaining calls.
-    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
-        if printTo == 0: printTo = self.LOG_TO_FILE
+    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False):
+        if printTo == 0: printTo = self.LogTo.FILE
         lineNo = inspect.currentframe().f_back.f_lineno
         FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
         if FuncAndLineNo in self.logLinePreviousHits:
@@ -307,17 +400,17 @@ class StashPluginHelper(StashInterface):
             self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
         else:
             self.logLinePreviousHits.append(FuncAndLineNo)
-            self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+            self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException)
 
-    def Warn(self, logMsg, printTo = 0, toAscii = None):
+    def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
         if printTo == 0: printTo = self.log_to_wrn_set
         lineNo = inspect.currentframe().f_back.f_lineno
-        self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+        self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException)
 
-    def Error(self, logMsg, printTo = 0, toAscii = None):
+    def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
         if printTo == 0: printTo = self.log_to_err_set
         lineNo = inspect.currentframe().f_back.f_lineno
-        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException)
 
     # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
     # The below non-logging functions use (lower) camelCase naming convention.
@@ -369,142 +462,93 @@ class StashPluginHelper(StashInterface):
         self.excludeMergeTags = excludeMergeTags
         self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
 
-    # Must call initMergeMetadata, before calling mergeMetadata
-    def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+    def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata
+        import requests
+        if self._mergeMetadata == None:
+            self.initMergeMetadata(excludeMergeTags)
+        errMsg = None
+        for i in range(0, retryCount):
+            try:
+                if errMsg != None:
+                    self.Warn(errMsg)
                 if type(SrcData) is int:
                     SrcData = self.find_scene(SrcData)
                     DestData = self.find_scene(DestData)
                 return self._mergeMetadata.merge(SrcData, DestData)
+            except (requests.exceptions.ConnectionError, ConnectionResetError) as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            except Exception as e:
+                tb = traceback.format_exc()
+                errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+            time.sleep(sleepSecondsBetweenRetry)
 
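initMergeMetadata only needs to run once; mergeMetadata now calls it lazily when needed and retries on dropped connections like the other wrappers. Sketch with placeholder scene ids:

# Sketch only: `stash`, srcSceneID and destSceneID are hypothetical placeholders.
stash.initMergeMetadata(excludeMergeTags=["Duplicate"])    # optional; mergeMetadata calls it if skipped
merged = stash.mergeMetadata(srcSceneID, destSceneID)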
def getUpdateProgressBarIter(self, qtyResults):
|
||||||
|
if qtyResults > 40000:
|
||||||
|
return 100
|
||||||
|
if qtyResults > 20000:
|
||||||
|
return 80
|
||||||
|
if qtyResults > 10000:
|
||||||
|
return 40
|
||||||
|
if qtyResults > 5000:
|
||||||
|
return 20
|
||||||
|
if qtyResults > 2000:
|
||||||
|
return 10
|
||||||
|
if qtyResults > 1000:
|
||||||
|
return 5
|
||||||
|
if qtyResults > 500:
|
||||||
|
return 3
|
||||||
|
if qtyResults > 200:
|
||||||
|
return 2
|
||||||
|
return 1
|
||||||
|
|
||||||
|
# Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
|
||||||
|
def setProgressBarIter(self, qtyResults):
|
||||||
|
self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
|
||||||
|
self.currentProgressbarIteration = 0
|
||||||
|
|
||||||
def progressBar(self, currentIndex, maxCount):
|
def progressBar(self, currentIndex, maxCount):
|
||||||
|
if self.updateProgressbarOnIter > 0:
|
||||||
|
self.currentProgressbarIteration+=1
|
||||||
|
if self.currentProgressbarIteration > self.updateProgressbarOnIter:
|
||||||
|
self.currentProgressbarIteration = 0
|
||||||
|
else:
|
||||||
|
return
|
||||||
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
|
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
|
||||||
|
try:
|
||||||
self.log.progress(progress)
|
self.log.progress(progress)
|
||||||
|
|
||||||
# Test via command line: pip uninstall -y pyYAML watchdog schedule requests
|
|
||||||
def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
|
|
||||||
retrnValue = True
|
|
||||||
for moduleName in moduleNames:
|
|
||||||
try: # Try Python 3.3 > way
|
|
||||||
import importlib
|
|
||||||
import importlib.util
|
|
||||||
if moduleName in sys.modules:
|
|
||||||
if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
|
|
||||||
elif self.isModuleInstalled(moduleName):
|
|
||||||
if not silent: self.Trace(f"Module {moduleName!r} is available.")
|
|
||||||
else:
|
|
||||||
if install and (results:=self.installModule(moduleName)) > 0:
|
|
||||||
if results == 1:
|
|
||||||
self.Log(f"Module {moduleName!r} has been installed")
|
|
||||||
else:
|
|
||||||
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
if install:
|
|
||||||
self.Error(f"Can't find the {moduleName!r} module")
|
|
||||||
retrnValue = False
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
try:
|
|
||||||
i = importlib.import_module(moduleName)
|
|
||||||
except ImportError as e:
|
|
||||||
if install and (results:=self.installModule(moduleName)) > 0:
|
|
||||||
if results == 1:
|
|
||||||
self.Log(f"Module {moduleName!r} has been installed")
|
|
||||||
else:
|
|
||||||
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
if install:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
|
|
||||||
retrnValue = False
|
|
||||||
return retrnValue
|
|
||||||
|
|
||||||
def isModuleInstalled(self, moduleName):
|
|
||||||
try:
|
|
||||||
__import__(moduleName)
|
|
||||||
# self.Trace(f"Module {moduleName!r} is installed")
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
self.Warn(f"Module {moduleName!r} is NOT installed!")
|
|
||||||
self.Trace(f"Error: {e}\nTraceBack={tb}")
|
|
||||||
pass
|
pass
|
||||||
return False
|
|
||||||
|
|
||||||
def installModule(self,moduleName):
|
|
||||||
# if not self.IS_DOCKER:
|
|
||||||
# try:
|
|
||||||
# self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
|
|
||||||
# First try pip import method. (This may fail in a future version of pip.)
|
|
||||||
# self.installPackage(moduleName)
|
|
||||||
# self.Trace(f"installPackage called for module {moduleName!r}")
|
|
||||||
# if self.modulesInstalled(moduleNames=[moduleName], install=False):
|
|
||||||
# self.Trace(f"Module {moduleName!r} installed")
|
|
||||||
# return 1
|
|
||||||
# self.Trace(f"Module {moduleName!r} still not installed.")
|
|
||||||
# except Exception as e:
|
|
||||||
# tb = traceback.format_exc()
|
|
||||||
# self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
|
|
||||||
# pass
|
|
||||||
# else:
|
|
||||||
# self.Trace("Running in Docker, so skipping pip import method.")
|
|
||||||
try:
|
|
||||||
if self.IS_LINUX:
|
|
||||||
# Note: Linux may first need : sudo apt install python3-pip
|
|
||||||
# if error starts with "Command 'pip' not found"
|
|
||||||
# or includes "No module named pip"
|
|
||||||
self.Log("Checking if pip installed.")
|
|
||||||
results = os.popen(f"pip --version").read()
|
|
||||||
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
|
|
||||||
results = os.popen(f"sudo apt install python3-pip").read()
|
|
||||||
results = os.popen(f"pip --version").read()
|
|
||||||
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
|
|
||||||
self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
|
|
||||||
return -1
|
|
||||||
self.Trace("pip good.")
|
|
||||||
if self.IS_FREEBSD:
|
|
||||||
self.Warn("installModule may NOT work on freebsd")
|
|
||||||
pipArg = ""
|
|
||||||
if self.IS_DOCKER:
|
|
||||||
pipArg = " --break-system-packages"
|
|
||||||
self.Log(f"Attempting to install package {moduleName!r} via popen.")
|
|
||||||
results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
|
|
||||||
results = results.strip("\n")
|
|
||||||
self.Trace(f"pip results = {results}")
|
|
||||||
if results.find("Requirement already satisfied:") > -1:
|
|
||||||
self.Trace(f"Requirement already satisfied for module {moduleName!r}")
|
|
||||||
return 2
|
|
||||||
elif results.find("Successfully installed") > -1:
|
|
||||||
self.Trace(f"Successfully installed module {moduleName!r}")
|
|
||||||
return 1
|
|
||||||
elif self.modulesInstalled(moduleNames=[moduleName], install=False):
|
|
||||||
self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
|
|
||||||
return 1
|
|
||||||
self.Error(f"Failed to install module {moduleName!r}")
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
|
|
||||||
return 0
|
|
||||||
|
|
||||||
def installPackage(self,package): # Should delete this. It doesn't work consistently
|
|
||||||
try:
|
|
||||||
import pip
|
|
||||||
if hasattr(pip, 'main'):
|
|
||||||
pip.main(['install', package])
|
|
||||||
self.Trace()
|
|
||||||
else:
|
|
||||||
pip._internal.main(['install', package])
|
|
||||||
self.Trace()
|
|
||||||
except Exception as e:
|
|
||||||
tb = traceback.format_exc()
|
|
||||||
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
|
|
||||||
return False
|
|
||||||
return True
|
|
||||||
|
|
||||||
def isDocker(self):
|
def isDocker(self):
|
||||||
cgroup = pathlib.Path('/proc/self/cgroup')
|
cgroup = pathlib.Path('/proc/self/cgroup')
|
||||||
return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
|
return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
|
||||||
|
|
||||||
|
def isWindows(self):
|
||||||
|
if any(platform.win32_ver()):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isLinux(self):
|
||||||
|
if platform.system().lower().startswith("linux"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isFreeBSD(self):
|
||||||
|
if platform.system().lower().startswith("freebsd"):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isMacOS(self):
|
||||||
|
if sys.platform == "darwin":
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def isWindows(self):
|
||||||
|
if any(platform.win32_ver()):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
|
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
|
||||||
if trace:
|
if trace:
|
||||||
self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
|
self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
|
||||||
@@ -539,7 +583,7 @@ class StashPluginHelper(StashInterface):
|
|||||||
itemToCk = itemToCk.lower()
|
itemToCk = itemToCk.lower()
|
||||||
index = -1
|
index = -1
|
||||||
lenItemMatch = 0
|
lenItemMatch = 0
|
||||||
returnValue = self.NOT_IN_LIST
|
returnValue = self.Constant.NOT_IN_LIST.value
|
||||||
for listItem in listToCk:
|
for listItem in listToCk:
|
||||||
index += 1
|
index += 1
|
||||||
if itemToCk.startswith(listItem.lower()):
|
if itemToCk.startswith(listItem.lower()):
|
||||||
@@ -598,17 +642,62 @@ class StashPluginHelper(StashInterface):
|
|||||||
self.update_scene(dataDict)
|
self.update_scene(dataDict)
|
||||||
return doesHaveTagName
|
return doesHaveTagName
|
||||||
|
|
||||||
def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False): # scene can be scene ID or scene metadata
|
def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata
|
||||||
|
errMsg = None
|
||||||
|
for i in range(0, retryCount):
|
||||||
|
try:
|
||||||
|
if errMsg != None:
|
||||||
|
self.Warn(errMsg)
|
||||||
scene_details = scene
|
scene_details = scene
|
||||||
if 'id' not in scene:
|
if 'id' not in scene:
|
||||||
scene_details = self.find_scene(scene)
|
scene_details = self.find_scene(scene)
|
||||||
tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
|
tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
|
||||||
for tag in scene_details['tags']:
|
for tag in scene_details['tags']:
|
||||||
if tag['name'] != tagName:
|
if tag['name'] == tagName:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
tagIds += [tag['id']]
|
tagIds += [tag['id']]
|
||||||
dataDict = {'id' : scene_details['id']}
|
dataDict = {'id' : scene_details['id']}
|
||||||
dataDict.update({'tag_ids' : tagIds})
|
dataDict.update({'tag_ids' : tagIds})
|
||||||
self.update_scene(dataDict)
|
self.update_scene(dataDict)
|
||||||
|
return True
|
||||||
|
except (ConnectionResetError):
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
except Exception as e:
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
time.sleep(sleepSecondsBetweenRetry)
|
||||||
|
|
||||||
|
def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5):
|
||||||
|
errMsg = None
|
||||||
|
for i in range(0, retryCount):
|
||||||
|
try:
|
||||||
|
if errMsg != None:
|
||||||
|
self.Warn(errMsg)
|
||||||
|
return self.update_scene(update_input, create)
|
||||||
|
except (ConnectionResetError):
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
except Exception as e:
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
time.sleep(sleepSecondsBetweenRetry)
|
||||||
|
|
||||||
|
def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
|
||||||
|
errMsg = None
|
||||||
|
for i in range(0, retryCount):
|
||||||
|
try:
|
||||||
|
if errMsg != None:
|
||||||
|
self.Warn(errMsg)
|
||||||
|
return self.destroy_scene(scene_id, delete_file)
|
||||||
|
except (ConnectionResetError):
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
except Exception as e:
|
||||||
|
tb = traceback.format_exc()
|
||||||
|
errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
|
||||||
|
time.sleep(sleepSecondsBetweenRetry)
|
||||||
|
|
||||||
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
|
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
|
||||||
"""Runs a plugin operation.
|
"""Runs a plugin operation.
|
||||||
|
|||||||
@@ -3,8 +3,10 @@
 # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 # Note: To call this script outside of Stash, pass argument --url and the Stash URL.
 # Example: python filemonitor.py --url http://localhost:9999
-import os, sys, time, pathlib, argparse, platform, traceback, logging
+import ModulesValidate
+ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"])
 from StashPluginHelper import StashPluginHelper
+import os, sys, time, pathlib, argparse, platform, traceback, logging
 from StashPluginHelper import taskQueue
 from threading import Lock, Condition
 from multiprocessing import shared_memory
@@ -31,6 +33,7 @@ parser.add_argument('--silent', '--quit', '-q', dest='quit', action='store_true'
|
|||||||
parser.add_argument('--apikey', '-a', dest='apikey', type=str, help='API Key')
|
parser.add_argument('--apikey', '-a', dest='apikey', type=str, help='API Key')
|
||||||
parse_args = parser.parse_args()
|
parse_args = parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
logToErrSet = 0
|
logToErrSet = 0
|
||||||
logToNormSet = 0
|
logToNormSet = 0
|
||||||
if parse_args.quit:
|
if parse_args.quit:
|
||||||
@@ -57,7 +60,6 @@ stash = StashPluginHelper(
|
|||||||
stash.status(logLevel=logging.DEBUG)
|
stash.status(logLevel=logging.DEBUG)
|
||||||
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
|
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
|
||||||
stash.Trace(f"stash.JSON_INPUT={stash.JSON_INPUT}")
|
stash.Trace(f"stash.JSON_INPUT={stash.JSON_INPUT}")
|
||||||
stash.modulesInstalled(["watchdog", "schedule", "requests"])
|
|
||||||
|
|
||||||
exitMsg = "Change success!!"
|
exitMsg = "Change success!!"
|
||||||
mutex = Lock()
|
mutex = Lock()
|
||||||
@@ -243,16 +245,24 @@ class StashScheduler: # Stash Scheduler
 
         result = None
         if task['task'] == "Clean":
+            result = self.jobIdOutput(stash.metadata_clean(dry_run=stash.DRY_RUN))
+        elif task['task'] == "Clean Path":
             result = self.jobIdOutput(stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN))
         elif task['task'] == "Clean Generated Files":
             result = self.jobIdOutput(stash.metadata_clean_generated())
         elif task['task'] == "Generate":
             result = self.jobIdOutput(stash.metadata_generate())
+        elif task['task'] == "Generate Phashes":
+            result = self.jobIdOutput(stash.metadata_generate({"phashes": True}))
         elif task['task'] == "Backup":
             result = self.jobIdOutput(self.runBackupTask(task))
         elif task['task'] == "Scan":
+            result = self.jobIdOutput(stash.metadata_scan())
+        elif task['task'] == "Scan Path":
             result = self.jobIdOutput(stash.metadata_scan(paths=targetPaths))
         elif task['task'] == "Auto Tag":
+            result = self.jobIdOutput(stash.metadata_autotag())
+        elif task['task'] == "Auto Tag Path":
             result = self.jobIdOutput(stash.metadata_autotag(paths=targetPaths))
         elif task['task'] == "Optimise Database":
             result = self.jobIdOutput(stash.optimise_database())
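The dispatcher above distinguishes the path-less tasks ("Clean", "Scan", "Auto Tag") from their new "... Path" variants and adds "Generate Phashes". A hedged sketch of what matching schedule entries could look like, reusing the field names of the existing config format (paths are example values only):

# Sketch: schedule entries for the new task variants.
example_schedules = [
    {"task" : "Generate Phashes", "weekday" : "sunday", "time" : "01:00"},
    {"task" : "Scan Path", "paths" : [r"C:\Media\NewStuff"], "weekday" : "sunday", "time" : "02:00"},
    {"task" : "Auto Tag Path", "paths" : [r"C:\Media\NewStuff"], "weekday" : "sunday", "time" : "03:00"},
]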
@@ -280,6 +290,11 @@ class StashScheduler: # Stash Scheduler
|
|||||||
if 'msg' in task and task['msg'] != "":
|
if 'msg' in task and task['msg'] != "":
|
||||||
Msg = task['msg']
|
Msg = task['msg']
|
||||||
result = stash.TraceOnce(Msg)
|
result = stash.TraceOnce(Msg)
|
||||||
|
elif task['task'] == "DebugOnce":
|
||||||
|
Msg = "Scheduled DebugOnce."
|
||||||
|
if 'msg' in task and task['msg'] != "":
|
||||||
|
Msg = task['msg']
|
||||||
|
result = stash.DebugOnce(Msg)
|
||||||
elif task['task'] == "CheckStashIsRunning":
|
elif task['task'] == "CheckStashIsRunning":
|
||||||
result = self.checkStashIsRunning(task)
|
result = self.checkStashIsRunning(task)
|
||||||
elif task['task'] == "python":
|
elif task['task'] == "python":
|
||||||
@@ -634,14 +649,14 @@ def start_library_monitor():
|
|||||||
if TargetPath == SPECIAL_FILE_NAME:
|
if TargetPath == SPECIAL_FILE_NAME:
|
||||||
if os.path.isfile(SPECIAL_FILE_NAME):
|
if os.path.isfile(SPECIAL_FILE_NAME):
|
||||||
shm_buffer[0] = STOP_RUNNING_SIG
|
shm_buffer[0] = STOP_RUNNING_SIG
|
||||||
stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH)
|
stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LogTo.FILE + stash.LogTo.CONSOLE + stash.LogTo.STASH)
|
||||||
else:
|
else:
|
||||||
stash.Trace(f"[SpFl]Did not find file {SPECIAL_FILE_NAME}.")
|
stash.Trace(f"[SpFl]Did not find file {SPECIAL_FILE_NAME}.")
|
||||||
|
|
||||||
# Make sure special file does not exist, incase change was missed.
|
# Make sure special file does not exist, incase change was missed.
|
||||||
if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME) and shm_buffer[0] == CONTINUE_RUNNING_SIG:
|
if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME) and shm_buffer[0] == CONTINUE_RUNNING_SIG:
|
||||||
shm_buffer[0] = STOP_RUNNING_SIG
|
shm_buffer[0] = STOP_RUNNING_SIG
|
||||||
stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LOG_TO_FILE + stash.LOG_TO_CONSOLE + stash.LOG_TO_STASH)
|
stash.Log(f"[SpFl]Detected trigger file to kill FileMonitor. {SPECIAL_FILE_NAME}", printTo = stash.LogTo.FILE + stash.LogTo.CONSOLE + stash.LogTo.STASH)
|
||||||
TargetPaths = []
|
TargetPaths = []
|
||||||
TmpTargetPaths = list(set(TmpTargetPaths))
|
TmpTargetPaths = list(set(TmpTargetPaths))
|
||||||
if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []:
|
if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []:
|
||||||
|
|||||||
@@ -21,9 +21,10 @@ config = {
 
     # The following tasks are scheduled weekly
     # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py
-    {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM)
-    {"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:30"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3:30AM)
-    {"task" : "Generate", "weekday" : "saturday", "time" : "04:00"}, # Generated Content-> [Generate] (Every saturday at 4AM)
+    {"task" : "Scan", "weekday" : "saturday", "time" : "02:30"}, # Library -> [Scan] (Weekly) (Every saturday at 2:30AM)
+    {"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:00"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3AM)
+    {"task" : "Generate", "weekday" : "saturday", "time" : "03:30"}, # Generated Content-> [Generate] (Every saturday at 3:30AM)
+    {"task" : "Generate Phashes", "weekday" : "saturday", "time" : "04:00"}, # [Generate Phashes] (Every saturday at 4AM)
     {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
     {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
     {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
@@ -15,6 +15,7 @@ self_unit_test = {
     {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
     {"task" : "LogOnce", "seconds" :15}, # Test LogOnce
     {"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
+    {"task" : "DebugOnce", "seconds" : 5}, # Test DebugOnce
     {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
     {"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
     # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
@@ -23,14 +24,18 @@ self_unit_test = {
     # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
     {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
     {"task" : "Generate", "weekday" : "every", "time" : "06:17"},
+    {"task" : "Generate Phashes", "weekday" : "every", "time" : "06:17"},
     {"task" : "Clean", "weekday" : "every", "time" : "06:17"},
+    {"task" : "Clean Path", "weekday" : "every", "time" : "06:17"},
     {"task" : "Auto Tag", "weekday" : "every", "time" : "06:17"},
+    {"task" : "Auto Tag Path", "weekday" : "every", "time" : "06:17"},
     {"task" : "Optimise Database", "weekday" : "every", "time" : "06:17"},
     {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Running plugin task: Create Tags
     {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "06:17"}, # Does NOT run in the task queue
     {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
     {"task" : "DupFileManager", "taskName" : "Delete Tagged Duplicates", "weekday" : "every", "time" : "06:17"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
-    {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "06:17"},
+    {"task" : "Scan", "weekday" : "every", "time" : "06:17"},
+    {"task" : "Scan Path","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "06:17"},
     {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Optimising database...
     {"task" : "Clean Generated Files", "weekday" : "every", "time" : "06:17"},
     {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "06:17"}, # In task queue as -> Migrating scene hashes...
@@ -11,9 +11,9 @@ task_examples = {
  {"task" : "python", "script" : "<plugin_path>test_script_hello_world.py", "args" : "--MyArguments Hello", "weekday" : "monday", "time" : "DISABLED"}, # change "DISABLED" to valid time

  # Example#A3: The following task types can optionally take a [paths] field. If the paths field does not exist, the paths in the Stash library are used.
- {"task" : "Scan", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
+ {"task" : "Scan Path", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Library -> [Scan]
- {"task" : "Auto Tag", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
+ {"task" : "Auto Tag Path", "paths" : [r"E:\MyVideos\downloads", r"V:\MyOtherVideos"], "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "DISABLED"}, # Auto Tag -> [Auto Tag]
- {"task" : "Clean", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]
+ {"task" : "Clean Path", "paths" : ["E:\\MyVideos\\downloads", "V:\\MyOtherVideos"], "weekday" : "sunday", "time" : "DISABLED"}, # Maintenance -> [Clean]

  # Example#A4: Task which calls Migrations -> [Rename generated files]
  {"task" : "RenameGeneratedFiles", "weekday" : "tuesday,thursday", "time" : "DISABLED"}, # (bi-weekly) example
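All of these scheduler entries share one shape: a task name plus either a weekday/time pair or an interval, with optional task-specific fields. A minimal sketch of a custom entry built only from fields that appear above (the chosen task name, day, and time are illustrative and not part of this commit):

# Hypothetical schedule entry; "Backup" only appears in the RunAfter example above, and the timing here is made up.
custom_entry = {"task" : "Backup", "weekday" : "sunday", "time" : "01:00"}  # would be appended to the schedule list above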
126
plugins/RenameFile/ModulesValidate.py
Normal file
@@ -0,0 +1,126 @@
# ModulesValidate (By David Maisonave aka Axter)
# Description:
#   Checks if packages are installed, and optionally installs packages if missing.
#   The below example usage code should be placed at the very top of the source code before any other imports.
# Example Usage:
#   import ModulesValidate
#   ModulesValidate.modulesInstalled(["watchdog", "schedule", "requests"])
# Testing:
#   To test, uninstall packages via command line: pip uninstall -y watchdog schedule requests
import sys, os, pathlib, platform, traceback
# ToDo: Add logic to optionally pull package requirements from requirements.txt file.

def modulesInstalled(moduleNames, install=True, silent=False):
    retrnValue = True
    for moduleName in moduleNames:
        try: # Try Python 3.3 > way
            import importlib
            import importlib.util
            if moduleName in sys.modules:
                if not silent: print(f"{moduleName!r} already in sys.modules")
            elif isModuleInstalled(moduleName):
                if not silent: print(f"Module {moduleName!r} is available.")
            else:
                if install and (results:=installModule(moduleName)) > 0:
                    if results == 1:
                        print(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: print(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        print(f"Can't find the {moduleName!r} module")
                    retrnValue = False
        except Exception as e:
            try:
                i = importlib.import_module(moduleName)
            except ImportError as e:
                if install and (results:=installModule(moduleName)) > 0:
                    if results == 1:
                        print(f"Module {moduleName!r} has been installed")
                    else:
                        if not silent: print(f"Module {moduleName!r} is already installed")
                    continue
                else:
                    if install:
                        tb = traceback.format_exc()
                        print(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
                    retrnValue = False
    return retrnValue

def isModuleInstalled(moduleName):
    try:
        __import__(moduleName)
        return True
    except Exception as e:
        pass
    return False

def installModule(moduleName):
    try:
        if isLinux():
            # Note: Linux may first need: sudo apt install python3-pip
            # if error starts with "Command 'pip' not found"
            # or includes "No module named pip"
            results = os.popen(f"pip --version").read()
            if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                results = os.popen(f"sudo apt install python3-pip").read()
                results = os.popen(f"pip --version").read()
                if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
                    return -1
        if isFreeBSD():
            print("Warning: installModule may NOT work on freebsd")
        pipArg = ""
        if isDocker():
            pipArg = " --break-system-packages"
        results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
        results = results.strip("\n")
        if results.find("Requirement already satisfied:") > -1:
            return 2
        elif results.find("Successfully installed") > -1:
            return 1
        elif modulesInstalled(moduleNames=[moduleName], install=False):
            return 1
    except Exception as e:
        pass
    return 0

def installPackage(package): # Should delete this. It doesn't work consistently
    try:
        import pip
        if hasattr(pip, 'main'):
            pip.main(['install', package])
        else:
            pip._internal.main(['install', package])
    except Exception as e:
        return False
    return True

def isDocker():
    cgroup = pathlib.Path('/proc/self/cgroup')
    return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()

def isWindows():
    if any(platform.win32_ver()):
        return True
    return False

def isLinux():
    if platform.system().lower().startswith("linux"):
        return True
    return False

def isFreeBSD():
    if platform.system().lower().startswith("freebsd"):
        return True
    return False

def isMacOS():
    if sys.platform == "darwin":
        return True
    return False

def isWindows():
    if any(platform.win32_ver()):
        return True
    return False
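Since this copy of ModulesValidate.py ships inside the RenameFile plugin, the intended call pattern is the one its header describes: validate third-party packages before importing anything that depends on them. A minimal sketch under that assumption (the package list is illustrative; the renamefile.py change later in this commit does the same thing with ["requests"]):

# Hypothetical top of a plugin script; package names are examples only.
import ModulesValidate
ModulesValidate.modulesInstalled(["requests"])  # returns False if a package is missing and could not be installed
import requests  # safe to import only after validation has had a chance to install it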
@@ -1,12 +1,3 @@
- from stashapi.stashapp import StashInterface
- from logging.handlers import RotatingFileHandler
- import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
- import concurrent.futures
- from stashapi.stash_types import PhashDistance
- import __main__
-
- _ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
-
  # StashPluginHelper (By David Maisonave aka Axter)
  # See end of this file for example usage
  # Log Features:
@@ -24,6 +15,14 @@ _ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"
  # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
  # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
  # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
+ from stashapi.stashapp import StashInterface
+ from logging.handlers import RotatingFileHandler
+ import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
+ import concurrent.futures
+ from stashapi.stash_types import PhashDistance
+ from enum import Enum, IntEnum
+ import __main__
+
  class StashPluginHelper(StashInterface):
      # Primary Members for external reference
      PLUGIN_TASK_NAME = None
@@ -45,15 +44,44 @@ class StashPluginHelper(StashInterface):
      API_KEY = None
      excludeMergeTags = None

+     # class EnumInt(IntEnum):
+     #     def __repr__(self) -> str:
+     #         return f"{self.__class__.__name__}.{self.name}"
+     #     def __str__(self) -> str:
+     #         return str(self.value)
+     #     def serialize(self):
+     #         return self.value
+
+     class EnumValue(Enum):
+         def __repr__(self) -> str:
+             return f"{self.__class__.__name__}.{self.name}"
+         def __str__(self) -> str:
+             return str(self.value)
+         def __add__(self, other):
+             return self.value + other.value
+         def serialize(self):
+             return self.value
+
      # printTo argument
-     LOG_TO_FILE = 1
-     LOG_TO_CONSOLE = 2   # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
-     LOG_TO_STDERR = 4    # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
-     LOG_TO_STASH = 8
-     LOG_TO_WARN = 16
-     LOG_TO_ERROR = 32
-     LOG_TO_CRITICAL = 64
-     LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
+     class LogTo(IntEnum):
+         FILE = 1
+         CONSOLE = 2   # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
+         STDERR = 4    # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
+         STASH = 8
+         WARN = 16
+         ERROR = 32
+         CRITICAL = 64
+         ALL = FILE + CONSOLE + STDERR + STASH
+
+     class DbgLevel(IntEnum):
+         TRACE = 1
+         DBG = 2
+         INF = 3
+         WRN = 4
+         ERR = 5
+         CRITICAL = 6
+
+     DBG_LEVEL = DbgLevel.INF

      # Misc class variables
      MAIN_SCRIPT_NAME = None
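Because LogTo is an IntEnum, its members still add together into the same bit flags the old LOG_TO_* constants produced, so call sites can keep the additive style. A small sketch of a caller under that assumption (the stash variable name is illustrative):

# Illustrative only: combining log targets the same way log_to_err_set does below.
stash.log_to_norm = StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.CONSOLE
stash.Log("hello", printTo=StashPluginHelper.LogTo.FILE + StashPluginHelper.LogTo.STASH)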
@@ -62,7 +90,17 @@ class StashPluginHelper(StashInterface):
      LOG_FILE_NAME = None
      STDIN_READ = None
      stopProcessBarSpin = True
-     NOT_IN_LIST = 2147483646
+     updateProgressbarOnIter = 0
+     currentProgressbarIteration = 0
+
+     class OS_Type(IntEnum):
+         WINDOWS = 1
+         LINUX = 2
+         MAC_OS = 3
+         FREEBSD = 4
+         UNKNOWN_OS = 5
+
+     OS_TYPE = OS_Type.UNKNOWN_OS
+
      IS_DOCKER = False
      IS_WINDOWS = False
@@ -79,25 +117,29 @@ class StashPluginHelper(StashInterface):
      convertToAscii = False # If set True, it takes precedence over encodeToUtf8

      # Prefix message value
-     LEV_TRACE = "TRACE: "
-     LEV_DBG = "DBG: "
-     LEV_INF = "INF: "
-     LEV_WRN = "WRN: "
-     LEV_ERR = "ERR: "
-     LEV_CRITICAL = "CRITICAL: "
+     class Level(EnumValue):
+         TRACE = "TRACE: "
+         DBG = "DBG: "
+         INF = "INF: "
+         WRN = "WRN: "
+         ERR = "ERR: "
+         CRITICAL = "CRITICAL: "
+
+     class Constant(EnumValue):
          # Default format
          LOG_FORMAT = "[%(asctime)s] %(message)s"
+         ARGUMENT_UNSPECIFIED = "_ARGUMENT_UNSPECIFIED_"
+         NOT_IN_LIST = 2147483646

      # Externally modifiable variables
-     log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
+     log_to_err_set = LogTo.FILE + LogTo.STDERR # This can be changed by the calling source in order to customize what targets get error messages
-     log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set target output for normal logging
+     log_to_norm = LogTo.FILE + LogTo.CONSOLE # Can be changed so as to set target output for normal logging
      # Warn message goes to both plugin log file and stash when sent to Stash log file.
-     log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages
+     log_to_wrn_set = LogTo.STASH # This can be changed by the calling source in order to customize what targets get warning messages

      def __init__(self,
                   debugTracing = None,            # Set debugTracing to True so as to output debug and trace logging
-                  logFormat = LOG_FORMAT,         # Plugin log line format
+                  logFormat = Constant.LOG_FORMAT.value, # Plugin log line format
                   dateFmt = "%y%m%d %H:%M:%S",    # Date format when logging to plugin log file
                   maxbytes = 8*1024*1024,         # Max size of plugin log file
                   backupcount = 2,                # Backup counts when log file size reaches max size
@@ -113,19 +155,35 @@ class StashPluginHelper(StashInterface):
                   stash_url = None,               # Stash URL (endpoint URL) Example: http://localhost:9999
                   apiKey = None,                  # API Key only needed when username and password set while running script via command line
                   DebugTraceFieldName = "zzdebugTracing",
+                  DebugFieldName = "zzDebug",
                   DryRunFieldName = "zzdryRun",
-                  setStashLoggerAsPluginLogger = False):
+                  setStashLoggerAsPluginLogger = False,
+                  DBG_LEVEL = DbgLevel.INF):
+         if DBG_LEVEL in list(self.DbgLevel):
+             self.DBG_LEVEL = DBG_LEVEL
+         if debugTracing:
+             self.DEBUG_TRACING = debugTracing
+             if self.DBG_LEVEL > self.DbgLevel.DBG:
+                 self.DBG_LEVEL = self.DbgLevel.TRACE
+         elif self.DBG_LEVEL < self.DbgLevel.INF:
+             self.DEBUG_TRACING = True
          self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
-         if any(platform.win32_ver()):
+         if self.isWindows():
              self.IS_WINDOWS = True
+             self.OS_TYPE = self.OS_Type.WINDOWS
-         elif platform.system().lower().startswith("linux"):
+         elif self.isLinux():
              self.IS_LINUX = True
+             self.OS_TYPE = self.OS_Type.LINUX
              if self.isDocker():
                  self.IS_DOCKER = True
-         elif platform.system().lower().startswith("freebsd"):
+         elif self.isFreeBSD():
              self.IS_FREEBSD = True
+             self.OS_TYPE = self.OS_Type.FREEBSD
+             if self.isDocker():
+                 self.IS_DOCKER = True
-         elif sys.platform == "darwin":
+         elif self.isMacOS():
              self.IS_MAC_OS = True
+             self.OS_TYPE = self.OS_Type.MAC_OS
          if logToWrnSet: self.log_to_wrn_set = logToWrnSet
          if logToErrSet: self.log_to_err_set = logToErrSet
          if logToNormSet: self.log_to_norm = logToNormSet
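With the constructor now accepting DBG_LEVEL and a separate zzDebug UI field, a plugin can pick its verbosity up front. A minimal sketch of a call site using only parameters visible in this diff (the argument values and the config variable are illustrative, not part of this commit):

# Hypothetical construction; values are examples only.
stash = StashPluginHelper(
    config = config,                              # plugin's config dict
    DebugFieldName = "zzDebug",                   # new UI field consulted at startup
    DBG_LEVEL = StashPluginHelper.DbgLevel.DBG,   # new keyword argument added in this commit
)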
@@ -148,7 +206,6 @@ class StashPluginHelper(StashInterface):
          else:
              self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}

-         if debugTracing: self.DEBUG_TRACING = debugTracing
          if config:
              self.pluginConfig = config
              if self.Setting('apiKey', "") != "":
@@ -210,8 +267,14 @@ class StashPluginHelper(StashInterface):
              self.API_KEY = self.STASH_CONFIGURATION['apiKey']

          self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
-         self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
-         if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG
+         if self.Setting(DebugTraceFieldName, self.DEBUG_TRACING):
+             self.DEBUG_TRACING = True
+             self.LOG_LEVEL = logging.TRACE
+             self.DBG_LEVEL = self.DbgLevel.TRACE
+         elif self.Setting(DebugFieldName, self.DEBUG_TRACING):
+             self.DEBUG_TRACING = True
+             self.LOG_LEVEL = logging.DEBUG
+             self.DBG_LEVEL = self.DbgLevel.DBG

          logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
          self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
@@ -221,74 +284,104 @@ class StashPluginHelper(StashInterface):
      def __del__(self):
          self.thredPool.shutdown(wait=False)

-     def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
+     def Setting(self, name, default=Constant.ARGUMENT_UNSPECIFIED.value, raiseEx=True, notEmpty=False):
          if self.pluginSettings != None and name in self.pluginSettings:
              if notEmpty == False or self.pluginSettings[name] != "":
                  return self.pluginSettings[name]
          if self.pluginConfig != None and name in self.pluginConfig:
              if notEmpty == False or self.pluginConfig[name] != "":
                  return self.pluginConfig[name]
-         if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
+         if default == self.Constant.ARGUMENT_UNSPECIFIED.value and raiseEx:
              raise Exception(f"Missing {name} from both UI settings and config file settings.")
          return default

-     def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
+     def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None, printLogException = False):
+         try:
              if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
                  logMsg = self.asc2(logMsg)
              else:
                  logMsg = logMsg
              if printTo == 0:
                  printTo = self.log_to_norm
-             elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
+             elif printTo == self.LogTo.ERROR and logLevel == logging.INFO:
                  logLevel = logging.ERROR
                  printTo = self.log_to_err_set
-             elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
+             elif printTo == self.LogTo.CRITICAL and logLevel == logging.INFO:
                  logLevel = logging.CRITICAL
                  printTo = self.log_to_err_set
-             elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
+             elif printTo == self.LogTo.WARN and logLevel == logging.INFO:
                  logLevel = logging.WARN
                  printTo = self.log_to_wrn_set
              if lineNo == -1:
                  lineNo = inspect.currentframe().f_back.f_lineno
              LN_Str = f"[LN:{lineNo}]"
              # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
-             if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
-                 if levelStr == "": levelStr = self.LEV_DBG
-                 if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
-                 if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
+             if logLevel == logging.TRACE and (logAlways == False or self.LOG_LEVEL == logging.TRACE):
+                 if levelStr == "": levelStr = self.Level.DBG
+                 if printTo & self.LogTo.FILE: self.pluginLog.trace(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.trace(f"{LN_Str} {levelStr}{logMsg}")
+             elif logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG or self.LOG_LEVEL == logging.TRACE):
+                 if levelStr == "": levelStr = self.Level.DBG
+                 if printTo & self.LogTo.FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
              elif logLevel == logging.INFO or logLevel == logging.DEBUG:
-                 if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
+                 if levelStr == "": levelStr = self.Level.INF if logLevel == logging.INFO else self.Level.DBG
-                 if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
-                 if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
              elif logLevel == logging.WARN:
-                 if levelStr == "": levelStr = self.LEV_WRN
+                 if levelStr == "": levelStr = self.Level.WRN
-                 if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
-                 if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
              elif logLevel == logging.ERROR:
-                 if levelStr == "": levelStr = self.LEV_ERR
+                 if levelStr == "": levelStr = self.Level.ERR
-                 if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
-                 if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
              elif logLevel == logging.CRITICAL:
-                 if levelStr == "": levelStr = self.LEV_CRITICAL
+                 if levelStr == "": levelStr = self.Level.CRITICAL
-                 if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
-                 if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
+                 if printTo & self.LogTo.STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
-             if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+             if (printTo & self.LogTo.CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                  print(f"{LN_Str} {levelStr}{logMsg}")
-             if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
+             if (printTo & self.LogTo.STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
                  print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)
+         except Exception as e:
+             if printLogException:
+                 tb = traceback.format_exc()
+                 print(f"Exception calling [Log]; Error: {e}\nTraceBack={tb}")
+             pass

      def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
-         if printTo == 0: printTo = self.LOG_TO_FILE
+         if printTo == 0: printTo = self.LogTo.FILE
+         if lineNo == -1:
+             lineNo = inspect.currentframe().f_back.f_lineno
+         logLev = logging.INFO if logAlways else logging.TRACE
+         if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+             if logMsg == "":
+                 logMsg = f"Line number {lineNo}..."
+             self.Log(logMsg, printTo, logLev, lineNo, self.Level.TRACE, logAlways, toAscii=toAscii)
+
+     # Log once per session. Only logs the first time called from a particular line number in the code.
+     def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+         lineNo = inspect.currentframe().f_back.f_lineno
+         if self.DBG_LEVEL == self.DbgLevel.TRACE or logAlways:
+             FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
+             if FuncAndLineNo in self.logLinePreviousHits:
+                 return
+             self.logLinePreviousHits.append(FuncAndLineNo)
+             self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
+
+     def Debug(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
+         if printTo == 0: printTo = self.LogTo.FILE
          if lineNo == -1:
              lineNo = inspect.currentframe().f_back.f_lineno
          logLev = logging.INFO if logAlways else logging.DEBUG
          if self.DEBUG_TRACING or logAlways:
              if logMsg == "":
                  logMsg = f"Line number {lineNo}..."
-             self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)
+             self.Log(logMsg, printTo, logLev, lineNo, self.Level.DBG, logAlways, toAscii=toAscii)

      # Log once per session. Only logs the first time called from a particular line number in the code.
-     def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
+     def DebugOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
          lineNo = inspect.currentframe().f_back.f_lineno
          if self.DEBUG_TRACING or logAlways:
              FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
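Worth noting: the new Trace and Log paths compare against logging.TRACE and call pluginLog.trace(), neither of which exists in the standard logging module, so the helper presumably registers a custom level somewhere not shown in this diff. A sketch of the usual way such a level is added (the names and the chosen numeric value are assumptions, not taken from this commit):

# Illustrative registration of a TRACE level below DEBUG.
import logging
TRACE = 5                                # any value below logging.DEBUG (10)
logging.TRACE = TRACE
logging.addLevelName(TRACE, "TRACE")
def _trace(self, msg, *args, **kwargs):
    # Mirrors Logger.debug/info for the custom level.
    if self.isEnabledFor(TRACE):
        self._log(TRACE, msg, args, **kwargs)
logging.Logger.trace = _trace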
@@ -298,8 +391,8 @@ class StashPluginHelper(StashInterface):
              self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)

      # Log INFO on first call, then do Trace on remaining calls.
-     def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
+     def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None, printLogException = False):
-         if printTo == 0: printTo = self.LOG_TO_FILE
+         if printTo == 0: printTo = self.LogTo.FILE
          lineNo = inspect.currentframe().f_back.f_lineno
          FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
          if FuncAndLineNo in self.logLinePreviousHits:
@@ -307,17 +400,17 @@ class StashPluginHelper(StashInterface):
              self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
          else:
              self.logLinePreviousHits.append(FuncAndLineNo)
-             self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)
+             self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii, printLogException=printLogException)

-     def Warn(self, logMsg, printTo = 0, toAscii = None):
+     def Warn(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
          if printTo == 0: printTo = self.log_to_wrn_set
          lineNo = inspect.currentframe().f_back.f_lineno
-         self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)
+         self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii, printLogException=printLogException)

-     def Error(self, logMsg, printTo = 0, toAscii = None):
+     def Error(self, logMsg, printTo = 0, toAscii = None, printLogException = False):
          if printTo == 0: printTo = self.log_to_err_set
          lineNo = inspect.currentframe().f_back.f_lineno
-         self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
+         self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii, printLogException=printLogException)

      # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
      # The below non-logging functions use (lower) camelCase naming convention.
@@ -369,142 +462,93 @@ class StashPluginHelper(StashInterface):
          self.excludeMergeTags = excludeMergeTags
          self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)

-     # Must call initMergeMetadata, before calling mergeMetadata
-     def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
+     def mergeMetadata(self, SrcData, DestData, retryCount = 12, sleepSecondsBetweenRetry = 5, excludeMergeTags=None): # Input arguments can be scene ID or scene metadata
+         import requests
+         if self._mergeMetadata == None:
+             self.initMergeMetadata(excludeMergeTags)
+         errMsg = None
+         for i in range(0, retryCount):
+             try:
+                 if errMsg != None:
+                     self.Warn(errMsg)
                  if type(SrcData) is int:
                      SrcData = self.find_scene(SrcData)
                      DestData = self.find_scene(DestData)
                  return self._mergeMetadata.merge(SrcData, DestData)
+             except (requests.exceptions.ConnectionError, ConnectionResetError) as e: # "as e" added; the message below references e
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             except Exception as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [mergeMetadata]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             time.sleep(sleepSecondsBetweenRetry)
+
+     def getUpdateProgressBarIter(self, qtyResults):
+         if qtyResults > 40000:
+             return 100
+         if qtyResults > 20000:
+             return 80
+         if qtyResults > 10000:
+             return 40
+         if qtyResults > 5000:
+             return 20
+         if qtyResults > 2000:
+             return 10
+         if qtyResults > 1000:
+             return 5
+         if qtyResults > 500:
+             return 3
+         if qtyResults > 200:
+             return 2
+         return 1
+
+     # Use setProgressBarIter to reduce traffic to the server by only updating the progressBar every X(updateProgressbarOnIter) iteration.
+     def setProgressBarIter(self, qtyResults):
+         self.updateProgressbarOnIter = self.getUpdateProgressBarIter(qtyResults)
+         self.currentProgressbarIteration = 0

      def progressBar(self, currentIndex, maxCount):
+         if self.updateProgressbarOnIter > 0:
+             self.currentProgressbarIteration+=1
+             if self.currentProgressbarIteration > self.updateProgressbarOnIter:
+                 self.currentProgressbarIteration = 0
+             else:
+                 return
          progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
+         try:
              self.log.progress(progress)
-
-     # Test via command line: pip uninstall -y pyYAML watchdog schedule requests
-     def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
-         retrnValue = True
-         for moduleName in moduleNames:
-             try: # Try Python 3.3 > way
-                 import importlib
-                 import importlib.util
-                 if moduleName in sys.modules:
-                     if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
-                 elif self.isModuleInstalled(moduleName):
-                     if not silent: self.Trace(f"Module {moduleName!r} is available.")
-                 else:
-                     if install and (results:=self.installModule(moduleName)) > 0:
-                         if results == 1:
-                             self.Log(f"Module {moduleName!r} has been installed")
-                         else:
-                             if not silent: self.Trace(f"Module {moduleName!r} is already installed")
-                         continue
-                     else:
-                         if install:
-                             self.Error(f"Can't find the {moduleName!r} module")
-                         retrnValue = False
              except Exception as e:
-                 try:
-                     i = importlib.import_module(moduleName)
-                 except ImportError as e:
-                     if install and (results:=self.installModule(moduleName)) > 0:
-                         if results == 1:
-                             self.Log(f"Module {moduleName!r} has been installed")
-                         else:
-                             if not silent: self.Trace(f"Module {moduleName!r} is already installed")
-                         continue
-                     else:
-                         if install:
-                             tb = traceback.format_exc()
-                             self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
-                         retrnValue = False
-         return retrnValue
-
-     def isModuleInstalled(self, moduleName):
-         try:
-             __import__(moduleName)
-             # self.Trace(f"Module {moduleName!r} is installed")
-             return True
-         except Exception as e:
-             tb = traceback.format_exc()
-             self.Warn(f"Module {moduleName!r} is NOT installed!")
-             self.Trace(f"Error: {e}\nTraceBack={tb}")
                  pass
-         return False
-
-     def installModule(self,moduleName):
-         # if not self.IS_DOCKER:
-             # try:
-                 # self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
-                 # First try pip import method. (This may fail in a future version of pip.)
-                 # self.installPackage(moduleName)
-                 # self.Trace(f"installPackage called for module {moduleName!r}")
-                 # if self.modulesInstalled(moduleNames=[moduleName], install=False):
-                     # self.Trace(f"Module {moduleName!r} installed")
-                     # return 1
-                 # self.Trace(f"Module {moduleName!r} still not installed.")
-             # except Exception as e:
-                 # tb = traceback.format_exc()
-                 # self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
-                 # pass
-         # else:
-             # self.Trace("Running in Docker, so skipping pip import method.")
-         try:
-             if self.IS_LINUX:
-                 # Note: Linux may first need : sudo apt install python3-pip
-                 # if error starts with "Command 'pip' not found"
-                 # or includes "No module named pip"
-                 self.Log("Checking if pip installed.")
-                 results = os.popen(f"pip --version").read()
-                 if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
-                     results = os.popen(f"sudo apt install python3-pip").read()
-                     results = os.popen(f"pip --version").read()
-                     if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
-                         self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
-                         return -1
-                 self.Trace("pip good.")
-             if self.IS_FREEBSD:
-                 self.Warn("installModule may NOT work on freebsd")
-             pipArg = ""
-             if self.IS_DOCKER:
-                 pipArg = " --break-system-packages"
-             self.Log(f"Attempting to install package {moduleName!r} via popen.")
-             results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
-             results = results.strip("\n")
-             self.Trace(f"pip results = {results}")
-             if results.find("Requirement already satisfied:") > -1:
-                 self.Trace(f"Requirement already satisfied for module {moduleName!r}")
-                 return 2
-             elif results.find("Successfully installed") > -1:
-                 self.Trace(f"Successfully installed module {moduleName!r}")
-                 return 1
-             elif self.modulesInstalled(moduleNames=[moduleName], install=False):
-                 self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
-                 return 1
-             self.Error(f"Failed to install module {moduleName!r}")
-         except Exception as e:
-             tb = traceback.format_exc()
-             self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
-         return 0
-
-     def installPackage(self,package): # Should delete this. It doesn't work consistently
-         try:
-             import pip
-             if hasattr(pip, 'main'):
-                 pip.main(['install', package])
-                 self.Trace()
-             else:
-                 pip._internal.main(['install', package])
-                 self.Trace()
-         except Exception as e:
-             tb = traceback.format_exc()
-             self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
-             return False
-         return True

      def isDocker(self):
          cgroup = pathlib.Path('/proc/self/cgroup')
          return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()

+     def isWindows(self):
+         if any(platform.win32_ver()):
+             return True
+         return False
+
+     def isLinux(self):
+         if platform.system().lower().startswith("linux"):
+             return True
+         return False
+
+     def isFreeBSD(self):
+         if platform.system().lower().startswith("freebsd"):
+             return True
+         return False
+
+     def isMacOS(self):
+         if sys.platform == "darwin":
+             return True
+         return False
+
+     def isWindows(self):
+         if any(platform.win32_ver()):
+             return True
+         return False
+
      def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
          if trace:
              self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
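The new setProgressBarIter/progressBar pair throttles progress updates so that large result sets do not flood the server with log.progress calls. A minimal sketch of the intended call pattern under that assumption (the items collection and the stash variable are illustrative):

# Illustrative loop; "items" stands in for whatever collection the plugin iterates.
items = list(range(5000))
stash.setProgressBarIter(len(items))     # choose an update stride based on the result size
for i, item in enumerate(items):
    stash.progressBar(i, len(items))     # only every Nth call actually reaches self.log.progress
    # ... per-item work ...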
@@ -539,7 +583,7 @@ class StashPluginHelper(StashInterface):
          itemToCk = itemToCk.lower()
          index = -1
          lenItemMatch = 0
-         returnValue = self.NOT_IN_LIST
+         returnValue = self.Constant.NOT_IN_LIST.value
          for listItem in listToCk:
              index += 1
              if itemToCk.startswith(listItem.lower()):
@@ -598,17 +642,62 @@ class StashPluginHelper(StashInterface):
              self.update_scene(dataDict)
          return doesHaveTagName

-     def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False): # scene can be scene ID or scene metadata
+     def addTag(self, scene, tagName, tagName_descp = "", ignoreAutoTag=False, retryCount = 12, sleepSecondsBetweenRetry = 5): # scene can be scene ID or scene metadata
+         errMsg = None
+         for i in range(0, retryCount):
+             try:
+                 if errMsg != None:
+                     self.Warn(errMsg)
                  scene_details = scene
                  if 'id' not in scene:
                      scene_details = self.find_scene(scene)
                  tagIds = [self.createTagId(tagName, tagName_descp=tagName_descp, ignoreAutoTag=ignoreAutoTag)]
                  for tag in scene_details['tags']:
-                     if tag['name'] != tagName:
+                     if tag['name'] == tagName:
+                         return False
+                     else:
                          tagIds += [tag['id']]
                  dataDict = {'id' : scene_details['id']}
                  dataDict.update({'tag_ids' : tagIds})
                  self.update_scene(dataDict)
+                 return True
+             except ConnectionResetError as e: # "as e" added; the message below references e
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             except Exception as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [addTag]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             time.sleep(sleepSecondsBetweenRetry)
+
+     def updateScene(self, update_input, create=False, retryCount = 24, sleepSecondsBetweenRetry = 5):
+         errMsg = None
+         for i in range(0, retryCount):
+             try:
+                 if errMsg != None:
+                     self.Warn(errMsg)
+                 return self.update_scene(update_input, create)
+             except ConnectionResetError as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             except Exception as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [updateScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             time.sleep(sleepSecondsBetweenRetry)
+
+     def destroyScene(self, scene_id, delete_file=False, retryCount = 12, sleepSecondsBetweenRetry = 5):
+         errMsg = None
+         for i in range(0, retryCount):
+             try:
+                 if errMsg != None:
+                     self.Warn(errMsg)
+                 return self.destroy_scene(scene_id, delete_file)
+             except ConnectionResetError as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [destroyScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}" # message corrected from [updateScene]
+             except Exception as e:
+                 tb = traceback.format_exc()
+                 errMsg = f"Exception calling [destroyScene]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
+             time.sleep(sleepSecondsBetweenRetry)

      def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
          """Runs a plugin operation.
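addTag, updateScene, and destroyScene (and mergeMetadata above) all repeat the same retry scaffolding. A hedged sketch of that shared pattern as a standalone helper, in case the repetition is ever factored out (the helper name and its placement are assumptions, not part of this commit):

# Illustrative retry helper; not part of the committed code.
import time, traceback
def retryCall(func, retryCount=12, sleepSecondsBetweenRetry=5, warn=print):
    errMsg = None
    for i in range(retryCount):
        try:
            if errMsg != None:
                warn(errMsg)
            return func()
        except Exception as e:
            tb = traceback.format_exc()
            errMsg = f"Exception calling [{func.__name__}]. Will retry; count({i}); Error: {e}\nTraceBack={tb}"
        time.sleep(sleepSecondsBetweenRetry)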
@@ -2,6 +2,8 @@
  # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
  # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
  # Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer
+ import ModulesValidate
+ ModulesValidate.modulesInstalled(["requests"])
  import os, sys, shutil, json, hashlib, pathlib, logging, time, traceback
  from pathlib import Path
  import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
@@ -35,6 +37,7 @@ settings = {
      "performerAppend": False,
      "studioAppend": False,
      "tagAppend": False,
+     "yRenameEvenIfTitleEmpty": False,
      "z_keyFIeldsIncludeInFileName": False,
      "zafileRenameViaMove": False,
      "zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
@@ -52,11 +55,13 @@ stash = StashPluginHelper(
  if stash.PLUGIN_ID in stash.PLUGIN_CONFIGURATION:
      stash.pluginSettings.update(stash.PLUGIN_CONFIGURATION[stash.PLUGIN_ID])
  if stash.IS_DOCKER:
-     stash.log_to_wrn_set = stash.LOG_TO_STASH + stash.LOG_TO_FILE
+     stash.log_to_wrn_set = stash.LogTo.STASH + stash.LogTo.FILE
  # ----------------------------------------------------------------------
  WRAPPER_STYLES = config["wrapper_styles"]
  POSTFIX_STYLES = config["postfix_styles"]

+ renameEvenIfTitleEmpty = stash.pluginSettings["yRenameEvenIfTitleEmpty"]

  # Extract dry_run setting from settings
  dry_run = stash.pluginSettings["zzdryRun"]
  dry_run_prefix = ''
@@ -89,7 +94,7 @@ if len(tag_whitelist) > 0:
  handleExe = stash.pluginConfig['handleExe']
  openedfile = None
  if handleExe != None and handleExe != "" and os.path.isfile(handleExe):
-     stash.modulesInstalled(["psutil"], silent=True)
+     ModulesValidate.modulesInstalled(["psutil"], silent=True)
      from openedFile import openedFile
      openedfile = openedFile(handleExe, stash)

@@ -296,6 +301,8 @@ def rename_scene(scene_id):
      maxScanCountDefault = 5
      maxScanCountForUpdate = 10
      if scene_details['title'] == None or scene_details['title'] == "":
+         if renameEvenIfTitleEmpty == False:
+             return None
          maxScanCountDefault = 1
          maxScanCountForUpdate = 1
      if not os.path.isfile(original_file_path) and not taskqueue.clearDupTagsJobOnTaskQueue() and not taskqueue.deleteTaggedScenesJobOnTaskQueue() and not taskqueue.tooManyScanOnTaskQueue(maxScanCountDefault):
@@ -304,6 +311,9 @@ def rename_scene(scene_id):
          time.sleep(POST_SCAN_DELAY) # After a scan, need a few seconds delay before fetching data.
          scene_details = stash.find_scene(scene_id)
          original_file_path = scene_details['files'][0]['path']
+     if not os.path.isfile(original_file_path):
+         stash.Error(f"Can not rename file because path {original_file_path} doesn't exist.")
+         return None
      stash.Trace(f"(original_file_path={original_file_path})")
      # Check if the scene's path matches any of the excluded paths
      if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths):
@@ -15,6 +15,10 @@ settings:
      displayName: Append Tags
      description: Enable to append tag names to file name when renaming a file. Requires tags to be included in the [Key Fields] list, which it is by default.
      type: BOOLEAN
+   yRenameEvenIfTitleEmpty:
+     displayName: Empty Title Rename
+     description: If enabled, rename files even if the TITLE field is empty.
+     type: BOOLEAN
    z_keyFIeldsIncludeInFileName: # Prefixing z_ to variable names so that the GUI will place these fields after the above fields (alphabetically listed)
      displayName: Include Existing Key Field
      description: Enable to append performer, tags, studios, & galleries even if the name already exists in the original file name.