forked from Github/Axter-Stash
Change UI options
@@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface):
         super().__init__(self.FRAGMENT_SERVER)
         self.STASH_INTERFACE_INIT = True
+
+        if self.STASH_URL.startswith("http://0.0.0.0:"):
+            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
 
         if self.STASH_INTERFACE_INIT:
            self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
            self.STASH_CONFIGURATION = self.get_configuration()["general"]
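Note on the hunk above: 0.0.0.0 is a bind-to-all-interfaces server address, not a host a client can reliably connect to, so the helper rewrites it to localhost before issuing API calls. A standalone sketch of the same substitution (the function name is illustrative, not part of the plugin):

    def normalize_stash_url(url: str) -> str:
        # "0.0.0.0" means "listen on every interface"; as a client target,
        # loopback is the sensible equivalent.
        if url.startswith("http://0.0.0.0:"):
            return url.replace("http://0.0.0.0:", "http://localhost:")
        return url

    assert normalize_stash_url("http://0.0.0.0:9999/graphql") == "http://localhost:9999/graphql"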
@@ -9,7 +9,7 @@
 # Python library for parse-reparsepoint
 # https://pypi.org/project/parse-reparsepoint/
 # pip install parse-reparsepoint
-import os, sys, time, pathlib, argparse, platform, shutil
+import os, sys, time, pathlib, argparse, platform, shutil, logging
 from StashPluginHelper import StashPluginHelper
 from DupFileManager_config import config # Import config from DupFileManager_config.py
 
@@ -21,17 +21,17 @@ parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true',
 parse_args = parser.parse_args()
 
 settings = {
-    "dupWhiteListTag": "",
-    "dupFileTag": "DuplicateMarkForDeletion",
-    "dupFileTagSwap": "DuplicateMarkForSwap",
     "mergeDupFilename": False,
     "permanentlyDelete": False,
     "whitelistDelDupInSameFolder": False,
-    "zcleanAfterDel": False,
-    "zwhitelist": "",
-    "zxgraylist": "",
-    "zyblacklist": "",
-    "zymaxDupToProcess": 0,
+    "whitelistDoTagLowResDup": False,
+    "zCleanAfterDel": False,
+    "zSwapHighRes": False,
+    "zSwapLongLength": False,
+    "zWhitelist": "",
+    "zxGraylist": "",
+    "zyBlacklist": "",
+    "zyMaxDupToProcess": 0,
     "zzdebugTracing": False,
 }
 stash = StashPluginHelper(
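The renamed keys (zcleanAfterDel to zCleanAfterDel, zwhitelist to zWhitelist, and so on) keep their leading z/zx/zy/zz prefixes, which appear to exist so the settings sort into the intended top-to-bottom order in the Stash plugin UI; the new camelCase only improves the displayed capitalization. A quick way to see the ordering effect (plain Python, independent of Stash):

    settings = {"mergeDupFilename": False, "zWhitelist": "", "zxGraylist": "",
                "zyBlacklist": "", "zyMaxDupToProcess": 0, "zzdebugTracing": False}
    # Alphabetical sort reproduces the order the prefixes enforce.
    print(sorted(settings))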
@@ -39,54 +39,65 @@ stash = StashPluginHelper(
         debugTracing=parse_args.trace,
         settings=settings,
         config=config,
-        maxbytes=100*1024*1024,
+        maxbytes=50*1024*1024,
         )
-stash.Status()
-stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
+stash.Status(logLevel=logging.DEBUG)
+stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
 
-stash.Trace(f"(stashPaths={stash.STASH_PATHS})")
+stash.Trace(f"(stashPaths={stash.STASH_PATHS}")
+stash.Trace(f"(STASH_URL={stash.STASH_URL}")
 # stash.encodeToUtf8 = True
 
+LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
 listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
 addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
 mergeDupFilename = stash.Setting('mergeDupFilename')
 moveToTrashCan = False if stash.Setting('permanentlyDelete') else True
 alternateTrashCanPath = stash.Setting('dup_path')
 whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
-maxDupToProcess = int(stash.Setting('zymaxDupToProcess'))
-swapHighRes = stash.Setting('swapHighRes')
-swapLongLength = stash.Setting('swapLongLength')
+whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
+maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
+swapHighRes = stash.Setting('zSwapHighRes')
+swapLongLength = stash.Setting('zSwapLongLength')
 significantTimeDiff = stash.Setting('significantTimeDiff')
 toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
-cleanAfterDel = stash.Setting('zcleanAfterDel')
+cleanAfterDel = stash.Setting('zCleanAfterDel')
+duration_diff = float(stash.Setting('duration_diff'))
+if duration_diff > 10:
+    duration_diff = 10
+elif duration_diff < 1:
+    duration_diff = 1
 
-duplicateMarkForDeletion = stash.Setting('dupFileTag')
+# significantTimeDiff can not be higher than 1 and shouldn't be lower than .5
+if significantTimeDiff > 1:
+    significantTimeDiff = 1
+if significantTimeDiff < .5:
+    significantTimeDiff = .5
 
 
+duplicateMarkForDeletion = stash.Setting('DupFileTag')
 if duplicateMarkForDeletion == "":
     duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
 
-DuplicateMarkForSwap = stash.Setting('dupFileTagSwap')
-if DuplicateMarkForSwap == "":
-    DuplicateMarkForSwap = 'DuplicateMarkForSwap'
+duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
+if duplicateWhitelistTag == "":
+    duplicateWhitelistTag = 'DuplicateWhitelistFile'
 
-duplicateWhitelistTag = stash.Setting('dupWhiteListTag')
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag]
 
-excludeMergeTags = [duplicateMarkForDeletion, DuplicateMarkForSwap]
-if duplicateWhitelistTag != "":
-    excludeMergeTags += [duplicateWhitelistTag]
 stash.init_mergeMetadata(excludeMergeTags)
 
-graylist = stash.Setting('zxgraylist').split(listSeparator)
+graylist = stash.Setting('zxGraylist').split(listSeparator)
 graylist = [item.lower() for item in graylist]
 if graylist == [""] : graylist = []
-stash.Log(f"graylist = {graylist}")
+stash.Trace(f"graylist = {graylist}")
-whitelist = stash.Setting('zwhitelist').split(listSeparator)
+whitelist = stash.Setting('zWhitelist').split(listSeparator)
 whitelist = [item.lower() for item in whitelist]
 if whitelist == [""] : whitelist = []
-stash.Log(f"whitelist = {whitelist}")
+stash.Trace(f"whitelist = {whitelist}")
-blacklist = stash.Setting('zyblacklist').split(listSeparator)
+blacklist = stash.Setting('zyBlacklist').split(listSeparator)
 blacklist = [item.lower() for item in blacklist]
 if blacklist == [""] : blacklist = []
-stash.Log(f"blacklist = {blacklist}")
+stash.Trace(f"blacklist = {blacklist}")
 
 def realpath(path):
     """
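The added bounds checks above keep both tunables inside the ranges the Stash API and the comparison logic tolerate: duration_diff is forced into [1, 10] and significantTimeDiff into [0.5, 1]. The same clamping written once as a helper (a sketch; the plugin inlines the if-chains instead):

    def clamp(value, lo, hi):
        # Equivalent to the two if-chains above: min/max pin value into [lo, hi].
        return max(lo, min(hi, value))

    assert clamp(15, 1, 10) == 10      # duration_diff too high -> 10
    assert clamp(0.3, 0.5, 1) == 0.5   # significantTimeDiff too low -> .5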
@@ -173,25 +184,32 @@ def createTagId(tagName, tagName_descp, deleteIfExist = False):
     stash.Log(f"Dup-tagId={tagId['id']}")
     return tagId['id']
 
-def setTagId(tagId, tagName, sceneDetails, PrimeDuplicateScene = ""):
-    if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
-        BaseDupStr = f"BaseDup={PrimeDuplicateScene}"
-        if sceneDetails['details'].startswith(BaseDupStr) or sceneDetails['details'].startswith(f"Primary Duplicate = {PrimeDuplicateScene}"):
-            PrimeDuplicateScene = ""
-        elif sceneDetails['details'] == "":
-            PrimeDuplicateScene = BaseDupStr
-        else:
-            PrimeDuplicateScene = f"{BaseDupStr};\n{sceneDetails['details']}"
+def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+    stash.Trace()
+    details = ""
+    ORG_DATA_DICT = {'id' : sceneDetails['id']}
+    dataDict = ORG_DATA_DICT.copy()
+    doAddTag = True
+    if addPrimaryDupPathToDetails:
+        BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+        if sceneDetails['details'] == "":
+            details = BaseDupStr
+        elif not sceneDetails['details'].startswith(BaseDupStr):
+            details = f"{BaseDupStr};\n{sceneDetails['details']}"
     for tag in sceneDetails['tags']:
         if tag['name'] == tagName:
-            if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
-                stash.update_scene({'id' : sceneDetails['id'], 'details' : PrimeDuplicateScene})
-            return
-    if PrimeDuplicateScene == "" or not addPrimaryDupPathToDetails:
-        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId})
+            doAddTag = False
+            break
+    if doAddTag:
+        dataDict.update({'tag_ids' : tagId})
+    if details != "":
+        dataDict.update({'details' : details})
+    if dataDict != ORG_DATA_DICT:
+        stash.update_scene(dataDict)
+        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
     else:
-        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId, 'details' : PrimeDuplicateScene})
+        stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
 
 
 def isInList(listToCk, pathToCk):
     pathToCk = pathToCk.lower()
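The rewritten setTagId collects every field change into dataDict and only calls update_scene when the payload differs from the bare {'id': ...} it started from, so unchanged scenes cost no API round trip. A minimal, Stash-independent sketch of that collect-then-compare pattern (names here are illustrative):

    def build_scene_update(scene_id, tag_id=None, details=""):
        base = {'id': scene_id}            # the "nothing changed" payload
        data = base.copy()
        if tag_id is not None:
            data['tag_ids'] = tag_id       # add only fields that actually change
        if details != "":
            data['details'] = details
        return None if data == base else data  # None: skip the update call

    assert build_scene_update(42) is None
    assert build_scene_update(42, tag_id=7) == {'id': 42, 'tag_ids': 7}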
@@ -229,28 +247,29 @@ def significantLessTime(durrationToKeep, durrationOther):
         return True
     return False
 
-def isBetter(DupFileToKeep, DupFile):
+def isSwapCandidate(DupFileToKeep, DupFile):
     # Don't move if both are in whitelist
     if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
         return False
     if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
         if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
             return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
     if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
         if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
             return True
     return False
 
 def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
-    duration_diff = 10.00
     duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
-    stash.Log(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
+    stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
     dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
     stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
 
     dupWhitelistTagId = None
-    if duplicateWhitelistTag != "":
-        stash.Log(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
+    if whitelistDoTagLowResDup:
+        stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
         duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
         dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp)
         stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
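isSwapCandidate above only proposes a swap when the kept file wins on resolution without being significantly shorter, or wins on duration without losing resolution; significantLessTime applies the significantTimeDiff ratio (0.9 per the config default). An illustrative stand-in, assuming the threshold is interpreted as a fraction of the other file's duration:

    def significant_less_time(keep_sec, other_sec, threshold=0.9):
        # Presumed semantics: "significantly shorter" = below threshold * other duration.
        return keep_sec < other_sec * threshold

    assert not significant_less_time(590, 600)  # 98% as long: close enough to swap
    assert significant_less_time(500, 600)      # 83% as long: significantly shorter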
@@ -266,10 +285,10 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
     QtyMerge = 0
     QtyDeleted = 0
     stash.Log("#########################################################################")
-    stash.Log("#########################################################################")
+    stash.Trace("#########################################################################")
-    stash.Log("Waiting for find_duplicate_scenes_diff to return results...")
+    stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
     DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
-    stash.Log("#########################################################################")
+    stash.Trace("#########################################################################")
     for DupFileSet in DupFileSets:
         stash.Trace(f"DupFileSet={DupFileSet}")
         QtyDupSet+=1
@@ -318,25 +337,25 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                 QtyMerge += 1
 
             if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])):
-                if isBetter(DupFileToKeep, DupFile):
+                if isSwapCandidate(DupFileToKeep, DupFile):
                     if merge:
                         stash.merge_metadata(DupFileToKeep, DupFile)
                     if toRecycleBeforeSwap:
                         sendToTrash(DupFile['files'][0]['path'])
                     shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
-                    stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True)
+                    stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
                     DupFileToKeep = DupFile
                     QtySwap+=1
                 else:
                     stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True)
                     if dupWhitelistTagId and tagDuplicates:
-                        setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path'])
+                        setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep)
                     QtySkipForDel+=1
             else:
                 if deleteDup:
                     DupFileName = DupFile['files'][0]['path']
                     DupFileNameOnly = pathlib.Path(DupFileName).stem
-                    stash.Log(f"Deleting duplicate '{DupFileName}'", toAscii=True)
+                    stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
                     if alternateTrashCanPath != "":
                         destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
                         if os.path.isfile(destPath):
@@ -348,18 +367,18 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                     QtyDeleted += 1
                 elif tagDuplicates:
                     if QtyTagForDel == 0:
-                        stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True)
+                        stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
                     else:
-                        stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True)
+                        stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
-                    setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep['files'][0]['path'])
+                    setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep)
                     QtyTagForDel+=1
-        stash.Log(SepLine)
+        stash.Trace(SepLine)
         if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
             break
 
-    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}")
+    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
     if cleanAfterDel:
-        stash.Log("Adding clean jobs to the Task Queue")
+        stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
         stash.metadata_clean(paths=stash.STASH_PATHS)
         stash.metadata_clean_generated()
         stash.optimise_database()
@@ -374,11 +393,11 @@ def testSetDupTagOnScene(sceneId):
     stash.Log(f"tag_ids={tag_ids}")
     stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
 
-if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task":
-    mangeDupFiles(merge=True)
+if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
+    mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
     stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
 elif stash.PLUGIN_TASK_NAME == "delete_duplicates":
-    mangeDupFiles(deleteDup=True)
+    mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
     stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
 elif parse_args.dup_tag:
     mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
@@ -3,43 +3,47 @@ description: Manages duplicate files.
 version: 0.1.1
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 settings:
-  dupFileTag:
-    displayName: Duplicate File Tag Name
-    description: (Default = DuplicateMarkForDeletion) Tag used to tag duplicates with lower resolution, duration, and file name length.
-    type: STRING
-  dupWhiteListTag:
-    displayName: Duplicate Whitelist Tag Name
-    description: If populated, a tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
-    type: STRING
   mergeDupFilename:
     displayName: Merge Duplicate Tags
     description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
     type: BOOLEAN
   permanentlyDelete:
     displayName: Permanent Delete
-    description: (Default=false) Enable to permanently delete files, instead of moving files to trash can.
+    description: Enable to permanently delete files, instead of moving files to trash can.
     type: BOOLEAN
   whitelistDelDupInSameFolder:
     displayName: Whitelist Delete In Same Folder
-    description: (Default=false) Allow whitelist deletion of duplicates within the same whitelist folder.
+    description: Allow whitelist deletion of duplicates within the same whitelist folder.
     type: BOOLEAN
-  zcleanAfterDel:
+  whitelistDoTagLowResDup:
+    displayName: Whitelist Duplicate Tagging
+    description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
+    type: BOOLEAN
+  zCleanAfterDel:
     displayName: Run Clean After Delete
     description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
     type: BOOLEAN
-  zwhitelist:
+  zSwapHighRes:
+    displayName: Swap High Resolution
+    description: If enabled, swap higher resolution duplicate files to preferred path.
+    type: BOOLEAN
+  zSwapLongLength:
+    displayName: Swap Longer Duration
+    description: If enabled, swap longer duration media files to preferred path. Longer is determine by significantLongerTime field.
+    type: BOOLEAN
+  zWhitelist:
     displayName: White List
     description: A comma seperated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
     type: STRING
-  zxgraylist:
+  zxGraylist:
     displayName: Gray List
     description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
     type: STRING
-  zyblacklist:
+  zyBlacklist:
     displayName: Black List
     description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
     type: STRING
-  zymaxDupToProcess:
+  zyMaxDupToProcess:
     displayName: Max Dup Process
     description: Maximum number of duplicates to process. If 0, infinity
     type: NUMBER
@@ -52,10 +56,10 @@ exec:
   - "{pluginDir}/DupFileManager.py"
 interface: raw
 tasks:
-  - name: Tag Duplicate Filename
+  - name: Tag Duplicates
     description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
     defaultArgs:
-      mode: merge_dup_filename_task
+      mode: tag_duplicates_task
   - name: Delete Duplicates
     description: Deletes duplicate files
     defaultArgs:
@@ -6,16 +6,18 @@ config = {
     "addPrimaryDupPathToDetails" : True,
     # Alternative path to move duplicate files.
     "dup_path": "", #Example: "C:\\TempDeleteFolder"
-    # If enabled, swap higher resolution duplicate files to preferred path.
-    "swapHighRes" : True,
-    # If enabled, swap longer length media files to preferred path. Longer will be determine by significantLongerTime value.
-    "swapLongLength" : True,
     # The threshold as to what percentage is consider a significant shorter time.
-    "significantTimeDiff" : .90, # 95% threshold
+    "significantTimeDiff" : .90, # 90% threshold
+    # Valued passed to stash API function FindDuplicateScenes.
+    "duration_diff" : 10, # (default=10) A value from 1 to 10.
     # If enabled, moves destination file to recycle bin before swapping Hi-Res file.
     "toRecycleBeforeSwap" : True,
     # Character used to seperate items on the whitelist, blacklist, and graylist
     "listSeparator" : ",",
+    # Tag used to tag duplicates with lower resolution, duration, and file name length.
+    "DupFileTag" : "DuplicateMarkForDeletion",
+    # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
+    "DupWhiteListTag" : "DuplicateWhitelistFile",
 
     # The following fields are ONLY used when running DupFileManager in script mode
     "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
@@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface):
         super().__init__(self.FRAGMENT_SERVER)
         self.STASH_INTERFACE_INIT = True
+
+        if self.STASH_URL.startswith("http://0.0.0.0:"):
+            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
 
         if self.STASH_INTERFACE_INIT:
            self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
            self.STASH_CONFIGURATION = self.get_configuration()["general"]
@@ -1,4 +1,4 @@
-# FileMonitor: Ver 0.8.8 (By David Maisonave)
+# FileMonitor: Ver 0.8.9 (By David Maisonave)
 FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features:
 - Updates Stash when any file changes occurs in the Stash library.
 - **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**.
@@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface):
         super().__init__(self.FRAGMENT_SERVER)
         self.STASH_INTERFACE_INIT = True
+
+        if self.STASH_URL.startswith("http://0.0.0.0:"):
+            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
 
         if self.STASH_INTERFACE_INIT:
            self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
            self.STASH_CONFIGURATION = self.get_configuration()["general"]
@@ -14,8 +14,10 @@ from filemonitor_task_examples import task_examples
 from filemonitor_self_unit_test import self_unit_test
 
 config['task_scheduler'] = config['task_scheduler'] + task_examples['task_scheduler']
-if self_unit_test['selfUnitTest']:
-    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler']
+if self_unit_test['selfUnitTest_repeat']:
+    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_repeat']
+if self_unit_test['selfUnitTest_set_time']:
+    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_set_time']
 
 CONTINUE_RUNNING_SIG = 99
 STOP_RUNNING_SIG = 32
@@ -38,6 +40,7 @@ if parse_args.quit:
 settings = {
     "recursiveDisabled": False,
     "turnOnScheduler": False,
+    "turnOnSchedulerDeleteDup": False,
     "zmaximumBackups": 1,
     "zzdebugTracing": False
 }
@@ -82,6 +85,7 @@ if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME):
 fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else []
 includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
 excludePathChanges = stash.pluginConfig['excludePathChanges']
+turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup']
 
 if stash.DRY_RUN:
     stash.Log("Dry run mode is enabled.")
@@ -303,6 +307,9 @@ class StashScheduler: # Stash Scheduler
             if invalidDir:
                 stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
             else:
+                if task['task'] == "Delete Duplicates" and not turnOnSchedulerDeleteDup:
+                    stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]")
+                    return None
                 stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
                 return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
         else:
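The scheduler change above adds an opt-in gate: a destructive task is refused before dispatch unless its dedicated UI switch is enabled, and None signals the caller that nothing ran. A generic sketch of the pattern (all names illustrative, not FileMonitor's API):

    def run_task(task, opt_in_flags, dispatch):
        # Refuse gated tasks before dispatch; None means "did not run".
        if not opt_in_flags.get(task['task'], True):
            print(f"Not running task {task['task']}: its scheduler switch is disabled.")
            return None
        return dispatch(task)

    result = run_task({'task': 'Delete Duplicates'}, {'Delete Duplicates': False},
                      dispatch=lambda t: f"ran {t['task']}")
    assert result is None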
@@ -1,6 +1,6 @@
 name: FileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths.
-version: 0.8.8
+version: 0.8.9
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 settings:
   recursiveDisabled:
|
|||||||
displayName: Scheduler
|
displayName: Scheduler
|
||||||
description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
|
description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
|
||||||
type: BOOLEAN
|
type: BOOLEAN
|
||||||
|
turnOnSchedulerDeleteDup:
|
||||||
|
displayName: Delete Duplicate Scheduler
|
||||||
|
description: Turn on scheduler for deleting duplicates in Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
|
||||||
|
type: BOOLEAN
|
||||||
zmaximumBackups:
|
zmaximumBackups:
|
||||||
displayName: Max DB Backups
|
displayName: Max DB Backups
|
||||||
description: When value greater than 1, will trim the number of database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath populated with path length longer than 4.
|
description: Trim database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath path length longer than 4.
|
||||||
type: NUMBER
|
type: NUMBER
|
||||||
zzdebugTracing:
|
zzdebugTracing:
|
||||||
displayName: Debug Tracing
|
displayName: Debug Tracing
|
||||||
@@ -29,10 +33,10 @@ tasks:
|
|||||||
defaultArgs:
|
defaultArgs:
|
||||||
mode: start_library_monitor_service
|
mode: start_library_monitor_service
|
||||||
- name: Stop Library Monitor
|
- name: Stop Library Monitor
|
||||||
description: Stops library monitoring within 2 minute.
|
description: Stops library monitoring within 2 minutes.
|
||||||
defaultArgs:
|
defaultArgs:
|
||||||
mode: stop_library_monitor
|
mode: stop_library_monitor
|
||||||
- name: Monitor as a Plugin
|
- name: Monitor as a Plugin
|
||||||
description: Run [Library Monitor] as a plugin (*not recommended method*)
|
description: Run [Library Monitor] as a plugin (*Not recommended*)
|
||||||
defaultArgs:
|
defaultArgs:
|
||||||
mode: start_library_monitor
|
mode: start_library_monitor
|
||||||
|
|||||||
@@ -17,6 +17,9 @@ config = {
     # Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py
     {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser",
         "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+    # The following task runs plugin DupFileManager if the plugin is installed.
+    {"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
+        "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Daily at 2:30AM)
     {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
 
     # The following tasks are scheduled weekly
|
|||||||
{"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
|
{"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
|
||||||
{"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
|
{"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
|
||||||
{"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
|
{"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
|
||||||
# The following task runs plugin DupFileManager if the plugin is installed.
|
|
||||||
{"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
|
|
||||||
"weekday" : "saturday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Every saturday at 2:30AM)
|
|
||||||
|
|
||||||
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
|
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
|
||||||
# The monthly field value must be 1, 2, 3, or 4.
|
# The monthly field value must be 1, 2, 3, or 4.
|
||||||
@@ -40,6 +40,9 @@ config = {
|
|||||||
# The Backup task is scheduled monthly
|
# The Backup task is scheduled monthly
|
||||||
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
|
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
|
||||||
{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
|
{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
|
||||||
|
# The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
|
||||||
|
{"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
|
||||||
|
"weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
|
||||||
|
|
||||||
# The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
|
# The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
|
||||||
# This task only works if FileMonitor is started as a service or in command line mode.
|
# This task only works if FileMonitor is started as a service or in command line mode.
|
||||||
|
|||||||
@@ -4,7 +4,7 @@
 # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
 # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
 self_unit_test = {
-    "task_scheduler": [
+    "task_scheduler_repeat": [
         {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
         {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
         {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
|
|||||||
{"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
|
{"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
|
||||||
{"task" : "LogOnce", "seconds" :15}, # Test LogOnce
|
{"task" : "LogOnce", "seconds" :15}, # Test LogOnce
|
||||||
{"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
|
{"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
|
||||||
# {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
|
|
||||||
{"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
|
{"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
|
||||||
{"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
|
{"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
|
||||||
{"task" : "Generate", "weekday" : "friday", "time" : "12:03"},
|
# {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
|
||||||
{"task" : "Clean", "weekday" : "friday", "time" : "12:03"},
|
],
|
||||||
{"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"},
|
"task_scheduler_set_time": [
|
||||||
{"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"},
|
# Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
|
||||||
{"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags
|
{"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager","weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
|
||||||
{"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager","weekday" : "friday", "time" : "12:03"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
|
{"task" : "Generate", "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"},
|
{"task" : "Clean", "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database...
|
{"task" : "Auto Tag", "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"},
|
{"task" : "Optimise Database", "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes...
|
{"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Running plugin task: Create Tags
|
||||||
{"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
|
{"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager", "weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
|
||||||
{"task" : "python", "script" : "<plugin_path>test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
|
{"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "python", "script" : "<plugin_path>test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
|
{"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Optimising database...
|
||||||
{"task" : "execute", "command" : "<plugin_path>test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
|
{"task" : "Clean Generated Files", "weekday" : "sunday", "time" : "17:56"},
|
||||||
{"task" : "execute", "command" : "<plugin_path>test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
|
{"task" : "RenameGeneratedFiles", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Migrating scene hashes...
|
||||||
|
{"task" : "Backup", "maxBackups" : 0, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
|
||||||
|
{"task" : "python", "script" : "<plugin_path>test_hello_world2.py", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
|
||||||
|
{"task" : "python", "script" : "<plugin_path>test_hello_world.py", "detach" : False, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
|
||||||
|
{"task" : "execute", "command" : "<plugin_path>test_hello_world2.cmd", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
|
||||||
|
{"task" : "execute", "command" : "<plugin_path>test_hello_world.bat", "args" : "--name David", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
|
||||||
],
|
],
|
||||||
|
|
||||||
# MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
|
# MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
|
||||||
# Enable to turn on self unit test.
|
"selfUnitTest_repeat" : False , # Enable to turn on self unit test.
|
||||||
"selfUnitTest": False,
|
"selfUnitTest_set_time" : False , # Enable to turn on self unit test.
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.4 (By David Maisonave)
+# RenameFile: Ver 0.4.5 (By David Maisonave)
 RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
 - **Rename Scene File Name** (On-The-Fly)
 - **Append tag names** to file name
@@ -479,7 +479,10 @@ def rename_scene(scene_id, stash_directory):
     new_path_info = {'new_file_path': new_path}
     if debugTracing: logger.info(f"{dry_run_prefix}New filename: {new_path}")
 
-    if move_files and original_parent_directory.name != scene_details['studio']['name']:
+    studioName = ""
+    if 'studio' in scene_details and scene_details['studio'] != None and 'name' in scene_details['studio']:
+        studioName = scene_details['studio']['name']
+    if move_files and studioName != "" and original_parent_directory.name != studioName:
         new_path = original_parent_directory / scene_details['studio']['name'] / (new_filename + Path(original_file_path).suffix)
         new_path_info = {'new_file_path': new_path}
         move_or_rename_files(scene_details, new_filename, original_parent_directory)
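The RenameFile fix above avoids a KeyError or TypeError when a scene has no studio: it resolves the studio name defensively before using it in the move condition. The same guard in a compact form (a sketch; dict.get keeps the lookup one expression):

    def studio_name(scene_details):
        # scene_details['studio'] may be missing or None; both must yield "".
        studio = scene_details.get('studio')
        return studio['name'] if studio and 'name' in studio else ""

    assert studio_name({'studio': None}) == ""
    assert studio_name({'studio': {'name': 'Acme'}}) == 'Acme'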
@@ -1,6 +1,6 @@
 name: RenameFile
 description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.4
+version: 0.4.5
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
 settings:
   performerAppend: