Too many changes to list

David Maisonave
2024-09-13 10:10:37 -04:00
parent 5b34502963
commit 452c08df03
18 changed files with 1645 additions and 353 deletions

View File: DupFileManager.py

@@ -5,17 +5,20 @@
 # Example: python DupFileManager.py --url http://localhost:9999 -a
 import os, sys, time, pathlib, argparse, platform, shutil, logging
 from StashPluginHelper import StashPluginHelper
+from stashapi.stash_types import PhashDistance
 from DupFileManager_config import config # Import config from DupFileManager_config.py
 parser = argparse.ArgumentParser()
 parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
 parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
 parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.')
+parser.add_argument('--clear_dup_tag', '-c', dest='clear_tag', action='store_true', help='Clear duplicates of duplicate tags.')
 parser.add_argument('--del_tag_dup', '-d', dest='del_tag', action='store_true', help='Only delete scenes having DuplicateMarkForDeletion tag.')
 parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
 parse_args = parser.parse_args()
 settings = {
+    "doNotGeneratePhash": False,
     "mergeDupFilename": False,
     "permanentlyDelete": False,
     "whitelistDelDupInSameFolder": False,
@@ -23,9 +26,13 @@ settings = {
     "zCleanAfterDel": False,
     "zSwapHighRes": False,
     "zSwapLongLength": False,
+    "zSwapBetterBitRate": False,
+    "zSwapCodec": False,
+    "zSwapBetterFrameRate": False,
     "zWhitelist": "",
     "zxGraylist": "",
     "zyBlacklist": "",
+    "zyMatchDupDistance": 0,
     "zyMaxDupToProcess": 0,
     "zzdebugTracing": False,
 }
@@ -40,7 +47,9 @@ if len(sys.argv) > 1:
     stash.Log(f"argv = {sys.argv}")
 else:
     stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
-stash.Status(logLevel=logging.DEBUG)
+stash.status(logLevel=logging.DEBUG)
+stash.modulesInstalled(["send2trash", "requests"])
 # stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
 # stash.encodeToUtf8 = True
@@ -49,22 +58,48 @@ stash.Status(logLevel=logging.DEBUG)
 LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
 listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
 addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
+doNotGeneratePhash = stash.Setting('doNotGeneratePhash')
 mergeDupFilename = stash.Setting('mergeDupFilename')
 moveToTrashCan = False if stash.Setting('permanentlyDelete') else True
 alternateTrashCanPath = stash.Setting('dup_path')
 whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
 whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
 maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
-swapHighRes = stash.Setting('zSwapHighRes')
-swapLongLength = stash.Setting('zSwapLongLength')
 significantTimeDiff = stash.Setting('significantTimeDiff')
 toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
 cleanAfterDel = stash.Setting('zCleanAfterDel')
-duration_diff = float(stash.Setting('duration_diff'))
-if duration_diff > 10:
-    duration_diff = 10
-elif duration_diff < 1:
-    duration_diff = 1
+swapHighRes = stash.Setting('zSwapHighRes')
+swapLongLength = stash.Setting('zSwapLongLength')
+swapBetterBitRate = stash.Setting('zSwapBetterBitRate')
+swapCodec = stash.Setting('zSwapCodec')
+swapBetterFrameRate = stash.Setting('zSwapBetterFrameRate')
+favorLongerFileName = stash.Setting('favorLongerFileName')
+favorLargerFileSize = stash.Setting('favorLargerFileSize')
+favorBitRateChange = stash.Setting('favorBitRateChange')
+favorHighBitRate = stash.Setting('favorHighBitRate')
+favorFrameRateChange = stash.Setting('favorFrameRateChange')
+favorHigherFrameRate = stash.Setting('favorHigherFrameRate')
+favorCodecRanking = stash.Setting('favorCodecRanking')
+codecRankingSetToUse = stash.Setting('codecRankingSetToUse')
+if codecRankingSetToUse == 4:
+    codecRanking = stash.Setting('codecRankingSet4')
+elif codecRankingSetToUse == 3:
+    codecRanking = stash.Setting('codecRankingSet3')
+elif codecRankingSetToUse == 2:
+    codecRanking = stash.Setting('codecRankingSet2')
+else:
+    codecRanking = stash.Setting('codecRankingSet1')
+matchDupDistance = int(stash.Setting('zyMatchDupDistance'))
+matchPhaseDistance = PhashDistance.EXACT
+matchPhaseDistanceText = "Exact Match"
+if matchDupDistance == 1:
+    matchPhaseDistance = PhashDistance.HIGH
+    matchPhaseDistanceText = "High Match"
+elif matchDupDistance == 2:
+    matchPhaseDistance = PhashDistance.MEDIUM
+    matchPhaseDistanceText = "Medium Match"
 # significantTimeDiff can not be higher than 1 and shouldn't be lower than .5
 if significantTimeDiff > 1:
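For reference, the new `zyMatchDupDistance` ladder above maps the numeric setting onto the `PhashDistance` enum that this commit imports from stashapp-tools. A minimal, hypothetical table-driven equivalent, shown only to make the mapping explicit (the `matchDupDistance` value here is invented; the plugin reads it from `stash.Setting('zyMatchDupDistance')`):

```python
from stashapi.stash_types import PhashDistance

# Hypothetical table-driven form of the if/elif ladder; unknown values
# fall back to EXACT, matching the commit's default.
PHASH_DISTANCE_BY_SETTING = {
    0: (PhashDistance.EXACT, "Exact Match"),
    1: (PhashDistance.HIGH, "High Match"),
    2: (PhashDistance.MEDIUM, "Medium Match"),
}
matchDupDistance = 1  # example value only
matchPhaseDistance, matchPhaseDistanceText = PHASH_DISTANCE_BY_SETTING.get(
    matchDupDistance, PHASH_DISTANCE_BY_SETTING[0])
```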
@@ -79,10 +114,14 @@ if duplicateMarkForDeletion == "":
duplicateWhitelistTag = stash.Setting('DupWhiteListTag') duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
if duplicateWhitelistTag == "": if duplicateWhitelistTag == "":
duplicateWhitelistTag = 'DuplicateWhitelistFile' duplicateWhitelistTag = '_DuplicateWhitelistFile'
excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag] excludeDupFileDeleteTag = stash.Setting('excludeDupFileDeleteTag')
stash.init_mergeMetadata(excludeMergeTags) if excludeDupFileDeleteTag == "":
excludeDupFileDeleteTag = '_ExcludeDuplicateMarkForDeletion'
excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag]
stash.initMergeMetadata(excludeMergeTags)
graylist = stash.Setting('zxGraylist').split(listSeparator) graylist = stash.Setting('zxGraylist').split(listSeparator)
graylist = [item.lower() for item in graylist] graylist = [item.lower() for item in graylist]
@@ -169,36 +208,26 @@ def testReparsePointAndSymLink(merge=False, deleteDup=False):
         stash.Log(f"Not isSymLink '{myTestPath6}'")
     return
-def createTagId(tagName, tagName_descp, deleteIfExist = False):
-    tagId = stash.find_tags(q=tagName)
-    if len(tagId):
-        tagId = tagId[0]
-        if deleteIfExist:
-            stash.destroy_tag(int(tagId['id']))
-        else:
-            return tagId['id']
-    tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True})
-    stash.Log(f"Dup-tagId={tagId['id']}")
-    return tagId['id']
-def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+detailPrefix = "BaseDup="
+detailPostfix = "<BaseDup>\n"
+def setTagId(tagName, sceneDetails, DupFileToKeep):
     details = ""
     ORG_DATA_DICT = {'id' : sceneDetails['id']}
     dataDict = ORG_DATA_DICT.copy()
     doAddTag = True
     if addPrimaryDupPathToDetails:
-        BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+        BaseDupStr = f"{detailPrefix}{DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n(matchDupDistance={matchPhaseDistanceText})\n{detailPostfix}"
         if sceneDetails['details'] == "":
             details = BaseDupStr
-        elif not sceneDetails['details'].startswith(BaseDupStr):
+        elif not sceneDetails['details'].startswith(detailPrefix):
             details = f"{BaseDupStr};\n{sceneDetails['details']}"
     for tag in sceneDetails['tags']:
         if tag['name'] == tagName:
             doAddTag = False
             break
     if doAddTag:
-        dataDict.update({'tag_ids' : tagId})
+        stash.addTag(sceneDetails, tagName)
     if details != "":
         dataDict.update({'details' : details})
     if dataDict != ORG_DATA_DICT:
@@ -208,13 +237,27 @@ def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
         stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
-def isInList(listToCk, pathToCk):
-    pathToCk = pathToCk.lower()
+def isInList(listToCk, itemToCk):
+    itemToCk = itemToCk.lower()
     for item in listToCk:
-        if pathToCk.startswith(item):
+        if itemToCk.startswith(item):
             return True
     return False
+NOT_IN_LIST = 65535
+def indexInList(listToCk, itemToCk):
+    itemToCk = itemToCk.lower()
+    index = -1
+    lenItemMatch = 0
+    returnValue = NOT_IN_LIST
+    for item in listToCk:
+        index += 1
+        if itemToCk.startswith(item):
+            if len(item) > lenItemMatch: # Make sure the best match is selected by getting match with longest string.
+                lenItemMatch = len(item)
+                returnValue = index
+    return returnValue
 def hasSameDir(path1, path2):
     if pathlib.Path(path1).resolve().parent == pathlib.Path(path2).resolve().parent:
         return True
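The new `indexInList` helper returns the position of the longest matching path prefix, so a more specific graylist or blacklist entry outranks a shorter one regardless of where it sits in the list. A small hypothetical illustration (paths invented; entries are lowercase because the plugin lowercases its lists on load):

```python
# Hypothetical illustration of indexInList's longest-prefix behavior.
graylist = ["c:\\media\\", "c:\\media\\keep\\"]

# Both entries are prefixes of the path, but the longer (more specific)
# entry wins, so index 1 is returned rather than index 0.
assert indexInList(graylist, "C:\\Media\\Keep\\video.mp4") == 1

# No entry matches, so the NOT_IN_LIST sentinel (65535) is returned.
assert indexInList(graylist, "D:\\other\\video.mp4") == NOT_IN_LIST
```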
@@ -244,6 +287,26 @@ def significantLessTime(durrationToKeep, durrationOther):
         return True
     return False
+def isBetterVideo(scene1, scene2, swapCandidateCk = False):
+    # Prioritize higher reslution over codec, bit rate, and frame rate
+    if int(scene1['files'][0]['width']) > int(scene2['files'][0]['width']) or int(scene1['files'][0]['height']) > int(scene2['files'][0]['height']):
+        return False
+    if (favorBitRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterBitRate):
+        if (favorHighBitRate and int(scene2['files'][0]['bit_rate']) > int(scene1['files'][0]['bit_rate'])) or (not favorHighBitRate and int(scene2['files'][0]['bit_rate']) < int(scene1['files'][0]['bit_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHighBitRate={favorHighBitRate}] Better bit rate. {scene1['files'][0]['path']}={scene1['files'][0]['bit_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['bit_rate']}")
+            return True
+    if (favorCodecRanking and swapCandidateCk == False) or (swapCandidateCk and swapCodec):
+        scene1CodecRank = indexInList(codecRanking, scene1['files'][0]['video_codec'])
+        scene2CodecRank = indexInList(codecRanking, scene2['files'][0]['video_codec'])
+        if scene2CodecRank < scene1CodecRank:
+            stash.Trace(f"[isBetterVideo] Better codec. {scene1['files'][0]['path']}={scene1['files'][0]['video_codec']}:Rank={scene1CodecRank} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['video_codec']}:Rank={scene2CodecRank}")
+            return True
+    if (favorFrameRateChange and swapCandidateCk == False) or (swapCandidateCk and swapBetterFrameRate):
+        if (favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) > int(scene1['files'][0]['frame_rate'])) or (not favorHigherFrameRate and int(scene2['files'][0]['frame_rate']) < int(scene1['files'][0]['frame_rate'])):
+            stash.Trace(f"[isBetterVideo]:[favorHigherFrameRate={favorHigherFrameRate}] Better frame rate. {scene1['files'][0]['path']}={scene1['files'][0]['frame_rate']} v.s. {scene2['files'][0]['path']}={scene2['files'][0]['frame_rate']}")
+            return True
+    return False
 def isSwapCandidate(DupFileToKeep, DupFile):
     # Don't move if both are in whitelist
     if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
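The new `isBetterVideo` checks resolution first and returns False outright when `scene1` already has more pixels, so bit rate, codec, and frame rate only break ties at equal or lower resolution. A hypothetical illustration with minimal scene fragments (field values invented; assumes the module above is loaded):

```python
# Hypothetical scene fragments; only the fields isBetterVideo reads are included.
scene1 = {'files': [{'path': 'a.mp4', 'width': 1920, 'height': 1080,
                     'bit_rate': 4000000, 'frame_rate': 30, 'video_codec': 'h264'}]}
scene2 = {'files': [{'path': 'b.mp4', 'width': 1280, 'height': 720,
                     'bit_rate': 9000000, 'frame_rate': 60, 'video_codec': 'h265'}]}

# scene2 wins on bit rate, codec, and frame rate, but scene1's higher
# resolution short-circuits the comparison, so scene2 is never "better".
assert not isBetterVideo(scene1, scene2)
```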
@@ -256,20 +319,69 @@ def isSwapCandidate(DupFileToKeep, DupFile):
     if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
         if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
             return True
+    if isBetterVideo(DupFile, DupFileToKeep, swapCandidateCk=True):
+        if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
+            return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has better codec/bit-rate than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
     return False
+dupWhitelistTagId = None
+def addDupWhitelistTag():
+    global dupWhitelistTagId
+    stash.Trace(f"Adding tag duplicateWhitelistTag = {duplicateWhitelistTag}")
+    descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
+    dupWhitelistTagId = stash.createTagId(duplicateWhitelistTag, descp, ignoreAutoTag=True)
+    stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+excludeDupFileDeleteTagId = None
+def addExcludeDupTag():
+    global excludeDupFileDeleteTagId
+    stash.Trace(f"Adding tag excludeDupFileDeleteTag = {excludeDupFileDeleteTag}")
+    descp = 'Excludes duplicate scene from DupFileManager tagging and deletion process. A scene having this tag will not get deleted by DupFileManager'
+    excludeDupFileDeleteTagId = stash.createTagId(excludeDupFileDeleteTag, descp, ignoreAutoTag=True)
+    stash.Trace(f"dupWhitelistTagId={excludeDupFileDeleteTagId} name={excludeDupFileDeleteTag}")
+def isTaggedExcluded(Scene):
+    for tag in Scene['tags']:
+        if tag['name'] == excludeDupFileDeleteTag:
+            return True
+    return False
+def isWorseKeepCandidate(DupFileToKeep, Scene):
+    if not isInList(whitelist, Scene['files'][0]['path']) and isInList(whitelist, DupFileToKeep['files'][0]['path']):
+        return True
+    if not isInList(graylist, Scene['files'][0]['path']) and isInList(graylist, DupFileToKeep['files'][0]['path']):
+        return True
+    if not isInList(blacklist, DupFileToKeep['files'][0]['path']) and isInList(blacklist, Scene['files'][0]['path']):
+        return True
+    if isInList(graylist, Scene['files'][0]['path']) and isInList(graylist, DupFileToKeep['files'][0]['path']) and indexInList(graylist, DupFileToKeep['files'][0]['path']) < indexInList(graylist, Scene['files'][0]['path']):
+        return True
+    if isInList(blacklist, DupFileToKeep['files'][0]['path']) and isInList(blacklist, Scene['files'][0]['path']) and indexInList(blacklist, DupFileToKeep['files'][0]['path']) < indexInList(blacklist, Scene['files'][0]['path']):
+        return True
+    return False
+stopProcessBarSpin = True
+def spinProcessBar(sleepSeconds = 1):
+    pos = 1
+    maxPos = 30
+    while stopProcessBarSpin == False:
+        stash.progressBar(pos, maxPos)
+        pos +=1
+        if pos > maxPos:
+            pos = 1
+        time.sleep(sleepSeconds)
 def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
+    global stopProcessBarSpin
     duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
     stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
-    dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
+    dupTagId = stash.createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp, ignoreAutoTag=True)
     stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
-    dupWhitelistTagId = None
-    if whitelistDoTagLowResDup:
-        stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
-        duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
-        dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp)
-        stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
+    addDupWhitelistTag()
+    addExcludeDupTag()
     QtyDupSet = 0
     QtyDup = 0
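The spinner added here pairs a background thread with a module-level stop flag: `spinProcessBar` cycles a fake progress position on the helper's thread pool while a long blocking call runs, and flipping `stopProcessBarSpin` back to True lets the loop exit. A self-contained sketch of the same pattern (names and timings hypothetical):

```python
import concurrent.futures, time

stop_spin = True  # stand-in for the module-level stopProcessBarSpin flag

def spin(report, sleep_seconds=1):
    # Cycle a fake 1..30 position so the UI shows activity while we wait.
    pos, max_pos = 1, 30
    while not stop_spin:
        report(pos, max_pos)
        pos = pos + 1 if pos < max_pos else 1
        time.sleep(sleep_seconds)

pool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
stop_spin = False
future = pool.submit(spin, lambda p, m: print(f"{p}/{m}"))
time.sleep(3)      # stand-in for the blocking find_duplicate_scenes() call
stop_spin = True   # signal the spinner to exit...
future.result()    # ...and wait for it, mirroring the plugin's time.sleep(1)
```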
@@ -278,26 +390,30 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
     QtyRealTimeDiff = 0
     QtyTagForDel = 0
     QtySkipForDel = 0
+    QtyExcludeForDel = 0
     QtySwap = 0
     QtyMerge = 0
     QtyDeleted = 0
     stash.Log("#########################################################################")
     stash.Trace("#########################################################################")
-    stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
-    DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
+    stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; matchDupDistance={matchPhaseDistanceText}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+    stopProcessBarSpin = False
+    stash.submit(spinProcessBar)
+    DupFileSets = stash.find_duplicate_scenes(matchPhaseDistance)
+    stopProcessBarSpin = True
+    time.sleep(1) # Make sure we give time for spinProcessBar to exit
     qtyResults = len(DupFileSets)
     stash.Trace("#########################################################################")
     for DupFileSet in DupFileSets:
         stash.Trace(f"DupFileSet={DupFileSet}")
         QtyDupSet+=1
-        stash.Progress(QtyDupSet, qtyResults)
+        stash.progressBar(QtyDupSet, qtyResults)
         SepLine = "---------------------------"
         DupFileToKeep = ""
         DupToCopyFrom = ""
         DupFileDetailList = []
         for DupFile in DupFileSet:
             QtyDup+=1
+            stash.log.sl.progress(f"Scene ID = {DupFile['id']}")
             time.sleep(2)
             Scene = stash.find_scene(DupFile['id'])
             sceneData = f"Scene = {Scene}"
@@ -311,19 +427,45 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                 SepLine = "***************************"
                 if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])):
                     QtyRealTimeDiff += 1
                 if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=resolution: {DupFileToKeep['files'][0]['width']}x{DupFileToKeep['files'][0]['height']} < {Scene['files'][0]['width']}x{Scene['files'][0]['height']}")
                     DupFileToKeep = Scene
                 elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=duration: {DupFileToKeep['files'][0]['duration']} < {Scene['files'][0]['duration']}")
                     DupFileToKeep = Scene
+                elif isBetterVideo(DupFileToKeep, Scene):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=codec,bit_rate, or frame_rate: {DupFileToKeep['files'][0]['video_codec']}, {DupFileToKeep['files'][0]['bit_rate']}, {DupFileToKeep['files'][0]['frame_rate']} : {Scene['files'][0]['video_codec']}, {Scene['files'][0]['bit_rate']}, {Scene['files'][0]['frame_rate']}")
+                    DupFileToKeep = Scene
                 elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not whitelist vs whitelist")
                     DupFileToKeep = Scene
+                elif isTaggedExcluded(Scene) and not isTaggedExcluded(DupFileToKeep):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not ExcludeTag vs ExcludeTag")
+                    DupFileToKeep = Scene
                 elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist vs not blacklist")
+                    DupFileToKeep = Scene
+                elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and isInList(blacklist, Scene['files'][0]['path']) and indexInList(blacklist, DupFileToKeep['files'][0]['path']) > indexInList(blacklist, Scene['files'][0]['path']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=blacklist-index {indexInList(blacklist, DupFileToKeep['files'][0]['path'])} > {indexInList(blacklist, Scene['files'][0]['path'])}")
                     DupFileToKeep = Scene
                 elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=not graylist vs graylist")
                     DupFileToKeep = Scene
-                elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']):
+                elif isInList(graylist, Scene['files'][0]['path']) and isInList(graylist, DupFileToKeep['files'][0]['path']) and indexInList(graylist, DupFileToKeep['files'][0]['path']) > indexInList(graylist, Scene['files'][0]['path']):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=graylist-index {indexInList(graylist, DupFileToKeep['files'][0]['path'])} > {indexInList(graylist, Scene['files'][0]['path'])}")
                     DupFileToKeep = Scene
-                elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']):
+                elif favorLongerFileName and len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} < {len(Scene['files'][0]['path'])}")
+                    DupFileToKeep = Scene
+                elif favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} < {Scene['files'][0]['size']}")
+                    DupFileToKeep = Scene
+                elif not favorLongerFileName and len(DupFileToKeep['files'][0]['path']) > len(Scene['files'][0]['path']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=path-len {len(DupFileToKeep['files'][0]['path'])} > {len(Scene['files'][0]['path'])}")
+                    DupFileToKeep = Scene
+                elif not favorLargerFileSize and int(DupFileToKeep['files'][0]['size']) > int(Scene['files'][0]['size']) and not isWorseKeepCandidate(DupFileToKeep, Scene):
+                    stash.Trace(f"Replacing {DupFileToKeep['files'][0]['path']} with {Scene['files'][0]['path']} for candidate to keep. Reason=size {DupFileToKeep['files'][0]['size']} > {Scene['files'][0]['size']}")
                     DupFileToKeep = Scene
                 else:
                     DupFileToKeep = Scene
@@ -333,14 +475,14 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
         for DupFile in DupFileDetailList:
             if DupFile['id'] != DupFileToKeep['id']:
                 if merge:
-                    result = stash.merge_metadata(DupFile, DupFileToKeep)
+                    result = stash.mergeMetadata(DupFile, DupFileToKeep)
                     if result != "Nothing To Merge":
                         QtyMerge += 1
                 if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])):
                     if isSwapCandidate(DupFileToKeep, DupFile):
                         if merge:
-                            stash.merge_metadata(DupFileToKeep, DupFile)
+                            stash.mergeMetadata(DupFileToKeep, DupFile)
                         if toRecycleBeforeSwap:
                             sendToTrash(DupFile['files'][0]['path'])
                         shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path'])
@@ -350,41 +492,48 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
                     else:
                         stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True)
                     if dupWhitelistTagId and tagDuplicates:
-                        setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep)
+                        setTagId(duplicateWhitelistTag, DupFile, DupFileToKeep)
                     QtySkipForDel+=1
                 else:
-                    if deleteDup:
-                        DupFileName = DupFile['files'][0]['path']
-                        DupFileNameOnly = pathlib.Path(DupFileName).stem
-                        stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
-                        if alternateTrashCanPath != "":
-                            destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
-                            if os.path.isfile(destPath):
-                                destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
-                            shutil.move(DupFileName, destPath)
-                        elif moveToTrashCan:
-                            sendToTrash(DupFileName)
-                        stash.destroy_scene(DupFile['id'], delete_file=True)
-                        QtyDeleted += 1
-                    elif tagDuplicates:
-                        if QtyTagForDel == 0:
-                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
-                        else:
-                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
-                        setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep)
-                        QtyTagForDel+=1
+                    if isTaggedExcluded(DupFile):
+                        stash.Log(f"Excluding file {DupFile['files'][0]['path']} because tagged for exclusion via tag {excludeDupFileDeleteTag}")
+                        QtyExcludeForDel+=1
+                    else:
+                        if deleteDup:
+                            DupFileName = DupFile['files'][0]['path']
+                            DupFileNameOnly = pathlib.Path(DupFileName).stem
+                            stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                            if alternateTrashCanPath != "":
+                                destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+                                if os.path.isfile(destPath):
+                                    destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+                                shutil.move(DupFileName, destPath)
+                            elif moveToTrashCan:
+                                sendToTrash(DupFileName)
+                            stash.destroy_scene(DupFile['id'], delete_file=True)
+                            QtyDeleted += 1
+                        elif tagDuplicates:
+                            if QtyTagForDel == 0:
+                                stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                            else:
+                                stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+                            setTagId(duplicateMarkForDeletion, DupFile, DupFileToKeep)
+                            QtyTagForDel+=1
         stash.Trace(SepLine)
         if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
             break
-    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
+    if doNotGeneratePhash == False:
+        stash.metadata_generate({"phashes": True})
     if cleanAfterDel:
         stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
         stash.metadata_clean(paths=stash.STASH_PATHS)
         stash.metadata_clean_generated()
         stash.optimise_database()
-def deleteTagggedDuplicates():
+def manageTagggedDuplicates(clearTag=False):
+    global stopProcessBarSpin
     tagId = stash.find_tags(q=duplicateMarkForDeletion)
     if len(tagId) > 0 and 'id' in tagId[0]:
         tagId = tagId[0]['id']
@@ -393,63 +542,85 @@
         return
     QtyDup = 0
     QtyDeleted = 0
+    QtyClearedTags = 0
    QtyFailedQuery = 0
     stash.Trace("#########################################################################")
+    stopProcessBarSpin = False
+    stash.submit(spinProcessBar)
     sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
+    stopProcessBarSpin = True
+    time.sleep(1) # Make sure we give time for spinProcessBar to exit
     qtyResults = len(sceneIDs)
     stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
     for sceneID in sceneIDs:
         # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
         QtyDup += 1
         prgs = QtyDup / qtyResults
-        stash.Progress(QtyDup, qtyResults)
+        stash.progressBar(QtyDup, qtyResults)
         scene = stash.find_scene(sceneID['id'])
         if scene == None or len(scene) == 0:
             stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
             QtyFailedQuery += 1
             continue
-        # stash.Log(f"scene={scene}")
-        DupFileName = scene['files'][0]['path']
-        DupFileNameOnly = pathlib.Path(DupFileName).stem
-        stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
-        if alternateTrashCanPath != "":
-            destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
-            if os.path.isfile(destPath):
-                destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
-            shutil.move(DupFileName, destPath)
-        elif moveToTrashCan:
-            sendToTrash(DupFileName)
-        result = stash.destroy_scene(scene['id'], delete_file=True)
-        stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True)
-        QtyDeleted += 1
-    stash.Log(f"QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
-    return
-
-def testSetDupTagOnScene(sceneId):
-    scene = stash.find_scene(sceneId)
-    stash.Log(f"scene={scene}")
-    stash.Log(f"scene tags={scene['tags']}")
-    tag_ids = [dupTagId]
-    for tag in scene['tags']:
-        tag_ids = tag_ids + [tag['id']]
-    stash.Log(f"tag_ids={tag_ids}")
-    stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
+        # stash.Trace(f"scene={scene}")
+        if clearTag:
+            tags = [int(item['id']) for item in scene["tags"] if item['id'] != tagId]
+            stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
+            dataDict = {'id' : scene['id']}
+            if addPrimaryDupPathToDetails:
+                sceneDetails = scene['details']
+                if sceneDetails.find(detailPrefix) == 0 and sceneDetails.find(detailPostfix) > 1:
+                    Pos1 = sceneDetails.find(detailPrefix)
+                    Pos2 = sceneDetails.find(detailPostfix)
+                    sceneDetails = sceneDetails[0:Pos1] + sceneDetails[Pos2 + len(detailPostfix):]
+                    dataDict.update({'details' : sceneDetails})
+            dataDict.update({'tag_ids' : tags})
+            stash.Log(f"Updating scene with {dataDict}")
+            stash.update_scene(dataDict)
+            # stash.removeTag(scene, duplicateMarkForDeletion)
+            QtyClearedTags += 1
+        else:
+            DupFileName = scene['files'][0]['path']
+            DupFileNameOnly = pathlib.Path(DupFileName).stem
+            stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN)
+            if alternateTrashCanPath != "":
+                destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}"
+                if os.path.isfile(destPath):
+                    destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}"
+                shutil.move(DupFileName, destPath)
+            elif moveToTrashCan:
+                sendToTrash(DupFileName)
+            result = stash.destroy_scene(scene['id'], delete_file=True)
+            stash.Trace(f"destroy_scene result={result} for file {DupFileName}", toAscii=True)
+            QtyDeleted += 1
+    stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
+    if doNotGeneratePhash == False and clearTag == False:
+        stash.metadata_generate({"phashes": True})
if stash.PLUGIN_TASK_NAME == "tag_duplicates_task": if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task": elif stash.PLUGIN_TASK_NAME == "delete_tagged_duplicates_task":
deleteTagggedDuplicates() manageTagggedDuplicates()
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task": elif stash.PLUGIN_TASK_NAME == "delete_duplicates_task":
mangeDupFiles(deleteDup=True, merge=mergeDupFilename) mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "clear_duplicate_tags_task":
manageTagggedDuplicates(clearTag=True)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
stash.metadata_generate({"phashes": True})
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif parse_args.dup_tag: elif parse_args.dup_tag:
mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
stash.Trace(f"Tag duplicate EXIT") stash.Trace(f"Tag duplicate EXIT")
elif parse_args.del_tag: elif parse_args.del_tag:
deleteTagggedDuplicates() manageTagggedDuplicates()
stash.Trace(f"Delete Tagged duplicates EXIT") stash.Trace(f"Delete Tagged duplicates EXIT")
elif parse_args.clear_tag:
manageTagggedDuplicates(clearTag=True)
stash.Trace(f"Clear duplicate tags EXIT")
elif parse_args.remove: elif parse_args.remove:
mangeDupFiles(deleteDup=True, merge=mergeDupFilename) mangeDupFiles(deleteDup=True, merge=mergeDupFilename)
stash.Trace(f"Delete duplicate EXIT") stash.Trace(f"Delete duplicate EXIT")

View File: DupFileManager.yml

@@ -1,8 +1,12 @@
 name: DupFileManager
 description: Manages duplicate files.
-version: 0.1.2
+version: 0.1.4
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 settings:
+  doNotGeneratePhash:
+    displayName: Do Not Generate PHASH
+    description: Do not generate PHASH after tag or delete task.
+    type: BOOLEAN
   mergeDupFilename:
     displayName: Merge Duplicate Tags
     description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
@@ -23,8 +27,20 @@ settings:
     displayName: Run Clean After Delete
     description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
     type: BOOLEAN
+  zSwapBetterBitRate:
+    displayName: Swap Better Bit Rate
+    description: Swap better bit rate for duplicate files. Use with DupFileManager_config.py file option favorHighBitRate
+    type: BOOLEAN
+  zSwapBetterFrameRate:
+    displayName: Swap Better Frame Rate
+    description: Swap better frame rate for duplicates. Use with DupFileManager_config.py file option favorHigherFrameRate
+    type: BOOLEAN
+  zSwapCodec:
+    displayName: Swap Better Codec
+    description: If enabled, swap better codec duplicate files to preferred path.
+    type: BOOLEAN
   zSwapHighRes:
-    displayName: Swap High Resolution
+    displayName: Swap Higher Resolution
     description: If enabled, swap higher resolution duplicate files to preferred path.
     type: BOOLEAN
   zSwapLongLength:
@@ -37,19 +53,23 @@ settings:
     type: STRING
   zxGraylist:
     displayName: Gray List
-    description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
+    description: Preferential paths to determine which duplicate should be kept. E.g. C:\2nd_Fav,C:\3rd_Fav,C:\4th_Fav,H:\ShouldKeep
     type: STRING
   zyBlacklist:
     displayName: Black List
-    description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
+    description: Least preferential paths; Determine primary deletion candidates. E.g. C:\Downloads,C:\DelMe-3rd,C:\DelMe-2nd,C:\DeleteMeFirst
     type: STRING
+  zyMatchDupDistance:
+    displayName: Match Duplicate Distance
+    description: (Default=0) Where 0 = Exact Match, 1 = High Match, and 2 = Medium Match.
+    type: NUMBER
   zyMaxDupToProcess:
     displayName: Max Dup Process
     description: Maximum number of duplicates to process. If 0, infinity
     type: NUMBER
   zzdebugTracing:
     displayName: Debug Tracing
-    description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
+    description: Enable debug tracing so-as to add additional debug logging in Stash\plugins\DupFileManager\DupFileManager.log
     type: BOOLEAN
 exec:
   - python
@@ -60,7 +80,11 @@ tasks:
     description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, or black list path.
     defaultArgs:
       mode: tag_duplicates_task
-  - name: Delete Tagged Duplicates
+  - name: Clear Tags
+    description: Clear tag DuplicateMarkForDeletion. Remove the tag from all files.
+    defaultArgs:
+      mode: clear_duplicate_tags_task
+  - name: Delete Tagged Scenes
     description: Only delete scenes having DuplicateMarkForDeletion tag.
     defaultArgs:
       mode: delete_tagged_duplicates_task
@@ -68,3 +92,7 @@
     description: Delete duplicate scenes. Performs deletion without first tagging.
     defaultArgs:
       mode: delete_duplicates_task
+  - name: Generate PHASH Matching
+    description: Generate PHASH file matching. Used for file comparisons.
+    defaultArgs:
+      mode: generate_phash_task

View File: DupFileManager_config.py

@@ -8,19 +8,51 @@ config = {
     "dup_path": "", #Example: "C:\\TempDeleteFolder"
     # The threshold as to what percentage is consider a significant shorter time.
     "significantTimeDiff" : .90, # 90% threshold
-    # Valued passed to stash API function FindDuplicateScenes.
-    "duration_diff" : 10, # (default=10) A value from 1 to 10.
     # If enabled, moves destination file to recycle bin before swapping Hi-Res file.
     "toRecycleBeforeSwap" : True,
     # Character used to seperate items on the whitelist, blacklist, and graylist
     "listSeparator" : ",",
     # Tag used to tag duplicates with lower resolution, duration, and file name length.
     "DupFileTag" : "DuplicateMarkForDeletion",
-    # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
-    "DupWhiteListTag" : "DuplicateWhitelistFile",
+    # Tag name used to tag duplicates in the whitelist. E.g. _DuplicateWhitelistFile
+    "DupWhiteListTag" : "_DuplicateWhitelistFile",
+    # Tags used to exclude duplicate from deletion
+    "excludeDupFileDeleteTag" : "_ExcludeDuplicateMarkForDeletion",
+    # If enabled, favor longer file name over shorter. If disabled, favor shorter file name.
+    "favorLongerFileName" : True,
+    # If enabled, favor larger file size over smaller. If disabled, favor smaller file size.
+    "favorLargerFileSize" : True,
+    # If enabled, favor videos with better codec according to codecRanking
+    "favorCodecRanking" : True,
+    # Codec Ranking in order of preference (default (codecRankingSet1) is order of ranking based on maximum potential efficiency)
+    "codecRankingSet1" : ["h266", "vvc", "av1", "vvdec", "shvc", "h265", "hevc", "xvc", "vp9", "h264", "avc", "mvc", "msmpeg4v10", "vp8", "vcb", "msmpeg4v3", "h263", "h263i", "msmpeg4v2", "msmpeg4v1", "mpeg4", "mpeg-4", "mpeg4video", "theora", "vc3", "vc-3", "vp7", "vp6f", "vp6", "vc1", "vc-1", "mpeg2", "mpeg-2", "mpeg2video", "h262", "h222", "h261", "vp5", "vp4", "vp3", "wmv3", "mpeg1", "mpeg-1", "mpeg1video", "vp3", "wmv2", "wmv1", "wmv", "flv1", "png", "gif", "jpeg", "m-jpeg", "mjpeg"],
+    # codecRankingSet2 is in order of least potential efficiency
+    "codecRankingSet2" : ["gif", "png", "flv1", "mpeg1video", "mpeg1", "wmv1", "wmv2", "wmv3", "mpeg2video", "mpeg2", "AVC", "vc1", "vc-1", "msmpeg4v1", "msmpeg4v2", "msmpeg4v3", "mpeg4", "vp6f", "vp8", "h263i", "h263", "h264", "h265", "av1", "vp9", "h266"],
+    # codecRankingSet3 is in order of quality
+    "codecRankingSet3" : ["h266", "vp9", "av1", "h265", "h264", "h263", "h263i", "vp8", "vp6f", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"],
+    # codecRankingSet4 is in order of compatibility
+    "codecRankingSet4" : ["h264", "vp8", "mpeg4", "msmpeg4v3", "msmpeg4v2", "msmpeg4v1", "h266", "vp9", "av1", "h265", "h263", "h263i", "vp6f", "vc-1", "vc1", "AVC", "mpeg2", "mpeg2video", "wmv3", "wmv2", "wmv1", "mpeg1", "mpeg1video", "flv1", "png", "gif"],
+    # Determines which codecRankingSet to use when ranking codec. Default is 1 for codecRankingSet1
+    "codecRankingSetToUse" : 1,
+    # If enabled, favor videos with a different bit rate value. If favorHighBitRate is true, favor higher rate. If favorHighBitRate is false, favor lower rate
+    "favorBitRateChange" : True,
+    # If enabled, favor videos with higher bit rate. Used with either favorBitRateChange option or UI [Swap Bit Rate Change] option.
+    "favorHighBitRate" : True,
+    # If enabled, favor videos with a different frame rate value. If favorHigherFrameRate is true, favor higher rate. If favorHigherFrameRate is false, favor lower rate
+    "favorFrameRateChange" : True,
+    # If enabled, favor videos with higher frame rate. Used with either favorFrameRateChange option or UI [Swap Better Frame Rate] option.
+    "favorHigherFrameRate" : True,
     # The following fields are ONLY used when running DupFileManager in script mode
     "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
     "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
     "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server
 }
+# Codec ranking research source:
+# https://imagekit.io/blog/video-encoding/
+# https://support.spinetix.com/wiki/Video_decoding
+# https://en.wikipedia.org/wiki/Comparison_of_video_codecs
+# https://en.wikipedia.org/wiki/List_of_open-source_codecs
+# https://en.wikipedia.org/wiki/List_of_codecs
+# https://en.wikipedia.org/wiki/Comparison_of_video_container_formats
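The ranking sets above are consumed positionally: the earlier a codec appears in the active set, the more it is preferred. A simplified, hypothetical illustration of that rule (the plugin itself matches by prefix via `indexInList`; exact matching is used here for brevity):

```python
# Head of codecRankingSet1, abbreviated for the example.
codecRanking = ["h266", "vvc", "av1", "vvdec", "shvc", "h265", "hevc"]

def codec_rank(codec):
    # Lower index = more preferred; codecs not in the set rank worst.
    codec = codec.lower()
    return codecRanking.index(codec) if codec in codecRanking else len(codecRanking)

print(codec_rank("hevc") < codec_rank("av1"))   # False: av1 (index 2) outranks hevc (index 6)
print(codec_rank("h265") < codec_rank("hevc"))  # True: h265 (index 5) beats hevc (index 6)
```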

View File: README.md

@@ -1,4 +1,4 @@
-# DupFileManager: Ver 0.1.2 (By David Maisonave)
+# DupFileManager: Ver 0.1.4 (By David Maisonave)
 DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which manages duplicate file in the Stash system.
@@ -31,9 +31,9 @@ DupFileManager is a [Stash](https://github.com/stashapp/stash) plugin which mana
 ### Requirements
-`pip install --upgrade stashapp-tools`
-`pip install pyYAML`
-`pip install Send2Trash`
+- `pip install --upgrade stashapp-tools`
+- `pip install requests`
+- `pip install Send2Trash`
 ### Installation
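If preferred, the three requirements can be installed in one command, e.g. `pip install --upgrade stashapp-tools requests Send2Trash`.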

View File: StashPluginHelper.py

@@ -1,6 +1,6 @@
 from stashapi.stashapp import StashInterface
 from logging.handlers import RotatingFileHandler
-import re, inspect, sys, os, pathlib, logging, json
+import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
 import concurrent.futures
 from stashapi.stash_types import PhashDistance
 import __main__
@@ -61,6 +61,14 @@ class StashPluginHelper(StashInterface):
     LOG_FILE_DIR = None
     LOG_FILE_NAME = None
     STDIN_READ = None
+    stopProcessBarSpin = True
+    IS_DOCKER = False
+    IS_WINDOWS = False
+    IS_LINUX = False
+    IS_FREEBSD = False
+    IS_MAC_OS = False
     pluginLog = None
     logLinePreviousHits = []
     thredPool = None
@@ -107,6 +115,16 @@ class StashPluginHelper(StashInterface):
             DryRunFieldName = "zzdryRun",
             setStashLoggerAsPluginLogger = False):
         self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
+        if any(platform.win32_ver()):
+            self.IS_WINDOWS = True
+        elif platform.system().lower().startswith("linux"):
+            self.IS_LINUX = True
+            if self.isDocker():
+                self.IS_DOCKER = True
+        elif platform.system().lower().startswith("freebsd"):
+            self.IS_FREEBSD = True
+        elif sys.platform == "darwin":
+            self.IS_MAC_OS = True
         if logToWrnSet: self.log_to_wrn_set = logToWrnSet
         if logToErrSet: self.log_to_err_set = logToErrSet
         if logToNormSet: self.log_to_norm = logToNormSet
@@ -300,37 +318,43 @@ class StashPluginHelper(StashInterface):
             lineNo = inspect.currentframe().f_back.f_lineno
         self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
-    def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
+    # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
+    # The below non-loggging functions use (lower) camelCase naming convention.
+    def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
         if printTo == 0: printTo = self.log_to_norm
         if lineNo == -1:
             lineNo = inspect.currentframe().f_back.f_lineno
         self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
             printTo, logLevel, lineNo)
-    def ExecuteProcess(self, args, ExecDetach=False):
-        import platform, subprocess
-        is_windows = any(platform.win32_ver())
+    def executeProcess(self, args, ExecDetach=False):
         pid = None
-        self.Trace(f"is_windows={is_windows} args={args}")
-        if is_windows:
+        self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
+        if self.IS_WINDOWS:
             if ExecDetach:
-                self.Trace("Executing process using Windows DETACHED_PROCESS")
+                self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
                 DETACHED_PROCESS = 0x00000008
                 pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
             else:
                 pid = subprocess.Popen(args, shell=True).pid
         else:
-            self.Trace("Executing process using normal Popen")
-            pid = subprocess.Popen(args).pid
+            if ExecDetach:
+                # For linux detached, use nohup. I.E. subprocess.Popen(["nohup", "python", "test.py"])
+                if self.IS_LINUX:
+                    args = ["nohup"] + args
+                self.Trace(f"Executing detached process using Popen({args})")
+            else:
+                self.Trace(f"Executing process using normal Popen({args})")
+            pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True)
         self.Trace(f"pid={pid}")
         return pid
-    def ExecutePythonScript(self, args, ExecDetach=True):
+    def executePythonScript(self, args, ExecDetach=True):
         PythonExe = f"{sys.executable}"
         argsWithPython = [f"{PythonExe}"] + args
-        return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach)
-    def Submit(self, *args, **kwargs):
+        return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
+    def submit(self, *args, **kwargs):
         return self.thredPool.submit(*args, **kwargs)
     def asc2(self, data, convertToAscii=None):
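The renamed `executeProcess`/`executePythonScript` helpers now branch per platform for detached launches (Windows `DETACHED_PROCESS`, Linux `nohup`). A hedged usage sketch, assuming an initialized `StashPluginHelper` instance named `stash`; the script path and arguments are examples only:

```python
# Hypothetical invocation: executePythonScript prepends sys.executable to
# the argument list, then detaches the child per the current platform.
pid = stash.executePythonScript(
    ["DupFileManager.py", "--url", "http://localhost:9999", "-a"],
    ExecDetach=True)
stash.Trace(f"Launched detached tagging run, pid={pid}")
```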
@@ -340,24 +364,214 @@ class StashPluginHelper(StashInterface):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str' # return str(data)[2:-1] # strip out b'str'
def init_mergeMetadata(self, excludeMergeTags=None): def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
# Must call init_mergeMetadata, before calling merge_metadata # Must call initMergeMetadata, before calling mergeMetadata
def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
if type(SrcData) is int: if type(SrcData) is int:
SrcData = self.find_scene(SrcData) SrcData = self.find_scene(SrcData)
DestData = self.find_scene(DestData) DestData = self.find_scene(DestData)
return self._mergeMetadata.merge(SrcData, DestData) return self._mergeMetadata.merge(SrcData, DestData)
def Progress(self, currentIndex, maxCount): def progressBar(self, currentIndex, maxCount):
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
self.log.progress(progress) self.log.progress(progress)
def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False): # Test via command line: pip uninstall -y pyYAML watchdog schedule requests
def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
returnValue = True
for moduleName in moduleNames:
try: # Try the Python 3.3+ way
import importlib
import importlib.util
if moduleName in sys.modules:
if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
elif self.isModuleInstalled(moduleName):
if not silent: self.Trace(f"Module {moduleName!r} is available.")
else:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
self.Error(f"Can't find the {moduleName!r} module")
returnValue = False
except Exception as e:
try:
i = importlib.import_module(moduleName)
except ImportError as e:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
tb = traceback.format_exc()
self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
returnValue = False
return returnValue
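Typical start-up check, mirroring how FileMonitor calls it below; returns False only if a module is missing and could not be installed:
    if not stash.modulesInstalled(["watchdog", "schedule", "requests"]):
        stash.Error("Required Python modules are missing; exiting.")
        sys.exit(1)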
def isModuleInstalled(self, moduleName):
try:
__import__(moduleName)
# self.Trace(f"Module {moduleName!r} is installed")
return True
except Exception as e:
tb = traceback.format_exc()
self.Warn(f"Module {moduleName!r} is NOT installed!")
self.Trace(f"Error: {e}\nTraceBack={tb}")
return False
def installModule(self,moduleName):
# if not self.IS_DOCKER:
# try:
# self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
# First try pip import method. (This may fail in a future version of pip.)
# self.installPackage(moduleName)
# self.Trace(f"installPackage called for module {moduleName!r}")
# if self.modulesInstalled(moduleNames=[moduleName], install=False):
# self.Trace(f"Module {moduleName!r} installed")
# return 1
# self.Trace(f"Module {moduleName!r} still not installed.")
# except Exception as e:
# tb = traceback.format_exc()
# self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
# pass
# else:
# self.Trace("Running in Docker, so skipping pip import method.")
try:
if self.IS_LINUX:
# Note: Linux may first need: sudo apt install python3-pip
# if the error starts with "Command 'pip' not found"
# or includes "No module named pip"
self.Log("Checking if pip is installed.")
results = os.popen("pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
results = os.popen("sudo apt install python3-pip").read()
results = os.popen("pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
return -1
self.Trace("pip good.")
if self.IS_FREEBSD:
self.Warn("installModule may NOT work on freebsd")
pipArg = ""
if self.IS_DOCKER:
pipArg = " --break-system-packages"
self.Log(f"Attempting to install package {moduleName!r} via popen.")
results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
results = results.strip("\n")
self.Trace(f"pip results = {results}")
if results.find("Requirement already satisfied:") > -1:
self.Trace(f"Requirement already satisfied for module {moduleName!r}")
return 2
elif results.find("Successfully installed") > -1:
self.Trace(f"Successfully installed module {moduleName!r}")
return 1
elif self.modulesInstalled(moduleNames=[moduleName], install=False):
self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
return 1
self.Error(f"Failed to install module {moduleName!r}")
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return 0
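installModule's return codes: 1 = newly installed, 2 = requirement already satisfied, 0 = install failed, -1 = pip itself unavailable. A hedged sketch of interpreting them:
    result = stash.installModule("requests")
    if result == 2:
        stash.Trace("requests was already satisfied")
    elif result == 1:
        stash.Log("requests has been installed")
    else:  # 0 or -1
        stash.Error("requests could not be installed")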
def installPackage(self,package): # Should delete this. It doesn't work consistently
try:
import pip
if hasattr(pip, 'main'):
pip.main(['install', package])
self.Trace()
else:
pip._internal.main(['install', package])
self.Trace()
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return False
return True
def isDocker(self):
cgroup = pathlib.Path('/proc/self/cgroup')
return pathlib.Path('/.dockerenv').is_file() or (cgroup.is_file() and 'docker' in cgroup.read_text())
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
if trace:
self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
pos = 1
while self.stopProcessBarSpin == False:
if trace:
self.Trace(f"progressBar({pos}, {maxPos})")
self.progressBar(pos, maxPos)
pos += 1
if pos > maxPos:
pos = 1
time.sleep(sleepSeconds)
def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
self.stopProcessBarSpin = False
if trace:
self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace)
def stopSpinningProcessBar(self, sleepSeconds = 1):
self.stopProcessBarSpin = True
time.sleep(sleepSeconds)
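The spinner runs on the helper's thread pool and keeps nudging the Stash progress bar until the stop flag is set; synchronize_library below uses it to cover a long blocking query:
    stash.startSpinningProcessBar()
    scenes = stash.find_scenes(fragment='id files {path}')  # long-running blocking call
    stash.stopSpinningProcessBar()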
def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
tagId = self.find_tags(q=tagName)
if len(tagId):
tagId = tagId[0]
if deleteIfExist:
self.destroy_tag(int(tagId['id']))
else:
return tagId['id']
tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
self.Log(f"Dup-tagId={tagId['id']}")
return tagId['id']
def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if not isinstance(scene, dict) or 'id' not in scene: # Avoids TypeError when an int scene ID is passed
scene_details = self.find_scene(scene)
tagIds = []
doesHaveTagName = False
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
else:
doesHaveTagName = True
if doesHaveTagName:
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
return doesHaveTagName
def addTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if not isinstance(scene, dict) or 'id' not in scene: # Avoids TypeError when an int scene ID is passed
scene_details = self.find_scene(scene)
tagIds = [self.createTagId(tagName)]
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
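A round-trip sketch for the tag helpers, assuming a scene ID in sceneId (placeholder; the description text is illustrative); both helpers accept a scene ID or scene metadata:
    stash.createTagId("_NoLongerPartOfLibrary", tagName_descp="Scene no longer under a Stash library path", ignoreAutoTag=True)
    stash.addTag(sceneId, "_NoLongerPartOfLibrary")
    wasTagged = stash.removeTag(sceneId, "_NoLongerPartOfLibrary")  # True if the tag had been present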
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"""Runs a plugin operation. """Runs a plugin operation.
The operation is run immediately and does not use the job queue. The operation is run immediately and does not use the job queue.
This is a blocking call, and does not return until the plugin completes.
Args: Args:
plugin_id (ID): plugin_id plugin_id (ID): plugin_id
task_name (str, optional): Plugin task to perform task_name (str, optional): Plugin task to perform
@@ -375,43 +589,26 @@ class StashPluginHelper(StashInterface):
"args": args, "args": args,
} }
if asyn: if asyn:
self.Submit(self.call_GQL, query, variables) self.submit(self.call_GQL, query, variables)
return f"Made asynchronous call for plugin {plugin_id}" return f"Made asynchronous call for plugin {plugin_id}"
else: else:
return self.call_GQL(query, variables) return self.call_GQL(query, variables)
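Because runPlugin blocks until the plugin finishes, scheduler code passes asyn=True so the GraphQL call is handed to the thread pool instead; the plugin ID and task mode below are placeholders:
    stash.runPlugin(plugin_id="DupFileManager", task_mode="tag_duplicates_task", asyn=True)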
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
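Sketch using the PhashDistance enum imported at the top of the file; the fragment argument selects which scene fields each duplicate group returns:
    dupGroups = stash.find_duplicate_scenes_diff(distance=PhashDistance.EXACT, fragment='id files {path}')
    for group in dupGroups:
        stash.Trace(f"Duplicate group: {group}")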
# ################################################################################################# # ############################################################################################################
# Functions which are candidates to be added to parent class use snake_case naming convention.
# ############################################################################################################
# The below functions extend class StashInterface with functions which are not yet in the class or # The below functions extend class StashInterface with functions which are not yet in the class or
# fixes for functions which have not yet made it into the official class. # fixes for functions which have not yet made it into the official class.
def metadata_scan(self, paths:list=[], flags={}): def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }" query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
scan_metadata_input = {"paths": paths} scan_metadata_input = {"paths": paths}
if flags: if flags:
scan_metadata_input.update(flags) scan_metadata_input.update(flags)
else: elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
scanData = self.get_configuration_defaults("scan { ...ScanMetadataOptions }") scan_metadata_input.update(scan_config)
if scanData['scan'] != None:
scan_metadata_input.update(scanData.get("scan",{}))
result = self.call_GQL(query, {"input": scan_metadata_input}) result = self.call_GQL(query, {"input": scan_metadata_input})
return result["metadataScan"] return result["metadataScan"]
def get_all_scenes(self): def get_all_scenes(self):
query_all_scenes = """ query_all_scenes = """
query AllScenes { query AllScenes {
@@ -464,6 +661,43 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self): def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Direct SQL associated functions
def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
results = None
if data is None:
return results
if isinstance(data, dict) and 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}")
else:
results = self.sql_query(f"select * from files where id = {data}")
if raw_data:
return results
if 'rows' in results and len(results['rows']) > 0:
return results['rows'][0]
self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
return None
def set_file_basename(self, id, basename):
basename = basename.replace("'", "''") # Escape single quotes for the SQL string literal
return self.sql_commit(f"update files set basename = '{basename}' where id = {id}")
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None srcData = None
@@ -537,3 +771,54 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
listToAdd += [item['id']] listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd}) self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
class taskQueue:
taskqueue = None
def __init__(self, taskqueue):
self.taskqueue = taskqueue
def tooManyScanOnTaskQueue(self, tooManyQty = 5):
count = 0
if self.taskqueue is None:
return False
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Scanning...":
count += 1
if count < tooManyQty:
return False
return True
def cleanJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning...":
return True
return False
def cleanGeneratedJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning generated files...":
return True
return False
def isRunningPluginTaskJobOnTaskQueue(self, taskName):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Running plugin task: {taskName}":
return True
return False
def tagDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
def clearDupTagsJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
def generatePhashMatchingJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
def deleteDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
def deleteTaggedScenesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")

View File

@@ -1,4 +1,3 @@
stashapp-tools >= 0.2.50 stashapp-tools >= 0.2.50
pyYAML requests
watchdog
Send2Trash Send2Trash

View File

@@ -142,7 +142,7 @@ To configure the schedule or to add new task, edit the **task_scheduler** sectio
- pip install -r requirements.txt - pip install -r requirements.txt
- Or manually install each requirement: - Or manually install each requirement:
- `pip install stashapp-tools --upgrade` - `pip install stashapp-tools --upgrade`
- `pip install pyYAML` - `pip install requests`
- `pip install watchdog` - `pip install watchdog`
- `pip install schedule` - `pip install schedule`

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler from logging.handlers import RotatingFileHandler
import re, inspect, sys, os, pathlib, logging, json import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
import concurrent.futures import concurrent.futures
from stashapi.stash_types import PhashDistance from stashapi.stash_types import PhashDistance
import __main__ import __main__
@@ -61,6 +61,14 @@ class StashPluginHelper(StashInterface):
LOG_FILE_DIR = None LOG_FILE_DIR = None
LOG_FILE_NAME = None LOG_FILE_NAME = None
STDIN_READ = None STDIN_READ = None
stopProcessBarSpin = True
IS_DOCKER = False
IS_WINDOWS = False
IS_LINUX = False
IS_FREEBSD = False
IS_MAC_OS = False
pluginLog = None pluginLog = None
logLinePreviousHits = [] logLinePreviousHits = []
thredPool = None thredPool = None
@@ -107,6 +115,16 @@ class StashPluginHelper(StashInterface):
DryRunFieldName = "zzdryRun", DryRunFieldName = "zzdryRun",
setStashLoggerAsPluginLogger = False): setStashLoggerAsPluginLogger = False):
self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
if any(platform.win32_ver()):
self.IS_WINDOWS = True
elif platform.system().lower().startswith("linux"):
self.IS_LINUX = True
if self.isDocker():
self.IS_DOCKER = True
elif platform.system().lower().startswith("freebsd"):
self.IS_FREEBSD = True
elif sys.platform == "darwin":
self.IS_MAC_OS = True
if logToWrnSet: self.log_to_wrn_set = logToWrnSet if logToWrnSet: self.log_to_wrn_set = logToWrnSet
if logToErrSet: self.log_to_err_set = logToErrSet if logToErrSet: self.log_to_err_set = logToErrSet
if logToNormSet: self.log_to_norm = logToNormSet if logToNormSet: self.log_to_norm = logToNormSet
@@ -300,37 +318,43 @@ class StashPluginHelper(StashInterface):
lineNo = inspect.currentframe().f_back.f_lineno lineNo = inspect.currentframe().f_back.f_lineno
self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): # Above logging functions all use UpperCamelCase naming convention to avoid conflict with parent class logging function names.
# The below non-logging functions use (lower) camelCase naming convention.
def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm if printTo == 0: printTo = self.log_to_norm
if lineNo == -1: if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno lineNo = inspect.currentframe().f_back.f_lineno
self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})", self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
printTo, logLevel, lineNo) printTo, logLevel, lineNo)
def ExecuteProcess(self, args, ExecDetach=False): def executeProcess(self, args, ExecDetach=False):
import platform, subprocess
is_windows = any(platform.win32_ver())
pid = None pid = None
self.Trace(f"is_windows={is_windows} args={args}") self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
if is_windows: if self.IS_WINDOWS:
if ExecDetach: if ExecDetach:
self.Trace("Executing process using Windows DETACHED_PROCESS") self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
DETACHED_PROCESS = 0x00000008 DETACHED_PROCESS = 0x00000008
pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
else: else:
pid = subprocess.Popen(args, shell=True).pid pid = subprocess.Popen(args, shell=True).pid
else: else:
self.Trace("Executing process using normal Popen") if ExecDetach:
pid = subprocess.Popen(args).pid # For linux detached, use nohup. I.E. subprocess.Popen(["nohup", "python", "test.py"])
if self.IS_LINUX:
args = ["nohup"] + args
self.Trace(f"Executing detached process using Popen({args})")
else:
self.Trace(f"Executing process using normal Popen({args})")
pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True)
self.Trace(f"pid={pid}") self.Trace(f"pid={pid}")
return pid return pid
def ExecutePythonScript(self, args, ExecDetach=True): def executePythonScript(self, args, ExecDetach=True):
PythonExe = f"{sys.executable}" PythonExe = f"{sys.executable}"
argsWithPython = [f"{PythonExe}"] + args argsWithPython = [f"{PythonExe}"] + args
return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
def Submit(self, *args, **kwargs): def submit(self, *args, **kwargs):
return self.thredPool.submit(*args, **kwargs) return self.thredPool.submit(*args, **kwargs)
def asc2(self, data, convertToAscii=None): def asc2(self, data, convertToAscii=None):
@@ -340,24 +364,214 @@ class StashPluginHelper(StashInterface):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str' # return str(data)[2:-1] # strip out b'str'
def init_mergeMetadata(self, excludeMergeTags=None): def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
# Must call init_mergeMetadata before calling merge_metadata # Must call initMergeMetadata before calling mergeMetadata
def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
if type(SrcData) is int: if type(SrcData) is int:
SrcData = self.find_scene(SrcData) SrcData = self.find_scene(SrcData)
DestData = self.find_scene(DestData) DestData = self.find_scene(DestData)
return self._mergeMetadata.merge(SrcData, DestData) return self._mergeMetadata.merge(SrcData, DestData)
def Progress(self, currentIndex, maxCount): def progressBar(self, currentIndex, maxCount):
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex) progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
self.log.progress(progress) self.log.progress(progress)
def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False): # Test via command line: pip uninstall -y pyYAML watchdog schedule requests
def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
returnValue = True
for moduleName in moduleNames:
try: # Try the Python 3.3+ way
import importlib
import importlib.util
if moduleName in sys.modules:
if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
elif self.isModuleInstalled(moduleName):
if not silent: self.Trace(f"Module {moduleName!r} is available.")
else:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
self.Error(f"Can't find the {moduleName!r} module")
returnValue = False
except Exception as e:
try:
i = importlib.import_module(moduleName)
except ImportError as e:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
tb = traceback.format_exc()
self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
returnValue = False
return returnValue
def isModuleInstalled(self, moduleName):
try:
__import__(moduleName)
# self.Trace(f"Module {moduleName!r} is installed")
return True
except Exception as e:
tb = traceback.format_exc()
self.Warn(f"Module {moduleName!r} is NOT installed!")
self.Trace(f"Error: {e}\nTraceBack={tb}")
return False
def installModule(self,moduleName):
# if not self.IS_DOCKER:
# try:
# self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
# First try pip import method. (This may fail in a future version of pip.)
# self.installPackage(moduleName)
# self.Trace(f"installPackage called for module {moduleName!r}")
# if self.modulesInstalled(moduleNames=[moduleName], install=False):
# self.Trace(f"Module {moduleName!r} installed")
# return 1
# self.Trace(f"Module {moduleName!r} still not installed.")
# except Exception as e:
# tb = traceback.format_exc()
# self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
# pass
# else:
# self.Trace("Running in Docker, so skipping pip import method.")
try:
if self.IS_LINUX:
# Note: Linux may first need: sudo apt install python3-pip
# if the error starts with "Command 'pip' not found"
# or includes "No module named pip"
self.Log("Checking if pip is installed.")
results = os.popen("pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
results = os.popen("sudo apt install python3-pip").read()
results = os.popen("pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
return -1
self.Trace("pip good.")
if self.IS_FREEBSD:
self.Warn("installModule may NOT work on freebsd")
pipArg = ""
if self.IS_DOCKER:
pipArg = " --break-system-packages"
self.Log(f"Attempting to install package {moduleName!r} via popen.")
results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
results = results.strip("\n")
self.Trace(f"pip results = {results}")
if results.find("Requirement already satisfied:") > -1:
self.Trace(f"Requirement already satisfied for module {moduleName!r}")
return 2
elif results.find("Successfully installed") > -1:
self.Trace(f"Successfully installed module {moduleName!r}")
return 1
elif self.modulesInstalled(moduleNames=[moduleName], install=False):
self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
return 1
self.Error(f"Failed to install module {moduleName!r}")
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return 0
def installPackage(self,package): # Should delete this. It doesn't work consistently
try:
import pip
if hasattr(pip, 'main'):
pip.main(['install', package])
self.Trace()
else:
pip._internal.main(['install', package])
self.Trace()
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return False
return True
def isDocker(self):
cgroup = pathlib.Path('/proc/self/cgroup')
return pathlib.Path('/.dockerenv').is_file() or (cgroup.is_file() and 'docker' in cgroup.read_text())
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
if trace:
self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
pos = 1
while self.stopProcessBarSpin == False:
if trace:
self.Trace(f"progressBar({pos}, {maxPos})")
self.progressBar(pos, maxPos)
pos += 1
if pos > maxPos:
pos = 1
time.sleep(sleepSeconds)
def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
self.stopProcessBarSpin = False
if trace:
self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace)
def stopSpinningProcessBar(self, sleepSeconds = 1):
self.stopProcessBarSpin = True
time.sleep(sleepSeconds)
def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
tagId = self.find_tags(q=tagName)
if len(tagId):
tagId = tagId[0]
if deleteIfExist:
self.destroy_tag(int(tagId['id']))
else:
return tagId['id']
tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
self.Log(f"Dup-tagId={tagId['id']}")
return tagId['id']
def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if not isinstance(scene, dict) or 'id' not in scene: # Avoids TypeError when an int scene ID is passed
scene_details = self.find_scene(scene)
tagIds = []
doesHaveTagName = False
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
else:
doesHaveTagName = True
if doesHaveTagName:
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
return doesHaveTagName
def addTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if not isinstance(scene, dict) or 'id' not in scene: # Avoids TypeError when an int scene ID is passed
scene_details = self.find_scene(scene)
tagIds = [self.createTagId(tagName)]
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"""Runs a plugin operation. """Runs a plugin operation.
The operation is run immediately and does not use the job queue. The operation is run immediately and does not use the job queue.
This is a blocking call, and does not return until the plugin completes.
Args: Args:
plugin_id (ID): plugin_id plugin_id (ID): plugin_id
task_name (str, optional): Plugin task to perform task_name (str, optional): Plugin task to perform
@@ -375,43 +589,26 @@ class StashPluginHelper(StashInterface):
"args": args, "args": args,
} }
if asyn: if asyn:
self.Submit(self.call_GQL, query, variables) self.submit(self.call_GQL, query, variables)
return f"Made asynchronous call for plugin {plugin_id}" return f"Made asynchronous call for plugin {plugin_id}"
else: else:
return self.call_GQL(query, variables) return self.call_GQL(query, variables)
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
# ################################################################################################# # ############################################################################################################
# Functions which are candidates to be added to parent class use snake_case naming convention.
# ############################################################################################################
# The below functions extend class StashInterface with functions which are not yet in the class or # The below functions extend class StashInterface with functions which are not yet in the class or
# fixes for functions which have not yet made it into the official class. # fixes for functions which have not yet made it into the official class.
def metadata_scan(self, paths:list=[], flags={}): def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }" query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
scan_metadata_input = {"paths": paths} scan_metadata_input = {"paths": paths}
if flags: if flags:
scan_metadata_input.update(flags) scan_metadata_input.update(flags)
else: elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
scanData = self.get_configuration_defaults("scan { ...ScanMetadataOptions }") scan_metadata_input.update(scan_config)
if scanData['scan'] != None:
scan_metadata_input.update(scanData.get("scan",{}))
result = self.call_GQL(query, {"input": scan_metadata_input}) result = self.call_GQL(query, {"input": scan_metadata_input})
return result["metadataScan"] return result["metadataScan"]
def get_all_scenes(self): def get_all_scenes(self):
query_all_scenes = """ query_all_scenes = """
query AllScenes { query AllScenes {
@@ -464,6 +661,43 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self): def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Direct SQL associated functions
def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
results = None
if data is None:
return results
if isinstance(data, dict) and 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}")
else:
results = self.sql_query(f"select * from files where id = {data}")
if raw_data:
return results
if 'rows' in results and len(results['rows']) > 0:
return results['rows'][0]
self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
return None
def set_file_basename(self, id, basename):
basename = basename.replace("'", "''") # Escape single quotes for the SQL string literal
return self.sql_commit(f"update files set basename = '{basename}' where id = {id}")
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None srcData = None
@@ -537,3 +771,54 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
listToAdd += [item['id']] listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd}) self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
class taskQueue:
taskqueue = None
def __init__(self, taskqueue):
self.taskqueue = taskqueue
def tooManyScanOnTaskQueue(self, tooManyQty = 5):
count = 0
if self.taskqueue is None:
return False
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Scanning...":
count += 1
if count < tooManyQty:
return False
return True
def cleanJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning...":
return True
return False
def cleanGeneratedJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning generated files...":
return True
return False
def isRunningPluginTaskJobOnTaskQueue(self, taskName):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Running plugin task: {taskName}":
return True
return False
def tagDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
def clearDupTagsJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
def generatePhashMatchingJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
def deleteDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
def deleteTaggedScenesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")

View File

@@ -5,8 +5,7 @@
# Example: python filemonitor.py --url http://localhost:9999 # Example: python filemonitor.py --url http://localhost:9999
import os, sys, time, pathlib, argparse, platform, traceback, logging import os, sys, time, pathlib, argparse, platform, traceback, logging
from StashPluginHelper import StashPluginHelper from StashPluginHelper import StashPluginHelper
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/ from StashPluginHelper import taskQueue
from watchdog.observers import Observer # This is also needed for event attributes
from threading import Lock, Condition from threading import Lock, Condition
from multiprocessing import shared_memory from multiprocessing import shared_memory
from filemonitor_config import config from filemonitor_config import config
@@ -25,7 +24,8 @@ STOP_RUNNING_SIG = 32
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop (kill) a running FileMonitor task.') parser.add_argument('--stop', '-s', dest='stop', action='store_true', help='Stop a running FileMonitor task.')
parser.add_argument('--kill_que', '-k', dest='kill_job_task_que', type=str, help='Kill job on Task Queue while running in service mode (command line mode).')
parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.') parser.add_argument('--restart', '-r', dest='restart', action='store_true', help='Restart FileMonitor.')
parser.add_argument('--silent', '--quiet', '-q', dest='quit', action='store_true', help='Run in silent mode. No output to console or stderr. Use this when running from pythonw.exe') parser.add_argument('--silent', '--quiet', '-q', dest='quit', action='store_true', help='Run in silent mode. No output to console or stderr. Use this when running from pythonw.exe')
parser.add_argument('--apikey', '-a', dest='apikey', type=str, help='API Key') parser.add_argument('--apikey', '-a', dest='apikey', type=str, help='API Key')
@@ -54,8 +54,10 @@ stash = StashPluginHelper(
maxbytes=5*1024*1024, maxbytes=5*1024*1024,
apiKey=parse_args.apikey apiKey=parse_args.apikey
) )
stash.Status(logLevel=logging.DEBUG) stash.status(logLevel=logging.DEBUG)
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
stash.Trace(f"stash.JSON_INPUT={stash.JSON_INPUT}")
stash.modulesInstalled(["watchdog", "schedule", "requests"])
exitMsg = "Change success!!" exitMsg = "Change success!!"
mutex = Lock() mutex = Lock()
@@ -86,6 +88,7 @@ fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConf
includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
excludePathChanges = stash.pluginConfig['excludePathChanges'] excludePathChanges = stash.pluginConfig['excludePathChanges']
turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup'] turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup']
NotInLibraryTagName = stash.pluginConfig['NotInLibraryTagName']
if stash.DRY_RUN: if stash.DRY_RUN:
stash.Log("Dry run mode is enabled.") stash.Log("Dry run mode is enabled.")
@@ -93,34 +96,50 @@ stash.Trace(f"(SCAN_MODIFIED={SCAN_MODIFIED}) (SCAN_ON_ANY_EVENT={SCAN_ON_ANY_EV
StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin" StartFileMonitorAsAPluginTaskName = "Monitor as a Plugin"
StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service" StartFileMonitorAsAServiceTaskName = "Start Library Monitor Service"
StartFileMonitorAsAPluginTaskID = "start_library_monitor" StartFileMonitorAsAPluginTaskID = "start_library_monitor"
StartFileMonitorAsAServiceTaskID = "start_library_monitor_service" StartFileMonitorAsAServiceTaskID = "start_library_monitor_service"
StopFileMonitorAsAPluginTaskID = "stop_library_monitor"
SYNC_LIBRARY_REMOVE = "sync_library_remove"
SYNC_LIBRARY_TAG = "sync_library_tag"
CLEAR_SYNC_LIBRARY_TAG = "clear_sync_tags_task"
FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN FileMonitorPluginIsOnTaskQue = stash.CALLED_AS_STASH_PLUGIN
StopLibraryMonitorWaitingInTaskQueue = False StopLibraryMonitorWaitingInTaskQueue = False
JobIdInTheQue = 0 JobIdInTheQue = 0
def isJobWaitingToRun(): JobIdOf_StartAsAServiceTask = None
def isJobWaitingToRun(getJobIdOf_StartAsAServiceTask = False):
global StopLibraryMonitorWaitingInTaskQueue global StopLibraryMonitorWaitingInTaskQueue
global JobIdInTheQue global JobIdInTheQue
global JobIdOf_StartAsAServiceTask
global FileMonitorPluginIsOnTaskQue global FileMonitorPluginIsOnTaskQue
FileMonitorPluginIsOnTaskQue = False FileMonitorPluginIsOnTaskQue = False
jobIsWaiting = False jobIsWaiting = False
taskQue = stash.job_queue() taskQue = stash.job_queue()
for jobDetails in taskQue: for jobDetails in taskQue:
stash.Trace(f"(Job ID({jobDetails['id']})={jobDetails})") stash.Trace(f"(Job ID({jobDetails['id']})={jobDetails})")
if jobDetails['status'] == "READY": if getJobIdOf_StartAsAServiceTask:
if jobDetails['description'] == "Running plugin task: Stop Library Monitor": if jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAServiceTaskName) > -1:
StopLibraryMonitorWaitingInTaskQueue = True JobIdOf_StartAsAServiceTask = jobDetails['id']
JobIdInTheQue = jobDetails['id'] stash.Trace(f"Found current running task '{jobDetails['description']}' with Job ID {JobIdOf_StartAsAServiceTask}")
jobIsWaiting = True return True
elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1: else:
FileMonitorPluginIsOnTaskQue = True if jobDetails['status'] == "READY":
if jobDetails['description'] == "Running plugin task: Stop Library Monitor":
StopLibraryMonitorWaitingInTaskQueue = True
JobIdInTheQue = jobDetails['id']
jobIsWaiting = True
elif jobDetails['status'] == "RUNNING" and jobDetails['description'].find(StartFileMonitorAsAPluginTaskName) > -1:
FileMonitorPluginIsOnTaskQue = True
JobIdInTheQue = 0 JobIdInTheQue = 0
return jobIsWaiting return jobIsWaiting
if stash.CALLED_AS_STASH_PLUGIN and stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID: if stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})") stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun()})")
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
stash.Trace(f"isJobWaitingToRun() = {isJobWaitingToRun(True)})")
class StashScheduler: # Stash Scheduler class StashScheduler: # Stash Scheduler
def __init__(self): def __init__(self):
import schedule # pip install schedule # https://github.com/dbader/schedule import schedule # pip install schedule # https://github.com/dbader/schedule
@@ -292,7 +311,7 @@ class StashScheduler: # Stash Scheduler
if 'args' in task and len(task['args']) > 0: if 'args' in task and len(task['args']) > 0:
args = args + [task['args']] args = args + [task['args']]
stash.Log(f"Executing command arguments {args}.") stash.Log(f"Executing command arguments {args}.")
return f"Execute process PID = {stash.ExecuteProcess(args)}" return f"Execute process PID = {stash.executeProcess(args)}"
else: else:
stash.Error(f"Can not run task '{task['task']}', because it's missing 'command' field.") stash.Error(f"Can not run task '{task['task']}', because it's missing 'command' field.")
return None return None
@@ -307,7 +326,7 @@ class StashScheduler: # Stash Scheduler
detached = True detached = True
if 'detach' in task: if 'detach' in task:
detached = task['detach'] detached = task['detach']
return f"Python process PID = {stash.ExecutePythonScript(args, ExecDetach=detached)}" return f"Python process PID = {stash.executePythonScript(args, ExecDetach=detached)}"
else: else:
stash.Error(f"Can not run task '{task['task']}', because it's missing 'script' field.") stash.Error(f"Can not run task '{task['task']}', because it's missing 'script' field.")
return None return None
@@ -345,8 +364,8 @@ class StashScheduler: # Stash Scheduler
taskMode = task['taskMode'] taskMode = task['taskMode']
if ('taskQue' in task and task['taskQue'] == False) or taskName == None: if ('taskQue' in task and task['taskQue'] == False) or taskName == None:
stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}") stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}")
# Asynchronous threading logic to call run_plugin, because it's a blocking call. # Asynchronous threading logic to call runPlugin, because it's a blocking call.
stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True) stash.runPlugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
return None return None
else: else:
stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}") stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}")
@@ -362,11 +381,11 @@ class StashScheduler: # Stash Scheduler
except: except:
pass pass
stash.Error("Failed to get response from Stash.") stash.Error("Failed to get response from Stash.")
if platform.system() == "Windows": if stash.IS_WINDOWS:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-win.exe" execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-win.exe"
elif platform.system() == "Darwin": # MacOS elif stash.IS_MAC_OS:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep} stash-macos " execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep} stash-macos "
elif platform.system().lower().startswith("linux"): elif stash.IS_LINUX:
# ToDo: Need to verify this method will work for (stash-linux-arm32v6, stash-linux-arm32v7, and stash-linux-arm64v8) # ToDo: Need to verify this method will work for (stash-linux-arm32v6, stash-linux-arm32v7, and stash-linux-arm64v8)
if platform.system().lower().find("32v6") > -1: if platform.system().lower().find("32v6") > -1:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm32v6" execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm32v6"
@@ -376,7 +395,7 @@ class StashScheduler: # Stash Scheduler
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm64v8" execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux-arm64v8"
else: else:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux" execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-linux"
elif platform.system().lower().startswith("freebsd"): elif stash.IS_FREEBSD:
execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-freebsd" execPath = f"{pathlib.Path(stash.PLUGINS_PATH).resolve().parent}{os.sep}stash-freebsd"
elif 'command' not in task or task['command'] == "": elif 'command' not in task or task['command'] == "":
stash.Error("Can not start Stash, because failed to determine platform OS. As a workaround, add 'command' field to this task.") stash.Error("Can not start Stash, because failed to determine platform OS. As a workaround, add 'command' field to this task.")
@@ -391,7 +410,7 @@ class StashScheduler: # Stash Scheduler
else: else:
stash.Error("Could not start Stash, because could not find executable Stash file '{execPath}'") stash.Error("Could not start Stash, because could not find executable Stash file '{execPath}'")
return None return None
result = f"Execute process PID = {stash.ExecuteProcess(args)}" result = f"Execute process PID = {stash.executeProcess(args)}"
time.sleep(sleepAfterStart) time.sleep(sleepAfterStart)
if "RunAfter" in task and len(task['RunAfter']) > 0: if "RunAfter" in task and len(task['RunAfter']) > 0:
for runAfterTask in task['RunAfter']: for runAfterTask in task['RunAfter']:
@@ -456,6 +475,8 @@ lastScanJob = {
JOB_ENDED_STATUSES = ["FINISHED", "CANCELLED"] JOB_ENDED_STATUSES = ["FINISHED", "CANCELLED"]
def start_library_monitor(): def start_library_monitor():
from watchdog.observers import Observer # This is also needed for event attributes
import watchdog # pip install watchdog # https://pythonhosted.org/watchdog/
global shouldUpdate global shouldUpdate
global TargetPaths global TargetPaths
global lastScanJob global lastScanJob
@@ -624,7 +645,7 @@ def start_library_monitor():
TargetPaths = [] TargetPaths = []
TmpTargetPaths = list(set(TmpTargetPaths)) TmpTargetPaths = list(set(TmpTargetPaths))
if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []: if TmpTargetPaths != [] or lastScanJob['DelayedProcessTargetPaths'] != []:
stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths}") stash.Log(f"Triggering Stash scan for path(s) {TmpTargetPaths} and/or {lastScanJob['DelayedProcessTargetPaths']}")
if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR: if lastScanJob['DelayedProcessTargetPaths'] != [] or len(TmpTargetPaths) > 1 or TmpTargetPaths[0] != SPECIAL_FILE_DIR:
if not stash.DRY_RUN: if not stash.DRY_RUN:
if lastScanJob['id'] > -1: if lastScanJob['id'] > -1:
@@ -657,11 +678,15 @@ def start_library_monitor():
lastScanJob['DelayedProcessTargetPaths'].append(path) lastScanJob['DelayedProcessTargetPaths'].append(path)
stash.Trace(f"lastScanJob['DelayedProcessTargetPaths'] = {lastScanJob['DelayedProcessTargetPaths']}") stash.Trace(f"lastScanJob['DelayedProcessTargetPaths'] = {lastScanJob['DelayedProcessTargetPaths']}")
if lastScanJob['id'] == -1: if lastScanJob['id'] == -1:
stash.Trace(f"Calling metadata_scan for paths '{TmpTargetPaths}'") taskqueue = taskQueue(stash.job_queue())
lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths)) if taskqueue.tooManyScanOnTaskQueue(7):
lastScanJob['TargetPaths'] = TmpTargetPaths stash.Log(f"[metadata_scan] Skipping updating Stash for paths '{TmpTargetPaths}', because too many scans on Task Queue.")
lastScanJob['timeAddedToTaskQueue'] = time.time() else:
stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}") stash.Trace(f"[metadata_scan] Calling metadata_scan for paths '{TmpTargetPaths}'")
lastScanJob['id'] = int(stash.metadata_scan(paths=TmpTargetPaths))
lastScanJob['TargetPaths'] = TmpTargetPaths
lastScanJob['timeAddedToTaskQueue'] = time.time()
stash.Trace(f"metadata_scan JobId = {lastScanJob['id']}, Start-Time = {lastScanJob['timeAddedToTaskQueue']}, paths = {lastScanJob['TargetPaths']}")
if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata: if RUN_CLEAN_AFTER_DELETE and RunCleanMetadata:
stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN) stash.metadata_clean(paths=TmpTargetPaths, dry_run=stash.DRY_RUN)
if RUN_GENERATE_CONTENT: if RUN_GENERATE_CONTENT:
@@ -723,9 +748,81 @@ def start_library_monitor_service():
pass pass
stash.Trace("FileMonitor is not running, so it's safe to start it as a service.") stash.Trace("FileMonitor is not running, so it's safe to start it as a service.")
args = [f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{stash.STASH_URL}"] args = [f"{pathlib.Path(__file__).resolve().parent}{os.sep}filemonitor.py", '--url', f"{stash.STASH_URL}"]
if JobIdOf_StartAsAServiceTask != None:
args += ["-k", JobIdOf_StartAsAServiceTask]
if stash.API_KEY: if stash.API_KEY:
args = args + ["-a", stash.API_KEY] args += ["-a", stash.API_KEY]
stash.ExecutePythonScript(args) results = stash.executePythonScript(args)
stash.Trace(f"executePythonScript results='{results}'")
def synchronize_library(removeScene=False):
stash.startSpinningProcessBar()
scenes = stash.find_scenes(fragment='id tags {id name} files {path}')
qtyResults = len(scenes)
Qty = 0
stash.Log(f"count = {qtyResults}")
stash.stopSpinningProcessBar()
for scene in scenes:
Qty += 1
stash.progressBar(Qty, qtyResults)
scenePartOfLibrary = False
if len(scene['files']) == 0:
continue # Skip scenes that have no associated files
for path in stash.STASH_PATHS:
if scene['files'][0]['path'].startswith(path):
scenePartOfLibrary = True
break
if not scenePartOfLibrary:
stash.Log(f"Scene ID={scene['id']}; path={scene['files'][0]['path']} not part of Stash Library")
if removeScene:
stash.destroy_scene(scene['id'])
stash.Log(f"Removed Scene ID={scene['id']}; path={scene['files'][0]['path']}")
else:
stash.addTag(scene, NotInLibraryTagName)
stash.Trace(f"Tagged ({NotInLibraryTagName}) Scene ID={scene['id']}; path={scene['files'][0]['path']}")
def manageTaggedScenes(clearTag=True):
tagId = stash.find_tags(q=NotInLibraryTagName)
if len(tagId) > 0 and 'id' in tagId[0]:
tagId = tagId[0]['id']
else:
stash.Warn(f"Could not find tag ID for tag '{NotInLibraryTagName}'.")
return
QtyDup = 0
QtyRemoved = 0
QtyClearedTags = 0
QtyFailedQuery = 0
stash.Trace("#########################################################################")
stash.startSpinningProcessBar()
stash.Trace(f"Calling find_scenes with tagId={tagId}")
sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
stash.stopSpinningProcessBar()
qtyResults = len(sceneIDs)
stash.Trace(f"Found {qtyResults} scenes with tag ({NotInLibraryTagName}): sceneIDs = {sceneIDs}")
for sceneID in sceneIDs:
# stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
QtyDup += 1
stash.progressBar(QtyDup, qtyResults)
scene = stash.find_scene(sceneID['id'])
if scene is None or len(scene) == 0:
stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
QtyFailedQuery += 1
continue
# stash.Trace(f"scene={scene}")
if clearTag:
tags = [int(item['id']) for item in scene["tags"] if item['id'] != tagId]
stash.TraceOnce(f"tagId={tagId}, len={len(tags)}, tags = {tags}")
dataDict = {'id' : scene['id']}
dataDict.update({'tag_ids' : tags})
stash.Log(f"Updating scene with {dataDict}")
stash.update_scene(dataDict)
# stash.removeTag(scene, NotInLibraryTagName)
QtyClearedTags += 1
else:
stash.destroy_scene(scene['id'])
stash.Log(f"Removed Scene ID={scene['id']}; path={scene['files'][0]['path']}")
QtyRemoved += 1
stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtyRemoved={QtyRemoved}, QtyFailedQuery={QtyFailedQuery}")
runTypeID=0 runTypeID=0
runTypeName=["NothingToDo", "stop_library_monitor", "StartFileMonitorAsAServiceTaskID", "StartFileMonitorAsAPluginTaskID", "CommandLineStartLibMonitor"] runTypeName=["NothingToDo", "stop_library_monitor", "StartFileMonitorAsAServiceTaskID", "StartFileMonitorAsAPluginTaskID", "CommandLineStartLibMonitor"]
@@ -742,13 +839,29 @@ try:
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID: elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAServiceTaskID:
runTypeID=2 runTypeID=2
start_library_monitor_service() start_library_monitor_service()
stash.Trace(f"{StartFileMonitorAsAServiceTaskID} EXIT") stash.Trace(f"{StartFileMonitorAsAServiceTaskID} transitioning to service mode.")
elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID: elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
runTypeID=3 runTypeID=3
start_library_monitor() start_library_monitor()
stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT") stash.Trace(f"{StartFileMonitorAsAPluginTaskID} EXIT")
elif stash.PLUGIN_TASK_NAME == SYNC_LIBRARY_REMOVE:
runTypeID=5
synchronize_library(removeScene=True)
stash.Trace(f"{SYNC_LIBRARY_REMOVE} EXIT")
elif stash.PLUGIN_TASK_NAME == SYNC_LIBRARY_TAG:
runTypeID=6
synchronize_library()
stash.Trace(f"{SYNC_LIBRARY_TAG} EXIT")
elif stash.PLUGIN_TASK_NAME == CLEAR_SYNC_LIBRARY_TAG:
runTypeID=7
manageTaggedScenes()
stash.Trace(f"{CLEAR_SYNC_LIBRARY_TAG} EXIT")
elif not stash.CALLED_AS_STASH_PLUGIN: elif not stash.CALLED_AS_STASH_PLUGIN:
runTypeID=4 runTypeID=4
if parse_args.kill_job_task_que != None and parse_args.kill_job_task_que != "":
# Removing the job from the Task Queue is really only needed for Linux, but it should be OK to do in general.
stash.Log(f"Removing job ID {parse_args.kill_job_task_que} from the Task Queue, because transitioning to service mode.")
stash.stop_job(parse_args.kill_job_task_que)
start_library_monitor() start_library_monitor()
stash.Trace("Command line FileMonitor EXIT") stash.Trace("Command line FileMonitor EXIT")
else: else:
@@ -756,6 +869,7 @@ try:
except Exception as e:
tb = traceback.format_exc()
stash.Error(f"Exception while running FileMonitor. runType='{runTypeName[runTypeID]}'; Error: {e}\nTraceBack={tb}")
stash.log.exception('Got exception on main handler')
stash.Trace("\n*********************************\nEXITING ***********************\n*********************************") stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")
# ToDo: Add option to add path to library if path not included when calling metadata_scan

View File

@@ -40,3 +40,15 @@ tasks:
description: Run [Library Monitor] as a plugin (*Not recommended*)
defaultArgs:
mode: start_library_monitor
- name: Synchronize Library Tag
description: Tag scenes (_NoLongerPartOfLibrary) whose file paths are no longer in the Stash library.
defaultArgs:
mode: sync_library_tag
- name: Synchronize Library Clean
description: Remove scenes from the database whose file paths are no longer in the Stash library.
defaultArgs:
mode: sync_library_remove
- name: Clear Sync Tags
description: Remove the _NoLongerPartOfLibrary tag from all scenes.
defaultArgs:
mode: clear_sync_tags_task
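# Note (assumption, based on the dispatch code earlier in this commit): each task's
# defaultArgs.mode value is what the plugin receives as stash.PLUGIN_TASK_NAME, so the
# mode strings above must match the SYNC_LIBRARY_TAG / SYNC_LIBRARY_REMOVE /
# CLEAR_SYNC_LIBRARY_TAG constants tested in FileMonitor.py.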

View File

@@ -63,6 +63,8 @@ config = {
"runCleanAfterDelete": False, "runCleanAfterDelete": False,
# Enable to run metadata_generate (Generate Content) after metadata scan. # Enable to run metadata_generate (Generate Content) after metadata scan.
"runGenerateContent": False, "runGenerateContent": False,
# Tag name when tagging files that are no longer in Stash Library paths.
"NotInLibraryTagName" : "_NoLongerPartOfLibrary",
# When populated (comma-separated list [lower-case]), only scan for changes for the specified file extensions.
"fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"

View File

@@ -1,3 +1,4 @@
stashapp-tools >= 0.2.50
requests
watchdog
schedule

View File

@@ -48,8 +48,8 @@ RenameFile is a [Stash](https://github.com/stashapp/stash) plugin.
- pip install -r requirements.txt
- Or manually install each requirement:
- `pip install stashapp-tools --upgrade`
- `pip install requests`
- `pip install psutil`
- For (Windows-Only) optional feature **handleExe**, download handle.exe:
- https://learn.microsoft.com/en-us/sysinternals/downloads/handle

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler
import re, inspect, sys, os, pathlib, logging, json, platform, subprocess, traceback, time
import concurrent.futures
from stashapi.stash_types import PhashDistance
import __main__
@@ -61,6 +61,14 @@ class StashPluginHelper(StashInterface):
LOG_FILE_DIR = None
LOG_FILE_NAME = None
STDIN_READ = None
stopProcessBarSpin = True
IS_DOCKER = False
IS_WINDOWS = False
IS_LINUX = False
IS_FREEBSD = False
IS_MAC_OS = False
pluginLog = None
logLinePreviousHits = []
threadPool = None
@@ -107,6 +115,16 @@ class StashPluginHelper(StashInterface):
DryRunFieldName = "zzdryRun", DryRunFieldName = "zzdryRun",
setStashLoggerAsPluginLogger = False): setStashLoggerAsPluginLogger = False):
self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2) self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
if any(platform.win32_ver()):
self.IS_WINDOWS = True
elif platform.system().lower().startswith("linux"):
self.IS_LINUX = True
if self.isDocker():
self.IS_DOCKER = True
elif platform.system().lower().startswith("freebsd"):
self.IS_FREEBSD = True
elif sys.platform == "darwin":
self.IS_MAC_OS = True
if logToWrnSet: self.log_to_wrn_set = logToWrnSet
if logToErrSet: self.log_to_err_set = logToErrSet
if logToNormSet: self.log_to_norm = logToNormSet
@@ -300,37 +318,43 @@ class StashPluginHelper(StashInterface):
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
# The above logging functions all use the UpperCamelCase naming convention to avoid conflicts with the parent class's logging function names.
# The below non-logging functions use the (lower) camelCase naming convention.
def status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
if printTo == 0: printTo = self.log_to_norm
if lineNo == -1:
lineNo = inspect.currentframe().f_back.f_lineno
self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
printTo, logLevel, lineNo)
def executeProcess(self, args, ExecDetach=False):
pid = None
self.Trace(f"self.IS_WINDOWS={self.IS_WINDOWS} args={args}")
if self.IS_WINDOWS:
if ExecDetach:
self.Trace(f"Executing process using Windows DETACHED_PROCESS; args=({args})")
DETACHED_PROCESS = 0x00000008
pid = subprocess.Popen(args,creationflags=DETACHED_PROCESS, shell=True).pid
else:
pid = subprocess.Popen(args, shell=True).pid
else:
self.Trace("Executing process using normal Popen") if ExecDetach:
pid = subprocess.Popen(args).pid # For linux detached, use nohup. I.E. subprocess.Popen(["nohup", "python", "test.py"])
if self.IS_LINUX:
args = ["nohup"] + args
self.Trace(f"Executing detached process using Popen({args})")
else:
self.Trace(f"Executing process using normal Popen({args})")
pid = subprocess.Popen(args).pid # On detach, may need the following for MAC OS subprocess.Popen(args, shell=True, start_new_session=True)
self.Trace(f"pid={pid}") self.Trace(f"pid={pid}")
return pid return pid
def executePythonScript(self, args, ExecDetach=True):
PythonExe = f"{sys.executable}"
argsWithPython = [f"{PythonExe}"] + args
return self.executeProcess(argsWithPython,ExecDetach=ExecDetach)
def submit(self, *args, **kwargs):
return self.threadPool.submit(*args, **kwargs)
def asc2(self, data, convertToAscii=None):
@@ -340,24 +364,214 @@ class StashPluginHelper(StashInterface):
# data = str(data).encode('ascii','ignore') # This works better for logging than ascii function
# return str(data)[2:-1] # strip out b'str'
def initMergeMetadata(self, excludeMergeTags=None):
self.excludeMergeTags = excludeMergeTags
self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)
# Must call initMergeMetadata before calling mergeMetadata
def mergeMetadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
if type(SrcData) is int:
SrcData = self.find_scene(SrcData)
DestData = self.find_scene(DestData)
return self._mergeMetadata.merge(SrcData, DestData)
def progressBar(self, currentIndex, maxCount):
progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
self.log.progress(progress)
# Test via command line: pip uninstall -y pyYAML watchdog schedule requests
def modulesInstalled(self, moduleNames, install=True, silent=False): # moduleNames=["stashapp-tools", "requests", "pyYAML"]
returnValue = True
for moduleName in moduleNames:
try: # Try the Python >= 3.3 way
import importlib
import importlib.util
if moduleName in sys.modules:
if not silent: self.Trace(f"{moduleName!r} already in sys.modules")
elif self.isModuleInstalled(moduleName):
if not silent: self.Trace(f"Module {moduleName!r} is available.")
else:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
self.Error(f"Can't find the {moduleName!r} module")
returnValue = False
except Exception as e:
try:
i = importlib.import_module(moduleName)
except ImportError as e:
if install and (results:=self.installModule(moduleName)) > 0:
if results == 1:
self.Log(f"Module {moduleName!r} has been installed")
else:
if not silent: self.Trace(f"Module {moduleName!r} is already installed")
continue
else:
if install:
tb = traceback.format_exc()
self.Error(f"Can't find the {moduleName!r} module! Error: {e}\nTraceBack={tb}")
returnValue = False
return returnValue
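# Usage sketch (hypothetical module list): modulesInstalled() returns False only if a
# module is missing and could not be installed, so a caller can bail out early:
# if not stash.modulesInstalled(["watchdog", "schedule"]):
#     stash.Error("Missing required modules; exiting.")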
def isModuleInstalled(self, moduleName):
try:
__import__(moduleName)
# self.Trace(f"Module {moduleName!r} is installed")
return True
except Exception as e:
tb = traceback.format_exc()
self.Warn(f"Module {moduleName!r} is NOT installed!")
self.Trace(f"Error: {e}\nTraceBack={tb}")
pass
return False
def installModule(self,moduleName):
# if not self.IS_DOCKER:
# try:
# self.Log(f"Attempting to install package {moduleName!r} using pip import method.")
# First try pip import method. (This may fail in a future version of pip.)
# self.installPackage(moduleName)
# self.Trace(f"installPackage called for module {moduleName!r}")
# if self.modulesInstalled(moduleNames=[moduleName], install=False):
# self.Trace(f"Module {moduleName!r} installed")
# return 1
# self.Trace(f"Module {moduleName!r} still not installed.")
# except Exception as e:
# tb = traceback.format_exc()
# self.Warn(f"pip import method failed for module {moduleName!r}. Will try command line method; Error: {e}\nTraceBack={tb}")
# pass
# else:
# self.Trace("Running in Docker, so skipping pip import method.")
try:
if self.IS_LINUX:
# Note: Linux may first need : sudo apt install python3-pip
# if error starts with "Command 'pip' not found"
# or includes "No module named pip"
self.Log("Checking if pip installed.")
results = os.popen(f"pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
results = os.popen(f"sudo apt install python3-pip").read()
results = os.popen(f"pip --version").read()
if results.find("Command 'pip' not found") != -1 or results.find("No module named pip") != -1:
self.Error(f"Error while calling 'pip'. Make sure pip is installed, and make sure module {moduleName!r} is installed. Results = '{results}'")
return -1
self.Trace("pip good.")
if self.IS_FREEBSD:
self.Warn("installModule may NOT work on freebsd")
pipArg = ""
if self.IS_DOCKER:
pipArg = " --break-system-packages"
self.Log(f"Attempting to install package {moduleName!r} via popen.")
results = os.popen(f"{sys.executable} -m pip install {moduleName}{pipArg}").read() # May need to be f"{sys.executable} -m pip install {moduleName}"
results = results.strip("\n")
self.Trace(f"pip results = {results}")
if results.find("Requirement already satisfied:") > -1:
self.Trace(f"Requirement already satisfied for module {moduleName!r}")
return 2
elif results.find("Successfully installed") > -1:
self.Trace(f"Successfully installed module {moduleName!r}")
return 1
elif self.modulesInstalled(moduleNames=[moduleName], install=False):
self.Trace(f"modulesInstalled returned True for module {moduleName!r}")
return 1
self.Error(f"Failed to install module {moduleName!r}")
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return 0
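# Return-code convention implemented above: 1 = newly installed, 2 = requirement already
# satisfied, 0 = install failed, -1 = pip itself unavailable. A hedged caller sketch:
# if stash.installModule("schedule") <= 0:
#     stash.Error("Could not provision the 'schedule' module")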
def installPackage(self,package): # Should delete this. It doesn't work consistently
try:
import pip
if hasattr(pip, 'main'):
pip.main(['install', package])
self.Trace()
else:
pip._internal.main(['install', package])
self.Trace()
except Exception as e:
tb = traceback.format_exc()
self.Error(f"Failed to install module {moduleName!r}. Error: {e}\nTraceBack={tb}")
return False
return True
def isDocker(self):
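# Heuristic: a /.dockerenv marker file, or a 'docker' entry in /proc/self/cgroup.
# (The cgroup check only applies to cgroup-v1 hosts, so this may miss some containers.)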
cgroup = pathlib.Path('/proc/self/cgroup')
return pathlib.Path('/.dockerenv').is_file() or cgroup.is_file() and 'docker' in cgroup.read_text()
def spinProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
if trace:
self.Trace(f"Starting spinProcessBar loop; sleepSeconds={sleepSeconds}, maxPos={maxPos}")
pos = 1
while self.stopProcessBarSpin == False:
if trace:
self.Trace(f"progressBar({pos}, {maxPos})")
self.progressBar(pos, maxPos)
pos +=1
if pos > maxPos:
pos = 1
time.sleep(sleepSeconds)
def startSpinningProcessBar(self, sleepSeconds = 1, maxPos = 30, trace = False):
self.stopProcessBarSpin = False
if trace:
self.Trace(f"submitting spinProcessBar; sleepSeconds={sleepSeconds}, maxPos={maxPos}, trace={trace}")
self.submit(self.spinProcessBar, sleepSeconds, maxPos, trace)
def stopSpinningProcessBar(self, sleepSeconds = 1):
self.stopProcessBarSpin = True
time.sleep(sleepSeconds)
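# Usage sketch: wrap a slow blocking call so the Stash UI progress bar keeps moving,
# mirroring the startSpinningProcessBar()/stopSpinningProcessBar() pair placed around
# find_scenes earlier in this commit:
# stash.startSpinningProcessBar()
# results = someLongRunningQuery() # hypothetical placeholder
# stash.stopSpinningProcessBar()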
def createTagId(self, tagName, tagName_descp = "", deleteIfExist = False, ignoreAutoTag = False):
tagId = self.find_tags(q=tagName)
if len(tagId):
tagId = tagId[0]
if deleteIfExist:
self.destroy_tag(int(tagId['id']))
else:
return tagId['id']
tagId = self.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": ignoreAutoTag})
self.Log(f"Dup-tagId={tagId['id']}")
return tagId['id']
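# Usage sketch (hypothetical description text): returns the existing tag's ID, or
# creates the tag; deleteIfExist=True recreates it from scratch:
# tagId = stash.createTagId("_NoLongerPartOfLibrary", tagName_descp="Set by FileMonitor")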
def removeTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if 'id' not in scene:
scene_details = self.find_scene(scene)
tagIds = []
doesHaveTagName = False
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
else:
doesHaveTagName = True
if doesHaveTagName:
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
return doesHaveTagName
def addTag(self, scene, tagName): # scene can be scene ID or scene metadata
scene_details = scene
if 'id' not in scene:
scene_details = self.find_scene(scene)
tagIds = [self.createTagId(tagName)]
for tag in scene_details['tags']:
if tag['name'] != tagName:
tagIds += [tag['id']]
dataDict = {'id' : scene_details['id']}
dataDict.update({'tag_ids' : tagIds})
self.update_scene(dataDict)
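# Usage sketch: both helpers accept a scene ID or scene metadata, and removeTag()
# reports whether the tag was actually present (sceneID below is a hypothetical variable):
# stash.addTag(sceneID, "_NoLongerPartOfLibrary")
# if stash.removeTag(sceneID, "_NoLongerPartOfLibrary"):
#     stash.Log("Tag removed")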
def runPlugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
"""Runs a plugin operation. """Runs a plugin operation.
The operation is run immediately and does not use the job queue. The operation is run immediately and does not use the job queue.
This is a blocking call, and does not return until plugin completes.
Args:
plugin_id (ID): plugin_id
task_name (str, optional): Plugin task to perform
@@ -375,43 +589,26 @@ class StashPluginHelper(StashInterface):
"args": args, "args": args,
} }
if asyn: if asyn:
self.Submit(self.call_GQL, query, variables) self.submit(self.call_GQL, query, variables)
return f"Made asynchronous call for plugin {plugin_id}" return f"Made asynchronous call for plugin {plugin_id}"
else: else:
return self.call_GQL(query, variables) return self.call_GQL(query, variables)
# ############################################################################################################
# Functions which are candidates to be added to parent class use snake_case naming convention.
# ############################################################################################################
# The below functions extend class StashInterface with functions which are not yet in the class or
# fixes for functions which have not yet made it into the official class.
def metadata_scan(self, paths:list=[], flags={}): # ToDo: Add option to add path to library if path not included when calling metadata_scan
query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }" query = "mutation MetadataScan($input:ScanMetadataInput!) { metadataScan(input: $input) }"
scan_metadata_input = {"paths": paths} scan_metadata_input = {"paths": paths}
if flags: if flags:
scan_metadata_input.update(flags) scan_metadata_input.update(flags)
elif scan_config := self.get_configuration_defaults("scan { ...ScanMetadataOptions }").get("scan"):
scan_metadata_input.update(scan_config)
result = self.call_GQL(query, {"input": scan_metadata_input})
return result["metadataScan"]
def get_all_scenes(self):
query_all_scenes = """
query AllScenes {
@@ -464,6 +661,43 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
query = """
query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
...SceneSlim
}
}
"""
if fragment:
query = re.sub(r'\.\.\.SceneSlim', fragment, query)
else:
query += "fragment SceneSlim on Scene { id }"
variables = { "distance": distance, "duration_diff": duration_diff }
result = self.call_GQL(query, variables)
return result['findDuplicateScenes']
# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Direct SQL associated functions
def get_file_metadata(self, data, raw_data = False): # data is either file ID or scene metadata
results = None
if data == None:
return results
if 'files' in data and len(data['files']) > 0 and 'id' in data['files'][0]:
results = self.sql_query(f"select * from files where id = {data['files'][0]['id']}")
else:
results = self.sql_query(f"select * from files where id = {data}")
if raw_data:
return results
if 'rows' in results:
return results['rows'][0]
self.Error(f"Unknown error while SQL query with data='{data}'; Results='{results}'.")
return None
def set_file_basename(self, id, basename):
return self.sql_commit(f"update files set basename = '{basename}' where id = {id}")
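# Caution / usage sketch: both helpers interpolate values directly into SQL, so a
# basename containing a single quote would break the statement. A minimal mitigation
# (newName and fileId are hypothetical variables):
# stash.set_file_basename(fileId, newName.replace("'", "''"))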
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
srcData = None srcData = None
@@ -537,3 +771,54 @@ class mergeMetadata: # A class to merge scene metadata from source scene to dest
listToAdd += [item['id']]
self.dataDict.update({ updateFieldName : listToAdd})
# self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
class taskQueue:
taskqueue = None
def __init__(self, taskqueue):
self.taskqueue = taskqueue
def tooManyScanOnTaskQueue(self, tooManyQty = 5):
count = 0
if self.taskqueue == None:
return False
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Scanning...":
count += 1
if count < tooManyQty:
return False
return True
def cleanJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning...":
return True
return False
def cleanGeneratedJobOnTaskQueue(self):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Cleaning generated files...":
return True
return False
def isRunningPluginTaskJobOnTaskQueue(self, taskName):
for jobDetails in self.taskqueue:
if jobDetails['description'] == "Running plugin task: {taskName}":
return True
return False
def tagDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Tag Duplicates")
def clearDupTagsJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Clear Tags")
def generatePhashMatchingJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Generate PHASH Matching")
def deleteDuplicatesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Duplicates")
def deleteTaggedScenesJobOnTaskQueue(self):
return self.isRunningPluginTaskJobOnTaskQueue("Delete Tagged Scenes")

View File

@@ -17,7 +17,7 @@ Example Usage:
of.closeFile(r"B:\V\V\testdup\deleme2.mp4") of.closeFile(r"B:\V\V\testdup\deleme2.mp4")
""" """
import ctypes, os, sys, psutil, argparse, traceback, logging, numbers, string import ctypes, os, sys, argparse, traceback, logging, numbers, string
from ctypes import wintypes from ctypes import wintypes
# from StashPluginHelper import StashPluginHelper # from StashPluginHelper import StashPluginHelper
# Look at the following links to enhance this code: # Look at the following links to enhance this code:
@@ -30,8 +30,8 @@ from ctypes import wintypes
# getPid is the only function which does NOT require elevated admin privileges.
class openedFile():
# generic strings and constants
ntdll = None
kernel32 = None
NTSTATUS = wintypes.LONG
INVALID_HANDLE_VALUE = wintypes.HANDLE(-1).value
FILE_READ_ATTRIBUTES = 0x80
@@ -51,57 +51,62 @@ class openedFile():
self.stash = stash
if handleExe == None or handleExe == "" or not os.path.isfile(handleExe):
raise Exception(f"handleExe requires a valid path to Sysinternals 'handle.exe' or 'handle64.exe' executable. Can be downloaded from following link:\nhttps://learn.microsoft.com/en-us/sysinternals/downloads/handle")
if self.stash != None and self.stash.IS_WINDOWS:
self.ntdll = ctypes.WinDLL('ntdll')
self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
# create handle on concerned file with dwDesiredAccess == self.FILE_READ_ATTRIBUTES
self.kernel32.CreateFileW.restype = wintypes.HANDLE
self.kernel32.CreateFileW.argtypes = (
wintypes.LPCWSTR, # In lpFileName
wintypes.DWORD, # In dwDesiredAccess
wintypes.DWORD, # In dwShareMode
self.LPSECURITY_ATTRIBUTES, # In_opt lpSecurityAttributes
wintypes.DWORD, # In dwCreationDisposition
wintypes.DWORD, # In dwFlagsAndAttributes
wintypes.HANDLE) # In_opt hTemplateFile
def getPid(self, path):
self.lastPath = path
# ToDo: Add Linux implementation
if self.stash != None and self.stash.IS_WINDOWS:
hFile = self.kernel32.CreateFileW(
path, self.FILE_READ_ATTRIBUTES, self.FILE_SHARE_READ, None, self.OPEN_EXISTING,
self.FILE_FLAG_BACKUP_SEMANTICS, None)
if hFile == self.INVALID_HANDLE_VALUE:
raise ctypes.WinError(ctypes.get_last_error())
# prepare data types for system call
class IO_STATUS_BLOCK(ctypes.Structure):
class _STATUS(ctypes.Union):
_fields_ = (('Status', self.NTSTATUS),
('Pointer', wintypes.LPVOID))
_anonymous_ = '_Status',
_fields_ = (('_Status', _STATUS),
('Information', self.ULONG_PTR))
iosb = IO_STATUS_BLOCK()
class FILE_PROCESS_IDS_USING_FILE_INFORMATION(ctypes.Structure):
_fields_ = (('NumberOfProcessIdsInList', wintypes.LARGE_INTEGER),
('ProcessIdList', wintypes.LARGE_INTEGER * 64))
info = FILE_PROCESS_IDS_USING_FILE_INFORMATION()
PIO_STATUS_BLOCK = ctypes.POINTER(IO_STATUS_BLOCK)
self.ntdll.NtQueryInformationFile.restype = self.NTSTATUS
self.ntdll.NtQueryInformationFile.argtypes = (
wintypes.HANDLE, # In FileHandle
PIO_STATUS_BLOCK, # Out IoStatusBlock
wintypes.LPVOID, # Out FileInformation
wintypes.ULONG, # In Length
self.FILE_INFORMATION_CLASS) # In FileInformationClass
# system call to retrieve list of PIDs currently using the file
status = self.ntdll.NtQueryInformationFile(hFile, ctypes.byref(iosb),
ctypes.byref(info),
ctypes.sizeof(info),
self.FileProcessIdsUsingFileInformation)
pidList = info.ProcessIdList[0:info.NumberOfProcessIdsInList]
if len(pidList) > 0:
return pidList
return None
def isAdmin(self):
if self.stash != None and self.stash.IS_WINDOWS:
try:
return ctypes.windll.shell32.IsUserAnAdmin()
except:
@@ -112,7 +117,7 @@ class openedFile():
def runMeAsAdmin(self):
if self.isAdmin() == True:
return
if self.stash != None and self.stash.IS_WINDOWS:
# Below is a Windows only method which does NOT popup a console.
import win32com.shell.shell as shell # Requires: pip install pywin32
script = os.path.abspath(sys.argv[0])
@@ -130,6 +135,7 @@ class openedFile():
return filename
def getFilesOpen(self, pid:int): # Requires running with admin privileges.
import psutil # Requires: pip install psutil
p = psutil.Process(pid)
return p.open_files()

View File

@@ -2,13 +2,13 @@
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer
import os, sys, shutil, json, hashlib, pathlib, logging, time, traceback
from pathlib import Path
import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
from stashapi.stashapp import StashInterface
from StashPluginHelper import StashPluginHelper
from StashPluginHelper import taskQueue
from renamefile_settings import config # Import settings from renamefile_settings.py
# **********************************************************************
# Constant global variables --------------------------------------------
@@ -26,6 +26,7 @@ QUERY_ALL_SCENES = """
# **********************************************************************
# Global variables --------------------------------------------
inputToUpdateScenePost = False
doNothing = False
exitMsg = "Change success!!" exitMsg = "Change success!!"
# ********************************************************************** # **********************************************************************
@@ -47,9 +48,11 @@ stash = StashPluginHelper(
config=config,
maxbytes=10*1024*1024,
)
# stash.status(logLevel=logging.DEBUG)
if stash.PLUGIN_ID in stash.PLUGIN_CONFIGURATION:
stash.pluginSettings.update(stash.PLUGIN_CONFIGURATION[stash.PLUGIN_ID])
if stash.IS_DOCKER:
stash.log_to_wrn_set = stash.LOG_TO_STASH + stash.LOG_TO_FILE
# ----------------------------------------------------------------------
WRAPPER_STYLES = config["wrapper_styles"]
POSTFIX_STYLES = config["postfix_styles"]
@@ -58,10 +61,14 @@ POSTFIX_STYLES = config["postfix_styles"]
dry_run = stash.pluginSettings["zzdryRun"]
dry_run_prefix = ''
try:
stash.Trace(f"hookContext={stash.JSON_INPUT['args']['hookContext']}")
if stash.JSON_INPUT['args']['hookContext']['input'] == None:
doNothing = True
else:
inputToUpdateScenePost = True # This avoids calling rename logic twice
except:
pass
stash.Trace("settings: %s " % (stash.pluginSettings,))
if dry_run:
stash.Log("Dry run mode is enabled.")
@@ -70,16 +77,20 @@ max_tag_keys = stash.pluginSettings["zmaximumTagKeys"] if stash.pluginSettings["
# ToDo: Add split logic here to split a possible string array into an array
exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split()
if len(exclude_paths) > 0:
stash.Trace(f"(exclude_paths={exclude_paths})")
excluded_tags = config["excludeTags"]
# Extract tag whitelist from settings
tag_whitelist = config["tagWhitelist"]
if not tag_whitelist:
tag_whitelist = ""
if len(tag_whitelist) > 0:
stash.Trace(f"(tag_whitelist={tag_whitelist})")
handleExe = stash.pluginConfig['handleExe']
openedfile = None
if handleExe != None and handleExe != "" and os.path.isfile(handleExe):
stash.modulesInstalled(["psutil"], silent=True)
from openedFile import openedFile
openedfile = openedFile(handleExe, stash)
endpointHost = stash.JSON_INPUT['server_connection']['Host']
@@ -87,7 +98,7 @@ if endpointHost == "0.0.0.0":
endpointHost = "localhost" endpointHost = "localhost"
endpoint = f"{stash.JSON_INPUT['server_connection']['Scheme']}://{endpointHost}:{stash.JSON_INPUT['server_connection']['Port']}/graphql" endpoint = f"{stash.JSON_INPUT['server_connection']['Scheme']}://{endpointHost}:{stash.JSON_INPUT['server_connection']['Port']}/graphql"
stash.Trace(f"(endpoint={endpoint})") # stash.Trace(f"(endpoint={endpoint})")
move_files = stash.pluginSettings["zafileRenameViaMove"] move_files = stash.pluginSettings["zafileRenameViaMove"]
fieldKeyList = stash.pluginSettings["zfieldKeyList"] # Default Field Key List with the desired order fieldKeyList = stash.pluginSettings["zfieldKeyList"] # Default Field Key List with the desired order
if not fieldKeyList or fieldKeyList == "": if not fieldKeyList or fieldKeyList == "":
@@ -95,13 +106,13 @@ if not fieldKeyList or fieldKeyList == "":
fieldKeyList = fieldKeyList.replace(" ", "") fieldKeyList = fieldKeyList.replace(" ", "")
fieldKeyList = fieldKeyList.replace(";", ",") fieldKeyList = fieldKeyList.replace(";", ",")
fieldKeyList = fieldKeyList.split(",") fieldKeyList = fieldKeyList.split(",")
stash.Trace(f"(fieldKeyList={fieldKeyList})") # stash.Trace(f"(fieldKeyList={fieldKeyList})")
separator = stash.pluginSettings["zseparators"] separator = stash.pluginSettings["zseparators"]
# ---------------------------------------------------------------------- # ----------------------------------------------------------------------
# ********************************************************************** # **********************************************************************
double_separator = separator + separator double_separator = separator + separator
stash.Trace(f"(WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})") # stash.Trace(f"(WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})")
# Function to replace illegal characters in filenames # Function to replace illegal characters in filenames
def replace_illegal_characters(filename): def replace_illegal_characters(filename):
@@ -123,6 +134,7 @@ def form_filename(original_file_stem, scene_details):
tag_keys_added = 0
default_title = ''
if_notitle_use_org_filename = config["if_notitle_use_org_filename"]
excludeIgnoreAutoTags = config["excludeIgnoreAutoTags"]
include_keyField_if_in_name = stash.pluginSettings["z_keyFIeldsIncludeInFileName"]
if if_notitle_use_org_filename:
default_title = original_file_stem
@@ -253,12 +265,11 @@ def form_filename(original_file_stem, scene_details):
stash.Trace(f"(gallery_name={gallery_name})") stash.Trace(f"(gallery_name={gallery_name})")
elif key == 'tags': elif key == 'tags':
if stash.pluginSettings["tagAppend"]: if stash.pluginSettings["tagAppend"]:
tags = [tag.get('name', '') for tag in scene_details.get('tags', [])] for tag in scene_details['tags']:
for tag_name in tags: stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag['name']}; ignore_auto_tag={tag['ignore_auto_tag']})")
stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})") if (excludeIgnoreAutoTags == False or tag['ignore_auto_tag'] == False) and (include_keyField_if_in_name or tag['name'].lower() not in title.lower()):
if include_keyField_if_in_name or tag_name.lower() not in title.lower(): add_tag(tag['name'] + POSTFIX_STYLES.get('tag'))
add_tag(tag_name + POSTFIX_STYLES.get('tag')) stash.Trace(f"(tag_name={tag['name']})")
stash.Trace(f"(tag_name={tag_name})")
stash.Trace(f"(filename_parts={filename_parts})") stash.Trace(f"(filename_parts={filename_parts})")
new_filename = separator.join(filename_parts).replace(double_separator, separator) new_filename = separator.join(filename_parts).replace(double_separator, separator)
@@ -273,13 +284,26 @@ def form_filename(original_file_stem, scene_details):
def rename_scene(scene_id):
global exitMsg
POST_SCAN_DELAY = 3
scene_details = stash.find_scene(scene_id)
stash.Trace(f"(scene_details={scene_details})")
if not scene_details:
stash.Error(f"Scene with ID {scene_id} not found.")
return None
taskqueue = taskQueue(stash.job_queue())
original_file_path = scene_details['files'][0]['path']
original_parent_directory = Path(original_file_path).parent
maxScanCountDefault = 5
maxScanCountForUpdate = 10
if scene_details['title'] == None or scene_details['title'] == "":
maxScanCountDefault = 1
maxScanCountForUpdate = 1
if not os.path.isfile(original_file_path) and not taskqueue.clearDupTagsJobOnTaskQueue() and not taskqueue.deleteTaggedScenesJobOnTaskQueue() and not taskqueue.tooManyScanOnTaskQueue(maxScanCountDefault):
stash.Warn(f"[metadata_scan] Have to rescan scene ID {scene_id}, because Stash library path '{original_file_path}' does not exist. Scanning path: {original_parent_directory.resolve().as_posix()}")
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
time.sleep(POST_SCAN_DELAY) # After a scan, need a few seconds delay before fetching data.
scene_details = stash.find_scene(scene_id)
original_file_path = scene_details['files'][0]['path']
stash.Trace(f"(original_file_path={original_file_path})") stash.Trace(f"(original_file_path={original_file_path})")
# Check if the scene's path matches any of the excluded paths # Check if the scene's path matches any of the excluded paths
if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths): if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths):
@@ -318,17 +342,60 @@ def rename_scene(scene_id):
os.rename(original_file_path, new_file_path)
exitMsg = f"{dry_run_prefix}Renamed file to '{new_file_path}' from '{original_file_path}'"
except OSError as e:
exitMsg = f"Failed to move/rename file: From {original_file_path} to {new_file_path}; targetDidExist={targetDidExist}. Error: {e}"
stash.Error(exitMsg)
if not taskqueue.tooManyScanOnTaskQueue(maxScanCountDefault):
stash.Trace(f"Calling [metadata_scan] for path {original_parent_directory.resolve().as_posix()}")
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
if targetDidExist:
raise
if os.path.isfile(new_file_path):
if os.path.isfile(original_file_path):
os.remove(original_file_path)
pass
else:
# ToDo: Add delay rename here
raise
if not taskqueue.tooManyScanOnTaskQueue(maxScanCountForUpdate):
stash.Trace(f"Calling [metadata_scan] for path {original_parent_directory.resolve().as_posix()}")
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
time.sleep(POST_SCAN_DELAY) # After a scan, need a few seconds delay before fetching data.
scene_details = stash.find_scene(scene_id)
if new_file_path != scene_details['files'][0]['path'] and not targetDidExist and not taskqueue.tooManyScanOnTaskQueue(maxScanCountDefault):
stash.Trace(f"Calling [metadata_scan] for path {original_parent_directory.resolve().as_posix()}")
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
time.sleep(POST_SCAN_DELAY) # After a scan, need a few seconds delay before fetching data.
scene_details = stash.find_scene(scene_id)
if new_file_path != scene_details['files'][0]['path']:
if not os.path.isfile(new_file_path):
stash.Error(f"Failed to rename file from {scene_details['files'][0]['path']} to {new_file_path}.")
elif os.path.isfile(scene_details['files'][0]['path']):
stash.Warn(f"Failed to rename file from {scene_details['files'][0]['path']} to {new_file_path}. Old file still exist. Will attempt delay deletion.")
for i in range(1, 5*60):
time.sleep(60)
if not os.path.isfile(new_file_path):
stash.Error(f"Not deleting old file name {original_file_path} because new file name {new_file_path} does NOT exist.")
break
try:
os.remove(original_file_path)
except OSError:
continue # The old file is most likely still locked; retry on the next pass.
if not os.path.isfile(original_file_path):
stash.Log(f"Deleted {original_file_path} in delayed deletion after {i} minutes.")
stash.Trace(f"Calling [metadata_scan] for path {original_parent_directory.resolve().as_posix()}")
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
break
else:
org_stem = Path(scene_details['files'][0]['path']).stem
new_stem = Path(new_file_path).stem
file_id = scene_details['files'][0]['id']
stash.Warn(f"Failed to update Stash library with new name. Will try direct SQL update. org_name={org_stem}; new_name={new_stem}; file_id={file_id}")
# stash.set_file_basename(file_id, new_stem)
else:
stash.Warn(f"Not performming [metadata_scan] because too many scan jobs are already on the Task Queue. Recommend running a full scan, and a clean job to make sure Stash DB is up to date.")
if not taskqueue.cleanJobOnTaskQueue():
stash.metadata_scan()
stash.metadata_clean()
if not taskqueue.cleanGeneratedJobOnTaskQueue():
stash.metadata_clean_generated()
stash.Log(exitMsg)
return new_filename
@@ -353,13 +420,16 @@ def rename_files_task():
try:
if stash.PLUGIN_TASK_NAME == "rename_files_task":
stash.Trace(f"PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME}")
rename_files_task()
elif inputToUpdateScenePost:
rename_files_task()
else:
stash.Trace(f"Nothing to do. doNothing={doNothing}")
except Exception as e:
tb = traceback.format_exc()
stash.Error(f"Exception while running Plugin. Error: {e}\nTraceBack={tb}")
# stash.log.exception('Got exception on main handler')
stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")

View File

@@ -38,7 +38,7 @@ config = {
"date": '', "date": '',
}, },
# Add tags to exclude from RenameFile. # Add tags to exclude from RenameFile.
"excludeTags": ["DuplicateMarkForDeletion", "DuplicateMarkForSwap", "DuplicateWhitelistFile","_DuplicateMarkForDeletion","_DuplicateMarkForSwap", "_DuplicateWhitelistFile"], "excludeTags": ["DuplicateMarkForDeletion", "DuplicateMarkForSwap", "DuplicateWhitelistFile","_DuplicateMarkForDeletion","_DuplicateMarkForSwap", "_DuplicateWhitelistFile","ExcludeDuplicateMarkForDeletion", "_ExcludeDuplicateMarkForDeletion"],
# Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
"pathToExclude": "", "pathToExclude": "",
# Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3" # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
@@ -47,6 +47,8 @@ config = {
"if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False. "if_notitle_use_org_filename": True, # Warning: Do not recommend setting this to False.
# Current Stash DB schema only allows maximum base file name length to be 255 # Current Stash DB schema only allows maximum base file name length to be 255
"max_filename_length": 255, "max_filename_length": 255,
# Exclude tags with ignore_auto_tag set to True
"excludeIgnoreAutoTags": True,
# handleExe is for Windows only.
# In Windows, a file can't be renamed if the file is opened by another process.

View File

@@ -1,3 +1,3 @@
stashapp-tools >= 0.2.50
requests
psutil