From d67ce6f3e213a0d29e2f172492f18af805ad3019 Mon Sep 17 00:00:00 2001
From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com>
Date: Sun, 25 Aug 2024 19:16:44 -0400
Subject: [PATCH] Change UI options

---
 StashPluginHelper/StashPluginHelper.py        |   3 +
 plugins/DupFileManager/DupFileManager.py      | 157 ++++++++++--------
 plugins/DupFileManager/DupFileManager.yml     |  38 +++--
 .../DupFileManager/DupFileManager_config.py   |  12 +-
 plugins/DupFileManager/StashPluginHelper.py   |   3 +
 plugins/FileMonitor/README.md                 |   2 +-
 plugins/FileMonitor/StashPluginHelper.py      |   3 +
 plugins/FileMonitor/filemonitor.py            |  11 +-
 plugins/FileMonitor/filemonitor.yml           |  12 +-
 plugins/FileMonitor/filemonitor_config.py     |   9 +-
 .../FileMonitor/filemonitor_self_unit_test.py |  45 ++---
 plugins/RenameFile/README.md                  |   2 +-
 plugins/RenameFile/renamefile.py              |   5 +-
 plugins/RenameFile/renamefile.yml             |   2 +-
 14 files changed, 179 insertions(+), 125 deletions(-)

diff --git a/StashPluginHelper/StashPluginHelper.py b/StashPluginHelper/StashPluginHelper.py
index 9ba3c9f..06368da 100644
--- a/StashPluginHelper/StashPluginHelper.py
+++ b/StashPluginHelper/StashPluginHelper.py
@@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface):
             super().__init__(self.FRAGMENT_SERVER)
             self.STASH_INTERFACE_INIT = True
+        if self.STASH_URL.startswith("http://0.0.0.0:"):
+            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
+
         if self.STASH_INTERFACE_INIT:
             self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
             self.STASH_CONFIGURATION = self.get_configuration()["general"]
diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py
index fa79528..f961667 100644
--- a/plugins/DupFileManager/DupFileManager.py
+++ b/plugins/DupFileManager/DupFileManager.py
@@ -9,7 +9,7 @@
 # Python library for parse-reparsepoint
 # https://pypi.org/project/parse-reparsepoint/
 # pip install parse-reparsepoint
-import os, sys, time, pathlib, argparse, platform, shutil
+import os, sys, time, pathlib, argparse, platform, shutil, logging
 from StashPluginHelper import StashPluginHelper
 from DupFileManager_config import config # Import config from DupFileManager_config.py
@@ -21,17 +21,17 @@ parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true',
 parse_args = parser.parse_args()
 settings = {
-    "dupWhiteListTag": "",
-    "dupFileTag": "DuplicateMarkForDeletion",
-    "dupFileTagSwap": "DuplicateMarkForSwap",
     "mergeDupFilename": False,
     "permanentlyDelete": False,
     "whitelistDelDupInSameFolder": False,
-    "zcleanAfterDel": False,
-    "zwhitelist": "",
-    "zxgraylist": "",
-    "zyblacklist": "",
-    "zymaxDupToProcess": 0,
+    "whitelistDoTagLowResDup": False,
+    "zCleanAfterDel": False,
+    "zSwapHighRes": False,
+    "zSwapLongLength": False,
+    "zWhitelist": "",
+    "zxGraylist": "",
+    "zyBlacklist": "",
+    "zyMaxDupToProcess": 0,
     "zzdebugTracing": False,
 }
 stash = StashPluginHelper(
@@ -39,54 +39,65 @@ stash = StashPluginHelper(
     debugTracing=parse_args.trace,
     settings=settings,
     config=config,
-    maxbytes=100*1024*1024,
+    maxbytes=50*1024*1024,
 )
-stash.Status()
-stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
+stash.Status(logLevel=logging.DEBUG)
+stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
 stash.Trace(f"(stashPaths={stash.STASH_PATHS})")
+stash.Trace(f"(STASH_URL={stash.STASH_URL})")
 # stash.encodeToUtf8 = True
+LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
 listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
 addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
 mergeDupFilename = stash.Setting('mergeDupFilename')
 moveToTrashCan = False if stash.Setting('permanentlyDelete') else True
 alternateTrashCanPath = stash.Setting('dup_path')
 whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder')
-maxDupToProcess = int(stash.Setting('zymaxDupToProcess'))
-swapHighRes = stash.Setting('swapHighRes')
-swapLongLength = stash.Setting('swapLongLength')
+whitelistDoTagLowResDup = stash.Setting('whitelistDoTagLowResDup')
+maxDupToProcess = int(stash.Setting('zyMaxDupToProcess'))
+swapHighRes = stash.Setting('zSwapHighRes')
+swapLongLength = stash.Setting('zSwapLongLength')
 significantTimeDiff = stash.Setting('significantTimeDiff')
 toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap')
-cleanAfterDel = stash.Setting('zcleanAfterDel')
+cleanAfterDel = stash.Setting('zCleanAfterDel')
+duration_diff = float(stash.Setting('duration_diff'))
+if duration_diff > 10:
+    duration_diff = 10
+elif duration_diff < 1:
+    duration_diff = 1
-duplicateMarkForDeletion = stash.Setting('dupFileTag')
+# significantTimeDiff cannot be higher than 1 and shouldn't be lower than .5
+if significantTimeDiff > 1:
+    significantTimeDiff = 1
+if significantTimeDiff < .5:
+    significantTimeDiff = .5
+
+
+duplicateMarkForDeletion = stash.Setting('DupFileTag')
 if duplicateMarkForDeletion == "":
     duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
-
-DuplicateMarkForSwap = stash.Setting('dupFileTagSwap')
-if DuplicateMarkForSwap == "":
-    DuplicateMarkForSwap = 'DuplicateMarkForSwap'
-duplicateWhitelistTag = stash.Setting('dupWhiteListTag')
+duplicateWhitelistTag = stash.Setting('DupWhiteListTag')
+if duplicateWhitelistTag == "":
+    duplicateWhitelistTag = 'DuplicateWhitelistFile'
-excludeMergeTags = [duplicateMarkForDeletion, DuplicateMarkForSwap]
-if duplicateWhitelistTag != "":
-    excludeMergeTags += [duplicateWhitelistTag]
+excludeMergeTags = [duplicateMarkForDeletion, duplicateWhitelistTag]
 stash.init_mergeMetadata(excludeMergeTags)
-graylist = stash.Setting('zxgraylist').split(listSeparator)
+graylist = stash.Setting('zxGraylist').split(listSeparator)
 graylist = [item.lower() for item in graylist]
 if graylist == [""] : graylist = []
-stash.Log(f"graylist = {graylist}")
+stash.Trace(f"graylist = {graylist}")
-whitelist = stash.Setting('zwhitelist').split(listSeparator)
+whitelist = stash.Setting('zWhitelist').split(listSeparator)
 whitelist = [item.lower() for item in whitelist]
 if whitelist == [""] : whitelist = []
-stash.Log(f"whitelist = {whitelist}")
+stash.Trace(f"whitelist = {whitelist}")
-blacklist = stash.Setting('zyblacklist').split(listSeparator)
+blacklist = stash.Setting('zyBlacklist').split(listSeparator)
 blacklist = [item.lower() for item in blacklist]
 if blacklist == [""] : blacklist = []
-stash.Log(f"blacklist = {blacklist}")
+stash.Trace(f"blacklist = {blacklist}")
 def realpath(path):
     """
@@ -173,25 +184,32 @@ def createTagId(tagName, tagName_descp, deleteIfExist = False):
     stash.Log(f"Dup-tagId={tagId['id']}")
     return tagId['id']
-def setTagId(tagId, tagName, sceneDetails, PrimeDuplicateScene = ""):
-    if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
-        BaseDupStr = f"BaseDup={PrimeDuplicateScene}"
-        if sceneDetails['details'].startswith(BaseDupStr) or sceneDetails['details'].startswith(f"Primary Duplicate = {PrimeDuplicateScene}"):
-            PrimeDuplicateScene = ""
-        elif sceneDetails['details'] == "":
-            PrimeDuplicateScene = BaseDupStr
-        else:
-            PrimeDuplicateScene = f"{BaseDupStr};\n{sceneDetails['details']}"
+def setTagId(tagId, tagName, sceneDetails, DupFileToKeep):
+    stash.Trace()
+    details = ""
+    ORG_DATA_DICT = {'id' : sceneDetails['id']}
+    dataDict = ORG_DATA_DICT.copy()
+    doAddTag = True
+    if addPrimaryDupPathToDetails:
+        BaseDupStr = f"BaseDup={DupFileToKeep['files'][0]['path']}\n{stash.STASH_URL}/scenes/{DupFileToKeep['id']}\n"
+        if sceneDetails['details'] == "":
+            details = BaseDupStr
+        elif not sceneDetails['details'].startswith(BaseDupStr):
+            details = f"{BaseDupStr};\n{sceneDetails['details']}"
     for tag in sceneDetails['tags']:
         if tag['name'] == tagName:
-            if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
-                stash.update_scene({'id' : sceneDetails['id'], 'details' : PrimeDuplicateScene})
-            return
-
-    if PrimeDuplicateScene == "" or not addPrimaryDupPathToDetails:
-        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId})
+            doAddTag = False
+            break
+    if doAddTag:
+        dataDict.update({'tag_ids' : tagId})
+    if details != "":
+        dataDict.update({'details' : details})
+    if dataDict != ORG_DATA_DICT:
+        stash.update_scene(dataDict)
+        stash.Trace(f"[setTagId] Updated {sceneDetails['files'][0]['path']} with metadata {dataDict}", toAscii=True)
     else:
-        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId, 'details' : PrimeDuplicateScene})
+        stash.Trace(f"[setTagId] Nothing to update {sceneDetails['files'][0]['path']}.", toAscii=True)
+
 def isInList(listToCk, pathToCk):
     pathToCk = pathToCk.lower()
@@ -229,28 +247,29 @@ def significantLessTime(durrationToKeep, durrationOther):
         return True
     return False
-def isBetter(DupFileToKeep, DupFile):
+def isSwapCandidate(DupFileToKeep, DupFile):
     # Don't move if both are in whitelist
     if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']):
         return False
     if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])):
         if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])):
             return True
+        else:
+            stash.Warn(f"File '{DupFileToKeep['files'][0]['path']}' has a higher resolution than '{DupFile['files'][0]['path']}', but the duration is significantly shorter.", toAscii=True)
     if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']):
         if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']):
             return True
     return False
 def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
-    duration_diff = 10.00
     duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
- stash.Log(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") + stash.Trace(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp) stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}") dupWhitelistTagId = None - if duplicateWhitelistTag != "": - stash.Log(f"duplicateWhitelistTag = {duplicateWhitelistTag}") + if whitelistDoTagLowResDup: + stash.Trace(f"duplicateWhitelistTag = {duplicateWhitelistTag}") duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.' dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp) stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") @@ -266,10 +285,10 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): QtyMerge = 0 QtyDeleted = 0 stash.Log("#########################################################################") - stash.Log("#########################################################################") - stash.Log("Waiting for find_duplicate_scenes_diff to return results...") + stash.Trace("#########################################################################") + stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN) DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff) - stash.Log("#########################################################################") + stash.Trace("#########################################################################") for DupFileSet in DupFileSets: stash.Trace(f"DupFileSet={DupFileSet}") QtyDupSet+=1 @@ -318,25 +337,25 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): QtyMerge += 1 if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): - if isBetter(DupFileToKeep, DupFile): + if isSwapCandidate(DupFileToKeep, DupFile): if merge: stash.merge_metadata(DupFileToKeep, DupFile) if toRecycleBeforeSwap: sendToTrash(DupFile['files'][0]['path']) shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) - stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True) + stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) DupFileToKeep = DupFile QtySwap+=1 else: stash.Log(f"NOT processing duplicate, because it's in whitelist. 
'{DupFile['files'][0]['path']}'", toAscii=True) if dupWhitelistTagId and tagDuplicates: - setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path']) + setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep) QtySkipForDel+=1 else: if deleteDup: DupFileName = DupFile['files'][0]['path'] DupFileNameOnly = pathlib.Path(DupFileName).stem - stash.Log(f"Deleting duplicate '{DupFileName}'", toAscii=True) + stash.Warn(f"Deleting duplicate '{DupFileName}'", toAscii=True, printTo=LOG_STASH_N_PLUGIN) if alternateTrashCanPath != "": destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" if os.path.isfile(destPath): @@ -348,18 +367,18 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): QtyDeleted += 1 elif tagDuplicates: if QtyTagForDel == 0: - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True) + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag {duplicateMarkForDeletion}.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) else: - stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True) - setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep['files'][0]['path']) + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True, printTo=LOG_STASH_N_PLUGIN) + setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep) QtyTagForDel+=1 - stash.Log(SepLine) + stash.Trace(SepLine) if maxDupToProcess > 0 and QtyDup > maxDupToProcess: break - stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}") + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN) if cleanAfterDel: - stash.Log("Adding clean jobs to the Task Queue") + stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN) stash.metadata_clean(paths=stash.STASH_PATHS) stash.metadata_clean_generated() stash.optimise_database() @@ -374,11 +393,11 @@ def testSetDupTagOnScene(sceneId): stash.Log(f"tag_ids={tag_ids}") stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids}) -if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task": - mangeDupFiles(merge=True) +if stash.PLUGIN_TASK_NAME == "tag_duplicates_task": + mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") elif stash.PLUGIN_TASK_NAME == "delete_duplicates": - mangeDupFiles(deleteDup=True) + mangeDupFiles(deleteDup=True, merge=mergeDupFilename) stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") elif parse_args.dup_tag: mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename) diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml index 10031f4..03c34da 100644 --- a/plugins/DupFileManager/DupFileManager.yml +++ b/plugins/DupFileManager/DupFileManager.yml @@ -3,43 +3,47 @@ description: Manages duplicate files. 
 version: 0.1.1
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
 settings:
-  dupFileTag:
-    displayName: Duplicate File Tag Name
-    description: (Default = DuplicateMarkForDeletion) Tag used to tag duplicates with lower resolution, duration, and file name length.
-    type: STRING
-  dupWhiteListTag:
-    displayName: Duplicate Whitelist Tag Name
-    description: If populated, a tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
-    type: STRING
   mergeDupFilename:
     displayName: Merge Duplicate Tags
     description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
     type: BOOLEAN
   permanentlyDelete:
     displayName: Permanent Delete
-    description: (Default=false) Enable to permanently delete files, instead of moving files to trash can.
+    description: Enable to permanently delete files, instead of moving files to trash can.
     type: BOOLEAN
   whitelistDelDupInSameFolder:
    displayName: Whitelist Delete In Same Folder
-    description: (Default=false) Allow whitelist deletion of duplicates within the same whitelist folder.
+    description: Allow whitelist deletion of duplicates within the same whitelist folder.
     type: BOOLEAN
-  zcleanAfterDel:
+  whitelistDoTagLowResDup:
+    displayName: Whitelist Duplicate Tagging
+    description: Enable to tag whitelist duplicates that have lower resolution, shorter duration, or are in the same folder.
+    type: BOOLEAN
+  zCleanAfterDel:
     displayName: Run Clean After Delete
     description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
     type: BOOLEAN
-  zwhitelist:
+  zSwapHighRes:
+    displayName: Swap High Resolution
+    description: If enabled, swap higher resolution duplicate files to preferred path.
+    type: BOOLEAN
+  zSwapLongLength:
+    displayName: Swap Longer Duration
+    description: If enabled, swap longer duration media files to preferred path. Longer is determined by the significantTimeDiff field.
+    type: BOOLEAN
+  zWhitelist:
     displayName: White List
     description: A comma seperated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
     type: STRING
-  zxgraylist:
+  zxGraylist:
     displayName: Gray List
     description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
     type: STRING
-  zyblacklist:
+  zyBlacklist:
     displayName: Black List
     description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
     type: STRING
-  zymaxDupToProcess:
+  zyMaxDupToProcess:
     displayName: Max Dup Process
     description: Maximum number of duplicates to process. If 0, infinity
     type: NUMBER
@@ -52,10 +56,10 @@ exec:
   - "{pluginDir}/DupFileManager.py"
 interface: raw
 tasks:
-  - name: Tag Duplicate Filename
+  - name: Tag Duplicates
     description: Set tag DuplicateMarkForDeletion to the duplicates with lower resolution, duration, file name length, and/or black list path.
     defaultArgs:
-      mode: merge_dup_filename_task
+      mode: tag_duplicates_task
   - name: Delete Duplicates
     description: Deletes duplicate files
     defaultArgs:
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
index a7f71cb..ab5b817 100644
--- a/plugins/DupFileManager/DupFileManager_config.py
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -6,16 +6,18 @@ config = {
     "addPrimaryDupPathToDetails" : True,
     # Alternative path to move duplicate files.
"dup_path": "", #Example: "C:\\TempDeleteFolder" - # If enabled, swap higher resolution duplicate files to preferred path. - "swapHighRes" : True, - # If enabled, swap longer length media files to preferred path. Longer will be determine by significantLongerTime value. - "swapLongLength" : True, # The threshold as to what percentage is consider a significant shorter time. - "significantTimeDiff" : .90, # 95% threshold + "significantTimeDiff" : .90, # 90% threshold + # Valued passed to stash API function FindDuplicateScenes. + "duration_diff" : 10, # (default=10) A value from 1 to 10. # If enabled, moves destination file to recycle bin before swapping Hi-Res file. "toRecycleBeforeSwap" : True, # Character used to seperate items on the whitelist, blacklist, and graylist "listSeparator" : ",", + # Tag used to tag duplicates with lower resolution, duration, and file name length. + "DupFileTag" : "DuplicateMarkForDeletion", + # Tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile + "DupWhiteListTag" : "DuplicateWhitelistFile", # The following fields are ONLY used when running DupFileManager in script mode "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py index 9ba3c9f..06368da 100644 --- a/plugins/DupFileManager/StashPluginHelper.py +++ b/plugins/DupFileManager/StashPluginHelper.py @@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface): super().__init__(self.FRAGMENT_SERVER) self.STASH_INTERFACE_INIT = True + if self.STASH_URL.startswith("http://0.0.0.0:"): + self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:") + if self.STASH_INTERFACE_INIT: self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"] self.STASH_CONFIGURATION = self.get_configuration()["general"] diff --git a/plugins/FileMonitor/README.md b/plugins/FileMonitor/README.md index 132d502..7747a6c 100644 --- a/plugins/FileMonitor/README.md +++ b/plugins/FileMonitor/README.md @@ -1,4 +1,4 @@ -# FileMonitor: Ver 0.8.8 (By David Maisonave) +# FileMonitor: Ver 0.8.9 (By David Maisonave) FileMonitor is a [Stash](https://github.com/stashapp/stash) plugin with the following two main features: - Updates Stash when any file changes occurs in the Stash library. - **Task Scheduler**: Runs scheduled task based on the scheduler configuration in **filemonitor_config.py**. 
diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py
index 9ba3c9f..06368da 100644
--- a/plugins/FileMonitor/StashPluginHelper.py
+++ b/plugins/FileMonitor/StashPluginHelper.py
@@ -172,6 +172,9 @@ class StashPluginHelper(StashInterface):
             super().__init__(self.FRAGMENT_SERVER)
             self.STASH_INTERFACE_INIT = True
+        if self.STASH_URL.startswith("http://0.0.0.0:"):
+            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")
+
         if self.STASH_INTERFACE_INIT:
             self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
             self.STASH_CONFIGURATION = self.get_configuration()["general"]
diff --git a/plugins/FileMonitor/filemonitor.py b/plugins/FileMonitor/filemonitor.py
index dd534c1..9048ad6 100644
--- a/plugins/FileMonitor/filemonitor.py
+++ b/plugins/FileMonitor/filemonitor.py
@@ -14,8 +14,10 @@ from filemonitor_task_examples import task_examples
 from filemonitor_self_unit_test import self_unit_test
 config['task_scheduler'] = config['task_scheduler'] + task_examples['task_scheduler']
-if self_unit_test['selfUnitTest']:
-    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler']
+if self_unit_test['selfUnitTest_repeat']:
+    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_repeat']
+if self_unit_test['selfUnitTest_set_time']:
+    config['task_scheduler'] = config['task_scheduler'] + self_unit_test['task_scheduler_set_time']
 CONTINUE_RUNNING_SIG = 99
 STOP_RUNNING_SIG = 32
@@ -38,6 +40,7 @@ if parse_args.quit:
 settings = {
     "recursiveDisabled": False,
     "turnOnScheduler": False,
+    "turnOnSchedulerDeleteDup": False,
     "zmaximumBackups": 1,
     "zzdebugTracing": False
 }
@@ -82,6 +85,7 @@ if CREATE_SPECIAL_FILE_TO_EXIT and os.path.isfile(SPECIAL_FILE_NAME):
 fileExtTypes = stash.pluginConfig['fileExtTypes'].split(",") if stash.pluginConfig['fileExtTypes'] != "" else []
 includePathChanges = stash.pluginConfig['includePathChanges'] if len(stash.pluginConfig['includePathChanges']) > 0 else stash.STASH_PATHS
 excludePathChanges = stash.pluginConfig['excludePathChanges']
+turnOnSchedulerDeleteDup = stash.pluginSettings['turnOnSchedulerDeleteDup']
 if stash.DRY_RUN:
     stash.Log("Dry run mode is enabled.")
@@ -303,6 +307,9 @@ class StashScheduler: # Stash Scheduler
                 if invalidDir:
                     stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
                 else:
+                    if task['task'] == "Delete Duplicates" and not turnOnSchedulerDeleteDup:
+                        stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]")
+                        return None
                     stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
                     return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
             else:
diff --git a/plugins/FileMonitor/filemonitor.yml b/plugins/FileMonitor/filemonitor.yml
index 8529886..379cad6 100644
--- a/plugins/FileMonitor/filemonitor.yml
+++ b/plugins/FileMonitor/filemonitor.yml
@@ -1,6 +1,6 @@
 name: FileMonitor
 description: Monitors the Stash library folders, and updates Stash if any changes occurs in the Stash library paths.
-version: 0.8.8
+version: 0.8.9
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/FileMonitor
 settings:
   recursiveDisabled:
@@ -11,9 +11,13 @@ settings:
     displayName: Scheduler
     description: Enable to turn on the scheduler. See filemonitor_config.py for more details.
     type: BOOLEAN
+  turnOnSchedulerDeleteDup:
+    displayName: Delete Duplicate Scheduler
+    description: Turn on the scheduler for deleting duplicates in the Stash library. (Requires plugin DupFileManager and [Scheduler] enabled)
+    type: BOOLEAN
   zmaximumBackups:
     displayName: Max DB Backups
-    description: When value greater than 1, will trim the number of database backup files to set value. Requires [Scheduler] enabled and backupDirectoryPath populated with path length longer than 4.
+    description: Trim database backup files to the set value. Requires [Scheduler] enabled and backupDirectoryPath path length longer than 4.
     type: NUMBER
   zzdebugTracing:
     displayName: Debug Tracing
@@ -29,10 +33,10 @@ tasks:
     defaultArgs:
       mode: start_library_monitor_service
   - name: Stop Library Monitor
-    description: Stops library monitoring within 2 minute.
+    description: Stops library monitoring within 2 minutes.
     defaultArgs:
       mode: stop_library_monitor
   - name: Monitor as a Plugin
-    description: Run [Library Monitor] as a plugin (*not recommended method*)
+    description: Run [Library Monitor] as a plugin (*Not recommended*)
     defaultArgs:
       mode: start_library_monitor
diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py
index 5f11bea..0cc1c8c 100644
--- a/plugins/FileMonitor/filemonitor_config.py
+++ b/plugins/FileMonitor/filemonitor_config.py
@@ -17,6 +17,9 @@ config = {
     # Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py
     {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
+    # The following task runs plugin DupFileManager if the plugin is installed.
+    {"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
+     "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Daily at 2:30AM)
     {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
     # The following tasks are scheduled weekly
@@ -27,9 +30,6 @@ config = {
     {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
     {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
     {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
-    # The following task runs plugin DupFileManager if the plugin is installed.
-    {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
-     "weekday" : "saturday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] (Every saturday at 2:30AM)
     # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
     # The monthly field value must be 1, 2, 3, or 4.
@@ -40,6 +40,9 @@ config = {
     # The Backup task is scheduled monthly
     # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
     {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
+    # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
+    {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
+     "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)
     # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
     # This task only works if FileMonitor is started as a service or in command line mode.
diff --git a/plugins/FileMonitor/filemonitor_self_unit_test.py b/plugins/FileMonitor/filemonitor_self_unit_test.py
index 77bf859..44cd00d 100644
--- a/plugins/FileMonitor/filemonitor_self_unit_test.py
+++ b/plugins/FileMonitor/filemonitor_self_unit_test.py
@@ -4,7 +4,7 @@
 # These tasks are usually executed before updating major releases on https://github.com/David-Maisonave/Axter-Stash/blob/main/plugins/FileMonitor
 # These tasks are ALWAYS executed before updating to https://github.com/stashapp/CommunityScripts
 self_unit_test = {
-    "task_scheduler": [
+    "task_scheduler_repeat": [
         {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
         {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
         {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
@@ -17,27 +17,30 @@ self_unit_test = {
         {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
         {"task" : "LogOnce", "seconds" :15}, # Test LogOnce
         {"task" : "TraceOnce", "seconds" : 5}, # Test TraceOnce
-        # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
         {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "seconds" :15}, # Test RunAfter
-        {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
-        {"task" : "Generate", "weekday" : "friday", "time" : "12:03"},
-        {"task" : "Clean", "weekday" : "friday", "time" : "12:03"},
-        {"task" : "Auto Tag", "weekday" : "friday", "time" : "12:03"},
-        {"task" : "Optimise Database", "weekday" : "friday", "time" : "12:03"},
-        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Running plugin task: Create Tags
-        {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager","weekday" : "friday", "time" : "12:03"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
-        {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "friday", "time" : "12:03"},
-        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Optimising database...
-        {"task" : "Clean Generated Files", "weekday" : "friday", "time" : "12:03"},
-        {"task" : "RenameGeneratedFiles", "weekday" : "friday", "time" : "12:03"}, # In task queue as -> Migrating scene hashes...
-        {"task" : "Backup", "maxBackups" : 0, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
-        {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
-        {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
-        {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
-        {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "friday", "time" : "12:03"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+        {"task" : "CheckStashIsRunning", "command" : "stash-win.exe", "seconds" :10}, # Check if Stash is running. If not running, start up Stash.
+        # {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"}], "seconds" :15}, # To test CheckStashIsRunning, kill Stash after starting FileMonitor service via following command:taskkill /F /IM "stash-win.exe"
     ],
-
+    "task_scheduler_set_time": [
+        # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
+        {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager","weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
+        {"task" : "Generate", "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "Clean", "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "Auto Tag", "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "Optimise Database", "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Running plugin task: Create Tags
+        {"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager", "weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
+        {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Optimising database...
+        {"task" : "Clean Generated Files", "weekday" : "sunday", "time" : "17:56"},
+        {"task" : "RenameGeneratedFiles", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Migrating scene hashes...
+        {"task" : "Backup", "maxBackups" : 0, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
+        {"task" : "python", "script" : "test_hello_world2.py", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        {"task" : "python", "script" : "test_hello_world.py", "detach" : False, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
+        {"task" : "execute", "command" : "test_hello_world2.cmd", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+        {"task" : "execute", "command" : "test_hello_world.bat", "args" : "--name David", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
+    ],
     # MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
-    # Enable to turn on self unit test.
-    "selfUnitTest": False,
+    "selfUnitTest_repeat" : False , # Enable to turn on self unit test.
+    "selfUnitTest_set_time" : False , # Enable to turn on self unit test.
 }
diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md
index 39e2f8a..6089b06 100644
--- a/plugins/RenameFile/README.md
+++ b/plugins/RenameFile/README.md
@@ -1,4 +1,4 @@
-# RenameFile: Ver 0.4.4 (By David Maisonave)
+# RenameFile: Ver 0.4.5 (By David Maisonave)
 RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
 - **Rename Scene File Name** (On-The-Fly)
 - **Append tag names** to file name
diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py
index 994145b..a0204ef 100644
--- a/plugins/RenameFile/renamefile.py
+++ b/plugins/RenameFile/renamefile.py
@@ -479,7 +479,10 @@ def rename_scene(scene_id, stash_directory):
     new_path_info = {'new_file_path': new_path}
     if debugTracing: logger.info(f"{dry_run_prefix}New filename: {new_path}")
-    if move_files and original_parent_directory.name != scene_details['studio']['name']:
+    studioName = ""
+    if 'studio' in scene_details and scene_details['studio'] is not None and 'name' in scene_details['studio']:
+        studioName = scene_details['studio']['name']
+    if move_files and studioName != "" and original_parent_directory.name != studioName:
         new_path = original_parent_directory / scene_details['studio']['name'] / (new_filename + Path(original_file_path).suffix)
         new_path_info = {'new_file_path': new_path}
         move_or_rename_files(scene_details, new_filename, original_parent_directory)
diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml
index af9ee0a..8e978ff 100644
--- a/plugins/RenameFile/renamefile.yml
+++ b/plugins/RenameFile/renamefile.yml
@@ -1,6 +1,6 @@
 name: RenameFile
 description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
-version: 0.4.4
+version: 0.4.5
 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
 settings:
   performerAppend: