From 826e651a6e38bf8ec58b1bff7bf87860afba0364 Mon Sep 17 00:00:00 2001 From: David Maisonave <47364845+David-Maisonave@users.noreply.github.com> Date: Tue, 20 Aug 2024 20:35:06 -0400 Subject: [PATCH] updates --- StashPluginHelper/StashPluginHelper.py | 44 ++-- plugins/DupFileManager/DupFileManager.py | 200 ++++++++++++++++-- plugins/DupFileManager/DupFileManager.yml | 36 +++- .../DupFileManager/DupFileManager_config.py | 8 +- plugins/DupFileManager/StashPluginHelper.py | 44 ++-- plugins/DupFileManager/requirements.txt | 2 +- plugins/FileMonitor/StashPluginHelper.py | 44 ++-- plugins/FileMonitor/filemonitor_config.py | 34 +-- .../FileMonitor/filemonitor_task_examples.py | 2 +- plugins/FileMonitor/requirements.txt | 2 +- plugins/RenameFile/README.md | 2 +- plugins/RenameFile/renamefile.py | 29 +-- plugins/RenameFile/renamefile.yml | 2 +- plugins/RenameFile/renamefile_settings.py | 2 + plugins/RenameFile/requirements.txt | 2 +- 15 files changed, 312 insertions(+), 141 deletions(-) diff --git a/StashPluginHelper/StashPluginHelper.py b/StashPluginHelper/StashPluginHelper.py index c1b0600..eec93b6 100644 --- a/StashPluginHelper/StashPluginHelper.py +++ b/StashPluginHelper/StashPluginHelper.py @@ -1,6 +1,6 @@ from stashapi.stashapp import StashInterface from logging.handlers import RotatingFileHandler -import inspect, sys, os, pathlib, logging, json +import re, inspect, sys, os, pathlib, logging, json import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ @@ -30,7 +30,6 @@ class StashPluginHelper(StashInterface): PLUGINS_PATH = None pluginSettings = None pluginConfig = None - STASH_INTERFACE_INIT = False STASH_URL = None STASH_CONFIGURATION = None JSON_INPUT = None @@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface): pluginLog = None logLinePreviousHits = [] thredPool = None + STASH_INTERFACE_INIT = False # Prefix message value LEV_TRACE = "TRACE: " @@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface): if logToNormSet: 
self.log_to_norm = logToNormSet if stash_url and len(stash_url): self.STASH_URL = stash_url self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ - self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() + self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent @@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface): def rename_generated_files(self): return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") - # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None): - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) { - # ...SceneSlim - # } - # } - # """ - # if fragment: - # query = re.sub(r'\.\.\.SceneSlim', fragment, query) - # else: - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) - # } - # """ - # variables = { - # "distance": distance - # } - # result = self.call_GQL(query, variables) - # return result['findDuplicateScenes'] \ No newline at end of file + + def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): + query = """ + query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { + ...SceneSlim + } + } + """ + if fragment: + query = re.sub(r'\.\.\.SceneSlim', fragment, query) + else: + query += "fragment SceneSlim on Scene { id }" + + variables = { "distance": distance, 
"duration_diff": duration_diff } + result = self.call_GQL(query, variables) + return result['findDuplicateScenes'] diff --git a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py index a41cdd4..501cb76 100644 --- a/plugins/DupFileManager/DupFileManager.py +++ b/plugins/DupFileManager/DupFileManager.py @@ -1,21 +1,14 @@ # Description: This is a Stash plugin which manages duplicate files. # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager -# Note: To call this script outside of Stash, pass any argument. -# Example: python DupFileManager.py start +# Note: To call this script outside of Stash, pass argument --url +# Example: python DupFileManager.py --url http://localhost:9999 -a # Research: # Research following links to complete this plugin: -# https://github.com/WithoutPants/stash-plugin-duplicate-finder -# -# Look at options in programs from the following link: -# https://video.stackexchange.com/questions/25302/how-can-i-find-duplicate-videos-by-content -# # Python library for parse-reparsepoint # https://pypi.org/project/parse-reparsepoint/ # pip install parse-reparsepoint -# -# Look at stash API find_duplicate_scenes import os, sys, time, pathlib, argparse, platform from StashPluginHelper import StashPluginHelper from DupFileManager_config import config # Import config from DupFileManager_config.py @@ -24,15 +17,18 @@ parser = argparse.ArgumentParser() parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') -parser.add_argument('--dryrun', '-d', dest='dryrun', action='store_true', help='Do dryrun for deleting duplicate files. 
No files are deleted, and only logging occurs.') +parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.') parse_args = parser.parse_args() settings = { "mergeDupFilename": True, "moveToTrashCan": False, - "whitelist": [], + "dupFileTag": "DuplicateMarkForDeletion", + "dupWhiteListTag": "", + "zxgraylist": "", + "zwhitelist": "", + "zzblacklist": "", "zzdebugTracing": False, - "zzdryRun": False, } stash = StashPluginHelper( stash_url=parse_args.stash_url, @@ -41,10 +37,13 @@ stash = StashPluginHelper( config=config ) stash.Status() -stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") +stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") stash.Trace(f"(stashPaths={stash.STASH_PATHS})") +listSeparator = stash.pluginConfig['listSeparator'] if stash.pluginConfig['listSeparator'] != "" else ',' +addPrimaryDupPathToDetails = stash.pluginConfig['addPrimaryDupPathToDetails'] + def realpath(path): """ get_symbolic_target for win @@ -77,7 +76,7 @@ def isReparsePoint(path): path = os.path.dirname(path) return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT -def mangeDupFiles(merge=False, deleteDup=False, DryRun=False): +def testReparsePointAndSymLink(merge=False, deleteDup=False): stash.Trace(f"Debug Tracing (platform.system()={platform.system()})") myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point @@ 
-117,22 +116,177 @@ def mangeDupFiles(merge=False, deleteDup=False, DryRun=False): stash.Log(f"Not isSymLink '{myTestPath6}'") return + +def createTagId(tagName, tagName_descp, deleteIfExist = False): + tagId = stash.find_tags(q=tagName) + if len(tagId): + tagId = tagId[0] + if deleteIfExist: + stash.destroy_tag(int(tagId['id'])) + else: + return tagId['id'] + tagId = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True}) + stash.Log(f"Dup-tagId={tagId['id']}") + return tagId['id'] + +def setTagId(tagId, tagName, sceneDetails, PrimeDuplicateScene = ""): + if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails: + if sceneDetails['details'].startswith(f"Primary Duplicate = {PrimeDuplicateScene}"): + PrimeDuplicateScene = "" + elif sceneDetails['details'] == "": + PrimeDuplicateScene = f"Primary Duplicate = {PrimeDuplicateScene}" + else: + PrimeDuplicateScene = f"Primary Duplicate = {PrimeDuplicateScene}; {sceneDetails['details']}" + for tag in sceneDetails['tags']: + if tag['name'] == tagName: + if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails: + stash.update_scene({'id' : sceneDetails['id'], 'details' : PrimeDuplicateScene}) + return + + if PrimeDuplicateScene == "" or not addPrimaryDupPathToDetails: + stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId}) + else: + stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId, 'details' : PrimeDuplicateScene}) + +def isInList(listToCk, pathToCk): + pathToCk = pathToCk.lower() + for item in listToCk: + if pathToCk.startswith(item): + return True + return False + +def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): + duration_diff = 10.00 + duplicateMarkForDeletion = stash.pluginSettings['dupFileTag'] + duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
+ if duplicateMarkForDeletion == "": + duplicateMarkForDeletion = 'DuplicateMarkForDeletion' + stash.Log(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}") + dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp) + stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}") + + duplicateWhitelistTag = stash.pluginSettings['dupWhiteListTag'] + dupWhitelistTagId = None + if duplicateWhitelistTag != "": + stash.Log(f"duplicateWhitelistTag = {duplicateWhitelistTag}") + duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.' + dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp) + stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") + + + graylist = stash.pluginSettings['zxgraylist'].split(listSeparator) + graylist = [item.lower() for item in graylist] + if graylist == [""] : graylist = [] + stash.Log(f"graylist = {graylist}") + whitelist = stash.pluginSettings['zwhitelist'].split(listSeparator) + whitelist = [item.lower() for item in whitelist] + if whitelist == [""] : whitelist = [] + stash.Log(f"whitelist = {whitelist}") + blacklist = stash.pluginSettings['zzblacklist'].split(listSeparator) + blacklist = [item.lower() for item in blacklist] + if blacklist == [""] : blacklist = [] + stash.Log(f"blacklist = {blacklist}") + + QtyDupSet = 0 + QtyDup = 0 + QtyExactDup = 0 + QtyAlmostDup = 0 + QtyTagForDel = 0 + QtySkipForDel = 0 + stash.Log("Waiting for find_duplicate_scenes_diff to return results...") + DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff) + stash.Log("#########################################################################") + stash.Log("#########################################################################") + for DupFileSet in DupFileSets: + stash.Trace(f"DupFileSet={DupFileSet}") + QtyDupSet+=1 + SepLine = 
"---------------------------" + DupFileToKeep = "" + DupToCopyFrom = "" + DupFileDetailList = [] + for DupFile in DupFileSet: + QtyDup+=1 + Scene = stash.find_scene(DupFile['id']) + stash.Trace(f"Scene = {Scene.encode('ascii','ignore')}") + DupFileDetailList = DupFileDetailList + [Scene] + if DupFileToKeep != "": + if DupFileToKeep['files'][0]['duration'] == Scene['files'][0]['duration']: + QtyExactDup+=1 + else: + QtyAlmostDup+=1 + SepLine = "***************************" + if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']): + DupFileToKeep = Scene + elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']): + DupFileToKeep = Scene + elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']): + DupFileToKeep = Scene + elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']): + DupFileToKeep = Scene + elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']): + DupFileToKeep = Scene + elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']): + DupFileToKeep = Scene + elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']): + DupFileToKeep = Scene + else: + DupFileToKeep = Scene + # stash.Log(f"DupFileToKeep = {DupFileToKeep}") + stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path'].encode('ascii','ignore')}") + + for DupFile in DupFileDetailList: + if DupFile['id'] != DupFileToKeep['id']: + if isInList(whitelist, DupFile['files'][0]['path']): + stash.Log(f"NOT tagging duplicate, because it's in whitelist. 
'{DupFile['files'][0]['path'].encode('ascii','ignore')}'") + if dupWhitelistTagId and tagDuplicates: + setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path']) + QtySkipForDel+=1 + else: + if deleteDup: + stash.Log(f"Deleting duplicate '{DupFile['files'][0]['path'].encode('ascii','ignore')}'") + # ToDo: Add logic to check if moving file to deletion folder, or doing full delete. + # ToDo: Add logic to check if tag merging is needed before performing deletion. + elif tagDuplicates: + if QtyTagForDel == 0: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path'].encode('ascii','ignore')} for deletion with tag {duplicateMarkForDeletion}.") + else: + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path'].encode('ascii','ignore')} for deletion.") + setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep['files'][0]['path']) + QtyTagForDel+=1 + stash.Log(SepLine) + if QtyDup > 200: + break + + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}") + +def testSetDupTagOnScene(sceneId): + scene = stash.find_scene(sceneId) + stash.Log(f"scene={scene}") + stash.Log(f"scene tags={scene['tags']}") + tag_ids = [dupTagId] + for tag in scene['tags']: + tag_ids = tag_ids + [tag['id']] + stash.Log(f"tag_ids={tag_ids}") + stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids}) + if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task": mangeDupFiles(merge=True) stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") elif stash.PLUGIN_TASK_NAME == "delete_duplicates": mangeDupFiles(deleteDup=True) stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") -elif stash.PLUGIN_TASK_NAME == "dryrun_delete_duplicates": - mangeDupFiles(deleteDup=True, DryRun=True) - stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") +elif parse_args.dup_tag: + mangeDupFiles(tagDuplicates=True) + stash.Trace(f"Tag duplicate EXIT") elif parse_args.remove: - 
mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun) - stash.Trace(f"Delete duplicate (DryRun={parse_args.dryrun}) EXIT") -elif parse_args.dryrun: - mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun) - stash.Trace(f"Dryrun delete duplicate EXIT") + mangeDupFiles(deleteDup=True) + stash.Trace(f"Delete duplicate EXIT") + else: - stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})") + stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})") + + + + stash.Trace("\n*********************************\nEXITING ***********************\n*********************************") diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml index 6ce047e..282462f 100644 --- a/plugins/DupFileManager/DupFileManager.yml +++ b/plugins/DupFileManager/DupFileManager.yml @@ -4,38 +4,54 @@ version: 0.1.0 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager settings: mergeDupFilename: - displayName: Before deletion, merge potential source in the duplicate file names for tag names, performers, and studios. - description: Enable to + displayName: Merge Duplicate Tags + description: Before deletion, merge potential source in the duplicate file names for tag names, performers, and studios. type: BOOLEAN moveToTrashCan: displayName: Trash Can description: Enable to move files to trash can instead of permanently delete file. type: BOOLEAN - whitelist: + dupFileTag: + displayName: Duplicate File Tag Name + description: (Default = DuplicateMarkForDeletion) Tag used to tag duplicates with lower resolution, duration, and file name length. + type: STRING + dupWhiteListTag: + displayName: Duplicate Whitelist Tag Name + description: If populated, a tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile + type: STRING + zwhitelist: displayName: White List - description: A comma seperated list of preferential paths to determine which duplicate should be the primary. 
Listed in order of preference. + description: A comma separated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\ + type: STRING + zxgraylist: + displayName: Gray List + description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\ + type: STRING + zzblacklist: + displayName: Black List + description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\ + type: STRING zzdebugTracing: displayName: Debug Tracing description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log type: BOOLEAN - zzdryRun: - displayName: Dry Run - description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call meta_scan, and only logs the action it would have taken. - type: BOOLEAN exec: - python - "{pluginDir}/DupFileManager.py" interface: raw tasks: - - name: Merge Duplicate Filename - description: Merge duplicate filename sourcetag names, performers, and studios. + - name: Tag Duplicate Filename + description: Set tag DuplicateMarkForDeletion to the duplicate with lower resolution, duration, file name length, and/or black list path. defaultArgs: mode: merge_dup_filename_task - name: Delete Duplicates description: Delete duplicate files defaultArgs: mode: delete_duplicates + - name: Merge Duplicate Filename + description: Merge duplicate filename sourcetag names, performers, and studios. + defaultArgs: + mode: merge_dup_filename_task - name: Dry Run Delete Duplicates description: Only perform a dry run (logging only) of duplicate file deletions. Dry Run setting is ignore when running this task. 
defaultArgs: diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py index 7a649ef..1dfa780 100644 --- a/plugins/DupFileManager/DupFileManager_config.py +++ b/plugins/DupFileManager/DupFileManager_config.py @@ -2,9 +2,11 @@ # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager config = { - # Define black list to determine which duplicates should be deleted first. - "blacklist_paths": [], #Example: "blacklist_paths": ['C:\\SomeMediaPath\\subpath', "E:\\YetAnotherPath\\subpath', "E:\\YetAnotherPath\\secondSubPath'] - + # Character used to seperate items on the whitelist, blacklist, and graylist + "listSeparator" : ",", + # If enabled, adds the primary duplicate path to the scene detail. + "addPrimaryDupPathToDetails" : True, + # If enabled, ignore reparsepoints. For Windows NT drives only. "ignoreReparsepoints" : True, # If enabled, ignore symbolic links. 
diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py index c1b0600..eec93b6 100644 --- a/plugins/DupFileManager/StashPluginHelper.py +++ b/plugins/DupFileManager/StashPluginHelper.py @@ -1,6 +1,6 @@ from stashapi.stashapp import StashInterface from logging.handlers import RotatingFileHandler -import inspect, sys, os, pathlib, logging, json +import re, inspect, sys, os, pathlib, logging, json import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ @@ -30,7 +30,6 @@ class StashPluginHelper(StashInterface): PLUGINS_PATH = None pluginSettings = None pluginConfig = None - STASH_INTERFACE_INIT = False STASH_URL = None STASH_CONFIGURATION = None JSON_INPUT = None @@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface): pluginLog = None logLinePreviousHits = [] thredPool = None + STASH_INTERFACE_INIT = False # Prefix message value LEV_TRACE = "TRACE: " @@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface): if logToNormSet: self.log_to_norm = logToNormSet if stash_url and len(stash_url): self.STASH_URL = stash_url self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ - self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() + self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent @@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface): def rename_generated_files(self): return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") - # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, 
fragment=None): - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) { - # ...SceneSlim - # } - # } - # """ - # if fragment: - # query = re.sub(r'\.\.\.SceneSlim', fragment, query) - # else: - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) - # } - # """ - # variables = { - # "distance": distance - # } - # result = self.call_GQL(query, variables) - # return result['findDuplicateScenes'] \ No newline at end of file + + def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): + query = """ + query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { + ...SceneSlim + } + } + """ + if fragment: + query = re.sub(r'\.\.\.SceneSlim', fragment, query) + else: + query += "fragment SceneSlim on Scene { id }" + + variables = { "distance": distance, "duration_diff": duration_diff } + result = self.call_GQL(query, variables) + return result['findDuplicateScenes'] diff --git a/plugins/DupFileManager/requirements.txt b/plugins/DupFileManager/requirements.txt index e7825b0..19a1174 100644 --- a/plugins/DupFileManager/requirements.txt +++ b/plugins/DupFileManager/requirements.txt @@ -1,3 +1,3 @@ -stashapp-tools >= 0.2.49 +stashapp-tools >= 0.2.50 pyYAML watchdog \ No newline at end of file diff --git a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py index c1b0600..eec93b6 100644 --- a/plugins/FileMonitor/StashPluginHelper.py +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -1,6 +1,6 @@ from stashapi.stashapp import StashInterface from logging.handlers import RotatingFileHandler -import inspect, sys, os, pathlib, logging, json +import re, inspect, sys, os, pathlib, logging, json import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ @@ -30,7 +30,6 
@@ class StashPluginHelper(StashInterface): PLUGINS_PATH = None pluginSettings = None pluginConfig = None - STASH_INTERFACE_INIT = False STASH_URL = None STASH_CONFIGURATION = None JSON_INPUT = None @@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface): pluginLog = None logLinePreviousHits = [] thredPool = None + STASH_INTERFACE_INIT = False # Prefix message value LEV_TRACE = "TRACE: " @@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface): if logToNormSet: self.log_to_norm = logToNormSet if stash_url and len(stash_url): self.STASH_URL = stash_url self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ - self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() + self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent @@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface): def rename_generated_files(self): return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") - # def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None): - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) { - # ...SceneSlim - # } - # } - # """ - # if fragment: - # query = re.sub(r'\.\.\.SceneSlim', fragment, query) - # else: - # query = """ - # query FindDuplicateScenes($distance: Int) { - # findDuplicateScenes(distance: $distance) - # } - # """ - # variables = { - # "distance": distance - # } - # result = self.call_GQL(query, variables) - # return result['findDuplicateScenes'] \ No newline at end of file + + def 
find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ): + query = """ + query FindDuplicateScenes($distance: Int, $duration_diff: Float) { + findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) { + ...SceneSlim + } + } + """ + if fragment: + query = re.sub(r'\.\.\.SceneSlim', fragment, query) + else: + query += "fragment SceneSlim on Scene { id }" + + variables = { "distance": distance, "duration_diff": duration_diff } + result = self.call_GQL(query, variables) + return result['findDuplicateScenes'] diff --git a/plugins/FileMonitor/filemonitor_config.py b/plugins/FileMonitor/filemonitor_config.py index 96563d5..dbda631 100644 --- a/plugins/FileMonitor/filemonitor_config.py +++ b/plugins/FileMonitor/filemonitor_config.py @@ -11,16 +11,22 @@ config = { # The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00" # Note: Look at filemonitor_task_examples.py for many example task having more detailed usage. "task_scheduler": [ - # To create a daily task, include each day of the week for the weekday field. - {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) - {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) + # To create a daily task, include each day of the week for the weekday field. + # Optional field for task "Auto Tag" is 'paths'. For detail usage, see example #A3: in filemonitor_task_examples.py + {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) + # Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. 
For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", - "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] - + "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] + {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) # The following tasks are scheduled weekly - {"task" : "Generate", "weekday" : "saturday", "time" : "07:00"}, # Generated Content-> [Generate] (Every saturday at 7AM) - {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM) + # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. 
For detail usage, see examples #A3: in filemonitor_task_examples.py + {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM) + {"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:30"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3:30AM) + {"task" : "Generate", "weekday" : "saturday", "time" : "04:00"}, # Generated Content-> [Generate] (Every saturday at 4AM) + {"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM) + {"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM) + {"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM) # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. # The monthly field value must be 1, 2, 3, or 4. @@ -28,25 +34,25 @@ config = { # 2 = 2nd specified weekday of the month. Example 2nd monday of the month. # 3 = 3rd specified weekday of the month. # 4 = 4th specified weekday of the month. - # The following task is scheduled monthly + # The Backup task is scheduled monthly + # Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) - {"task" : "Clean", "weekday" : "sunday", "time" : "01:00", "monthly" : 3}, # Maintenance -> [Clean] - {"task" : "Clean Generated Files", "weekday" : "sunday", "time" : "03:00", "monthly" : 3}, # Maintenance -> [Clean Generated Files] # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. # This task only works if FileMonitor is started as a service or in command line mode. 
- # For more detailed usage, see examples #C1 and #C2 in filemonitor_task_examples.py + # Optional fields are 'command' and 'RunAfter'. For detail usage, see examples #C1 and #C2 in filemonitor_task_examples.py {"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes ], - # ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line. - "apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw" # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue. "timeOut": 60, + # ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line. + "apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw" # Enable to run metadata clean task after file deletion. "runCleanAfterDelete": False, # Enable to run metadata_generate (Generate Content) after metadata scan. "runGenerateContent": False, + # When populated (comma separated list [lower-case]), only scan for changes for specified file extension "fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t" # When populated, only include file changes in specified paths. @@ -54,7 +60,7 @@ config = { # When populated, exclude file changes in paths that start with specified entries. "excludePathChanges" :[], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"] - # The following fields are ONLY used when running FileMonitor in script mode. + # The following fields are ONLY used when running FileMonitor in command line mode. 
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server diff --git a/plugins/FileMonitor/filemonitor_task_examples.py b/plugins/FileMonitor/filemonitor_task_examples.py index 84b988b..3cbfad2 100644 --- a/plugins/FileMonitor/filemonitor_task_examples.py +++ b/plugins/FileMonitor/filemonitor_task_examples.py @@ -41,7 +41,7 @@ task_examples = { # Example#B4: Task to execute a command with optional args field, and using keyword , which gets replaced with filemonitor.py current directory. {"task" : "execute", "command" : "HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, - # Example#C1 Some OS may need the "command" field, which specifies the binary path + # Example#C1 Some OS may need the "command" field, which specifies the binary path. {"task" : "CheckStashIsRunning", "command" : "stash-linux-arm64v8", "minutes" :0}, # Example#C2 RunAfter field can be used to specify task to run after starting Stash {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0}, diff --git a/plugins/FileMonitor/requirements.txt b/plugins/FileMonitor/requirements.txt index e7825b0..19a1174 100644 --- a/plugins/FileMonitor/requirements.txt +++ b/plugins/FileMonitor/requirements.txt @@ -1,3 +1,3 @@ -stashapp-tools >= 0.2.49 +stashapp-tools >= 0.2.50 pyYAML watchdog \ No newline at end of file diff --git a/plugins/RenameFile/README.md b/plugins/RenameFile/README.md index c5610ea..bab3fca 100644 --- a/plugins/RenameFile/README.md +++ b/plugins/RenameFile/README.md @@ -1,4 +1,4 @@ -# RenameFile: Ver 0.4.1 (By David Maisonave) +# RenameFile: Ver 0.4.2 (By David Maisonave) RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks. 
- **Rename Scene File Name** (On-The-Fly) - **Append tag names** to file name diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index 884eaa8..00e112a 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -117,6 +117,7 @@ if debugTracing: logger.info("Debug Tracing................") exclude_paths = config["pathToExclude"] exclude_paths = exclude_paths.split() if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") +excluded_tags = config["excludeTags"] # Extract tag whitelist from settings tag_whitelist = config["tagWhitelist"] if debugTracing: logger.info("Debug Tracing................") @@ -203,7 +204,9 @@ def form_filename(original_file_stem, scene_details): if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): return # Skip adding more tags if the maximum limit is reached - + if tag_name in excluded_tags: + if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})") + return # Check if the tag name is in the whitelist if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): if WRAPPER_STYLES.get('tag'): @@ -261,8 +264,8 @@ def form_filename(original_file_stem, scene_details): scene_date += POSTFIX_STYLES.get('date') if debugTracing: logger.info("Debug Tracing................") if WRAPPER_STYLES.get('date'): - filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}") - else: + scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}" + if scene_date not in title: filename_parts.append(scene_date) elif key == 'resolution': width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string @@ -270,40 +273,40 @@ def form_filename(original_file_stem, scene_details): if width and height: resolution = 
width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution') if WRAPPER_STYLES.get('resolution'): - filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}") - else: + resolution = f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['resolution'][1]}" + if resolution not in title: filename_parts.append(resolution) elif key == 'width': width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string if width: width += POSTFIX_STYLES.get('width') if WRAPPER_STYLES.get('width'): - filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}") - else: + width = f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}" + if width not in title: filename_parts.append(width) elif key == 'height': height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string if height: height += POSTFIX_STYLES.get('height') if WRAPPER_STYLES.get('height'): - filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}") - else: + height = f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}" + if height not in title: filename_parts.append(height) elif key == 'video_codec': video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase if video_codec: video_codec += POSTFIX_STYLES.get('video_codec') if WRAPPER_STYLES.get('video_codec'): - filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}") - else: + video_codec = f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}" + if video_codec not in title: filename_parts.append(video_codec) elif key == 'frame_rate': frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS' if frame_rate: frame_rate += POSTFIX_STYLES.get('frame_rate') if 
WRAPPER_STYLES.get('frame_rate'): - filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}") - else: + frame_rate = f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}" + if frame_rate not in title: filename_parts.append(frame_rate) elif key == 'galleries': galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])] diff --git a/plugins/RenameFile/renamefile.yml b/plugins/RenameFile/renamefile.yml index 20778b3..1c9d5ad 100644 --- a/plugins/RenameFile/renamefile.yml +++ b/plugins/RenameFile/renamefile.yml @@ -1,6 +1,6 @@ name: RenameFile description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. -version: 0.4.1 +version: 0.4.2 url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile settings: performerAppend: diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index 24052f8..6a4445d 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -37,6 +37,8 @@ config = { "frame_rate": 'FR', "date": '', }, + # Add tags to exclude from RenameFile. + "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"], # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" "pathToExclude": "", # Define a whitelist of allowed tags or EMPTY to allow all tags. 
Example Usage: "tag1", "tag2", "tag3" diff --git a/plugins/RenameFile/requirements.txt b/plugins/RenameFile/requirements.txt index 99d8096..a832f62 100644 --- a/plugins/RenameFile/requirements.txt +++ b/plugins/RenameFile/requirements.txt @@ -1,3 +1,3 @@ -stashapp-tools >= 0.2.49 +stashapp-tools >= 0.2.50 pyYAML requests \ No newline at end of file