diff --git a/StashPluginHelper/StashPluginHelper.py b/StashPluginHelper/StashPluginHelper.py index eec93b6..9ba3c9f 100644 --- a/StashPluginHelper/StashPluginHelper.py +++ b/StashPluginHelper/StashPluginHelper.py @@ -5,6 +5,8 @@ import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ +_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" + # StashPluginHelper (By David Maisonave aka Axter) # See end of this file for example usage # Log Features: @@ -41,6 +43,7 @@ class StashPluginHelper(StashInterface): STASHPATHSCONFIG = None STASH_PATHS = [] API_KEY = None + excludeMergeTags = None # printTo argument LOG_TO_FILE = 1 @@ -62,6 +65,9 @@ class StashPluginHelper(StashInterface): logLinePreviousHits = [] thredPool = None STASH_INTERFACE_INIT = False + _mergeMetadata = None + encodeToUtf8 = False + convertToAscii = False # If set True, it takes precedence over encodeToUtf8 # Prefix message value LEV_TRACE = "TRACE: " @@ -84,7 +90,7 @@ class StashPluginHelper(StashInterface): debugTracing = None, # Set debugTracing to True so as to output debug and trace logging logFormat = LOG_FORMAT, # Plugin log line format dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file - maxbytes = 2*1024*1024, # Max size of plugin log file + maxbytes = 8*1024*1024, # Max size of plugin log file backupcount = 2, # Backup counts when log file size reaches max size logToWrnSet = 0, # Customize the target output set which will get warning logging logToErrSet = 0, # Customize the target output set which will get error logging @@ -126,12 +132,9 @@ class StashPluginHelper(StashInterface): if debugTracing: self.DEBUG_TRACING = debugTracing if config: self.pluginConfig = config - if 'apiKey' in self.pluginConfig and self.pluginConfig['apiKey'] != "": - self.FRAGMENT_SERVER['ApiKey'] = self.pluginConfig['apiKey'] - if DebugTraceFieldName in self.pluginConfig: - self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName] - if DryRunFieldName in 
self.pluginConfig: - self.DRY_RUN = self.pluginConfig[DryRunFieldName] + if self.Setting('apiKey', "") != "": + self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey') + if apiKey and apiKey != "": self.FRAGMENT_SERVER['ApiKey'] = apiKey @@ -181,12 +184,11 @@ class StashPluginHelper(StashInterface): self.pluginSettings = settings if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) - if DebugTraceFieldName in self.pluginSettings: - self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName] - if DryRunFieldName in self.pluginSettings: - self.DRY_RUN = self.pluginSettings[DryRunFieldName] if 'apiKey' in self.STASH_CONFIGURATION: self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) @@ -197,7 +199,22 @@ class StashPluginHelper(StashInterface): def __del__(self): self.thredPool.shutdown(wait=False) - def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): + if toAscii or (toAscii == None and 
(self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg if printTo == 0: printTo = self.log_to_norm elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: @@ -238,7 +255,7 @@ class StashPluginHelper(StashInterface): if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) - def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno @@ -246,39 +263,39 @@ class StashPluginHelper(StashInterface): if self.DEBUG_TRACING or logAlways: if logMsg == "": logMsg = f"Line number {lineNo}..." - self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways) + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) # Log once per session. Only logs the first time called from a particular line number in the code. - def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False): + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): lineNo = inspect.currentframe().f_back.f_lineno if self.DEBUG_TRACING or logAlways: FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: return self.logLinePreviousHits.append(FuncAndLineNo) - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) # Log INFO on first call, then do Trace on remaining calls. 
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True): + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE lineNo = inspect.currentframe().f_back.f_lineno FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: if traceOnRemainingCalls: - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) else: self.logLinePreviousHits.append(FuncAndLineNo) - self.Log(logMsg, printTo, logging.INFO, lineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) - def Warn(self, logMsg, printTo = 0): + def Warn(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_wrn_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.WARN, lineNo) + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) - def Error(self, logMsg, printTo = 0): + def Error(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_err_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.ERROR, lineNo) + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): if printTo == 0: printTo = self.log_to_norm @@ -310,8 +327,26 @@ class StashPluginHelper(StashInterface): argsWithPython = [f"{PythonExe}"] + args return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) - def Submit(*args, **kwargs): - thredPool.submit(*args, **kwargs) + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging 
than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): @@ -372,3 +407,76 @@ class StashPluginHelper(StashInterface): variables = { "distance": distance, "duration_diff": duration_diff } result = self.call_GQL(query, variables) return result['findDuplicateScenes'] + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + 
self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git 
a/plugins/DupFileManager/DupFileManager.py b/plugins/DupFileManager/DupFileManager.py index f49230a..fa79528 100644 --- a/plugins/DupFileManager/DupFileManager.py +++ b/plugins/DupFileManager/DupFileManager.py @@ -9,7 +9,7 @@ # Python library for parse-reparsepoint # https://pypi.org/project/parse-reparsepoint/ # pip install parse-reparsepoint -import os, sys, time, pathlib, argparse, platform +import os, sys, time, pathlib, argparse, platform, shutil from StashPluginHelper import StashPluginHelper from DupFileManager_config import config # Import config from DupFileManager_config.py @@ -23,9 +23,11 @@ parse_args = parser.parse_args() settings = { "dupWhiteListTag": "", "dupFileTag": "DuplicateMarkForDeletion", + "dupFileTagSwap": "DuplicateMarkForSwap", "mergeDupFilename": False, "permanentlyDelete": False, "whitelistDelDupInSameFolder": False, + "zcleanAfterDel": False, "zwhitelist": "", "zxgraylist": "", "zyblacklist": "", @@ -37,29 +39,55 @@ stash = StashPluginHelper( debugTracing=parse_args.trace, settings=settings, config=config, - maxbytes=10*1024*1024, + maxbytes=100*1024*1024, ) stash.Status() stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") stash.Trace(f"(stashPaths={stash.STASH_PATHS})") +# stash.encodeToUtf8 = True -listSeparator = stash.pluginConfig['listSeparator'] if stash.pluginConfig['listSeparator'] != "" else ',' -addPrimaryDupPathToDetails = stash.pluginConfig['addPrimaryDupPathToDetails'] -mergeDupFilename = stash.pluginSettings['mergeDupFilename'] -moveToTrashCan = False if stash.pluginSettings['permanentlyDelete'] else True -alternateTrashCanPath = stash.pluginConfig['dup_path'] -whitelistDelDupInSameFolder = stash.pluginSettings['whitelistDelDupInSameFolder'] -maxDupToProcess = stash.pluginSettings['zymaxDupToProcess'] 
-duplicateMarkForDeletion = stash.pluginSettings['dupFileTag'] +listSeparator = stash.Setting('listSeparator', ',', notEmpty=True) +addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails') +mergeDupFilename = stash.Setting('mergeDupFilename') +moveToTrashCan = False if stash.Setting('permanentlyDelete') else True +alternateTrashCanPath = stash.Setting('dup_path') +whitelistDelDupInSameFolder = stash.Setting('whitelistDelDupInSameFolder') +maxDupToProcess = int(stash.Setting('zymaxDupToProcess')) +swapHighRes = stash.Setting('swapHighRes') +swapLongLength = stash.Setting('swapLongLength') +significantTimeDiff = stash.Setting('significantTimeDiff') +toRecycleBeforeSwap = stash.Setting('toRecycleBeforeSwap') +cleanAfterDel = stash.Setting('zcleanAfterDel') + +duplicateMarkForDeletion = stash.Setting('dupFileTag') if duplicateMarkForDeletion == "": duplicateMarkForDeletion = 'DuplicateMarkForDeletion' -duplicateWhitelistTag = stash.pluginSettings['dupWhiteListTag'] + +DuplicateMarkForSwap = stash.Setting('dupFileTagSwap') +if DuplicateMarkForSwap == "": + DuplicateMarkForSwap = 'DuplicateMarkForSwap' -excludeMergeTags = [duplicateMarkForDeletion] +duplicateWhitelistTag = stash.Setting('dupWhiteListTag') + +excludeMergeTags = [duplicateMarkForDeletion, DuplicateMarkForSwap] if duplicateWhitelistTag != "": - excludeMergeTags = excludeMergeTags + [duplicateWhitelistTag] + excludeMergeTags += [duplicateWhitelistTag] +stash.init_mergeMetadata(excludeMergeTags) +graylist = stash.Setting('zxgraylist').split(listSeparator) +graylist = [item.lower() for item in graylist] +if graylist == [""] : graylist = [] +stash.Log(f"graylist = {graylist}") +whitelist = stash.Setting('zwhitelist').split(listSeparator) +whitelist = [item.lower() for item in whitelist] +if whitelist == [""] : whitelist = [] +stash.Log(f"whitelist = {whitelist}") +blacklist = stash.Setting('zyblacklist').split(listSeparator) +blacklist = [item.lower() for item in blacklist] +if blacklist == [""] 
: blacklist = [] +stash.Log(f"blacklist = {blacklist}") + def realpath(path): """ get_symbolic_target for win @@ -177,85 +205,41 @@ def hasSameDir(path1, path2): return True return False -def prnt(data): - return ascii(data) # return data.encode('ascii','ignore') +def sendToTrash(path): + if not os.path.isfile(path): + stash.Warn(f"File does not exist: {path}.", toAscii=True) + return False + try: + from send2trash import send2trash # Requirement: pip install Send2Trash + send2trash(path) + return True + except Exception as e: + stash.Error(f"Failed to send file {path} to recycle bin. Error: {e}", toAscii=True) + try: + if os.path.isfile(path): + os.remove(path) + return True + except Exception as e: + stash.Error(f"Failed to delete file {path}. Error: {e}", toAscii=True) + return False -def mergeData(SrcData, DestData): - # Merge tags - dataAdded = "" - for tag in SrcData['tags']: - if tag not in DestData['tags'] and tag['name'] not in excludeMergeTags: - stash.update_scene({'id' : DestData['id'], 'tag_ids' : tag['id']}) - dataAdded += f"{tag['name']} " - if dataAdded != "": - stash.Trace(f"Added tags ({dataAdded}) to file {prnt(DestData['files'][0]['path'])}") - # Merge URLs - dataAdded = "" - listToAdd = DestData['urls'] - for url in SrcData['urls']: - if url not in DestData['urls'] and not url.startswith(stash.STASH_URL): - listToAdd += [url] - dataAdded += f"{url} " - if dataAdded != "": - stash.update_scene({'id' : DestData['id'], 'urls' : listToAdd}) - stash.Trace(f"Added urls ({dataAdded}) to file {prnt(DestData['files'][0]['path'])}") - # Merge performers - dataAdded = "" - listToAdd = [] - for performer in SrcData['performers']: - if performer not in DestData['performers']: - listToAdd += [performer['id']] - dataAdded += f"{performer['id']} " - if dataAdded != "": - for performer in DestData['performers']: - listToAdd += [performer['id']] - stash.update_scene({'id' : DestData['id'], 'performer_ids' : listToAdd}) - stash.Trace(f"Added performers 
({dataAdded}) to file {prnt(DestData['files'][0]['path'])}") - # Merge studio - if DestData['studio'] == None and SrcData['studio'] != None: - stash.update_scene({'id' : DestData['id'], 'studio_id' : SrcData['studio']['id']}) - # Merge galleries - dataAdded = "" - listToAdd = [] - for gallery in SrcData['galleries']: - if gallery not in DestData['galleries']: - listToAdd += [gallery['id']] - dataAdded += f"{gallery['id']} " - if dataAdded != "": - for gallery in DestData['galleries']: - listToAdd += [gallery['id']] - stash.update_scene({'id' : DestData['id'], 'gallery_ids' : listToAdd}) - stash.Trace(f"Added galleries ({dataAdded}) to file {prnt(DestData['files'][0]['path'])}") - # Merge title - if DestData['title'] == "" and SrcData['title'] != "": - stash.update_scene({'id' : DestData['id'], 'title' : SrcData['title']}) - # Merge director - if DestData['director'] == "" and SrcData['director'] != "": - stash.update_scene({'id' : DestData['id'], 'director' : SrcData['director']}) - # Merge date - if DestData['date'] == None and SrcData['date'] != None: - stash.update_scene({'id' : DestData['id'], 'date' : SrcData['date']}) - # Merge details - if DestData['details'] == "" and SrcData['details'] != "": - stash.update_scene({'id' : DestData['id'], 'details' : SrcData['details']}) - # Merge movies - dataAdded = "" - listToAdd = [] - for movie in SrcData['movies']: - if movie not in DestData['movies']: - listToAdd += [{"movie_id" : movie['movie']['id'], "scene_index" : movie['scene_index']}] - dataAdded += f"{movie['movie']['id']} " - if dataAdded != "": - for movie in DestData['movies']: - listToAdd += [{"movie_id" : movie['movie']['id'], "scene_index" : movie['scene_index']}] - stash.update_scene({'id' : DestData['id'], 'movies' : listToAdd}) - stash.Trace(f"Added movies ({dataAdded}) to file {prnt(DestData['files'][0]['path'])}") - # Merge rating100 - if DestData['rating100'] == None and SrcData['rating100'] != None: - stash.update_scene({'id' : DestData['id'], 
'rating100' : SrcData['rating100']}) - # Merge code (Studio Code) - if DestData['code'] == "" and SrcData['code'] != "": - stash.update_scene({'id' : DestData['id'], 'code' : SrcData['code']}) +def significantLessTime(durrationToKeep, durrationOther): + timeDiff = durrationToKeep / durrationOther + if timeDiff < significantTimeDiff: + return True + return False + +def isBetter(DupFileToKeep, DupFile): + # Don't move if both are in whitelist + if isInList(whitelist, DupFileToKeep['files'][0]['path']) and isInList(whitelist, DupFile['files'][0]['path']): + return False + if swapHighRes and (int(DupFileToKeep['files'][0]['width']) > int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) > int(DupFile['files'][0]['height'])): + if not significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(DupFile['files'][0]['duration'])): + return True + if swapLongLength and int(DupFileToKeep['files'][0]['duration']) > int(DupFile['files'][0]['duration']): + if int(DupFileToKeep['files'][0]['width']) >= int(DupFile['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) >= int(DupFile['files'][0]['height']): + return True + return False def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): duration_diff = 10.00 @@ -271,26 +255,16 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp) stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}") - - graylist = stash.pluginSettings['zxgraylist'].split(listSeparator) - graylist = [item.lower() for item in graylist] - if graylist == [""] : graylist = [] - stash.Log(f"graylist = {graylist}") - whitelist = stash.pluginSettings['zwhitelist'].split(listSeparator) - whitelist = [item.lower() for item in whitelist] - if whitelist == [""] : whitelist = [] - stash.Log(f"whitelist = {whitelist}") - blacklist = 
stash.pluginSettings['zyblacklist'].split(listSeparator) - blacklist = [item.lower() for item in blacklist] - if blacklist == [""] : blacklist = [] - stash.Log(f"blacklist = {blacklist}") - QtyDupSet = 0 QtyDup = 0 QtyExactDup = 0 QtyAlmostDup = 0 + QtyRealTimeDiff = 0 QtyTagForDel = 0 QtySkipForDel = 0 + QtySwap = 0 + QtyMerge = 0 + QtyDeleted = 0 stash.Log("#########################################################################") stash.Log("#########################################################################") stash.Log("Waiting for find_duplicate_scenes_diff to return results...") @@ -307,66 +281,88 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False): QtyDup+=1 Scene = stash.find_scene(DupFile['id']) sceneData = f"Scene = {Scene}" - stash.Trace(prnt(sceneData)) + stash.Trace(sceneData, toAscii=True) DupFileDetailList = DupFileDetailList + [Scene] if DupFileToKeep != "": - if DupFileToKeep['files'][0]['duration'] == Scene['files'][0]['duration']: + if int(DupFileToKeep['files'][0]['duration']) == int(Scene['files'][0]['duration']): # Do not count fractions of a second as a difference QtyExactDup+=1 else: QtyAlmostDup+=1 SepLine = "***************************" + if significantLessTime(int(DupFileToKeep['files'][0]['duration']), int(Scene['files'][0]['duration'])): + QtyRealTimeDiff += 1 if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']): DupFileToKeep = Scene elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']): DupFileToKeep = Scene - elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']): - DupFileToKeep = Scene - elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']): - DupFileToKeep = Scene elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']): DupFileToKeep = Scene elif isInList(blacklist, 
DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']): DupFileToKeep = Scene elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']): DupFileToKeep = Scene + elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']): + DupFileToKeep = Scene + elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']): + DupFileToKeep = Scene else: DupFileToKeep = Scene # stash.Trace(f"DupFileToKeep = {DupFileToKeep}") - stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={prnt(Scene['files'][0]['path'])}, KeepPath={prnt(DupFileToKeep['files'][0]['path'])}") + stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path']}, KeepPath={DupFileToKeep['files'][0]['path']}", toAscii=True) for DupFile in DupFileDetailList: if DupFile['id'] != DupFileToKeep['id']: + if merge: + result = stash.merge_metadata(DupFile, DupFileToKeep) + if result != "Nothing To Merge": + QtyMerge += 1 + if isInList(whitelist, DupFile['files'][0]['path']) and (not whitelistDelDupInSameFolder or not hasSameDir(DupFile['files'][0]['path'], DupFileToKeep['files'][0]['path'])): - stash.Log(f"NOT tagging duplicate, because it's in whitelist. 
'{prnt(DupFile['files'][0]['path'])}'") - if dupWhitelistTagId and tagDuplicates: - setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path']) + if isBetter(DupFileToKeep, DupFile): + if merge: + stash.merge_metadata(DupFileToKeep, DupFile) + if toRecycleBeforeSwap: + sendToTrash(DupFile['files'][0]['path']) + shutil.move(DupFileToKeep['files'][0]['path'], DupFile['files'][0]['path']) + stash.Log(f"Moved better file '{DupFileToKeep['files'][0]['path']}' to '{DupFile['files'][0]['path']}'", toAscii=True) + DupFileToKeep = DupFile + QtySwap+=1 + else: + stash.Log(f"NOT processing duplicate, because it's in whitelist. '{DupFile['files'][0]['path']}'", toAscii=True) + if dupWhitelistTagId and tagDuplicates: + setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path']) QtySkipForDel+=1 else: - if merge: - mergeData(DupFile, DupFileToKeep) if deleteDup: DupFileName = DupFile['files'][0]['path'] DupFileNameOnly = pathlib.Path(DupFileName).stem - stash.Log(f"Deleting duplicate '{prnt(DupFileName)}'") + stash.Log(f"Deleting duplicate '{DupFileName}'", toAscii=True) if alternateTrashCanPath != "": - shutil.move(DupFileName, f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}") + destPath = f"{alternateTrashCanPath }{os.sep}{DupFileNameOnly}" + if os.path.isfile(destPath): + destPath = f"{alternateTrashCanPath }{os.sep}_{time.time()}_{DupFileNameOnly}" + shutil.move(DupFileName, destPath) elif moveToTrashCan: - from send2trash import send2trash # Requirement: pip install Send2Trash - send2trash(DupFileName) - else: - os.remove(DupFileName) + sendToTrash(DupFileName) + stash.destroy_scene(DupFile['id'], delete_file=True) + QtyDeleted += 1 elif tagDuplicates: if QtyTagForDel == 0: - stash.Log(f"Tagging duplicate {prnt(DupFile['files'][0]['path'])} for deletion with tag {duplicateMarkForDeletion}.") + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion with tag 
{duplicateMarkForDeletion}.", toAscii=True) else: - stash.Log(f"Tagging duplicate {prnt(DupFile['files'][0]['path'])} for deletion.") + stash.Log(f"Tagging duplicate {DupFile['files'][0]['path']} for deletion.", toAscii=True) setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep['files'][0]['path']) QtyTagForDel+=1 stash.Log(SepLine) if maxDupToProcess > 0 and QtyDup > maxDupToProcess: break - stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}") + stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}") + if cleanAfterDel: + stash.Log("Adding clean jobs to the Task Queue") + stash.metadata_clean(paths=stash.STASH_PATHS) + stash.metadata_clean_generated() + stash.optimise_database() def testSetDupTagOnScene(sceneId): scene = stash.find_scene(sceneId) diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml index ecb5b85..8e0f80d 100644 --- a/plugins/DupFileManager/DupFileManager.yml +++ b/plugins/DupFileManager/DupFileManager.yml @@ -23,6 +23,10 @@ settings: displayName: Whitelist Delete In Same Folder description: (Default=false) Allow whitelist deletion of duplicates within the same whitelist folder. type: BOOLEAN + zcleanAfterDel: + displayName: Run Clean After Delete + description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database. + type: BOOLEAN zwhitelist: displayName: White List description: A comma seperated list of paths NOT to be deleted. E.g. 
C:\Favorite\,E:\MustKeep\ @@ -56,11 +60,3 @@ tasks: description: Delete duplicate files defaultArgs: mode: delete_duplicates - - name: Merge Duplicate Filename - description: Merge duplicate filename sourcetag names, performers, and studios. - defaultArgs: - mode: merge_dup_filename_task - - name: Dry Run Delete Duplicates - description: Only perform a dry run (logging only) of duplicate file deletions. Dry Run setting is ignore when running this task. - defaultArgs: - mode: dryrun_delete_duplicates diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py index 29250f2..3c02fc9 100644 --- a/plugins/DupFileManager/DupFileManager_config.py +++ b/plugins/DupFileManager/DupFileManager_config.py @@ -6,8 +6,16 @@ config = { "listSeparator" : ",", # If enabled, adds the primary duplicate path to the scene detail. "addPrimaryDupPathToDetails" : True, - # Alternative path to move duplicate files. Path needs to be in the same drive as the duplicate file. + # Alternative path to move duplicate files. "dup_path": "", #Example: "C:\\TempDeleteFolder" + # If enabled, swap higher resolution duplicate files to preferred path. + "swapHighRes" : True, + # If enabled, swap longer length media files to preferred path. Longer will be determine by significantLongerTime value. + "swapLongLength" : True, + # The threshold as to what percentage is consider a significant shorter time. + "significantTimeDiff" : .90, # 95% threshold + # If enabled, moves destination file to recycle bin before swapping Hi-Res file. 
+ "toRecycleBeforeSwap" : True, # The following fields are ONLY used when running DupFileManager in script mode "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server diff --git a/plugins/DupFileManager/DupFileManager_config_dev.py b/plugins/DupFileManager/DupFileManager_config_dev.py index a88844b..e4829be 100644 --- a/plugins/DupFileManager/DupFileManager_config_dev.py +++ b/plugins/DupFileManager/DupFileManager_config_dev.py @@ -5,10 +5,6 @@ config_dev = { # If enabled, ignore symbolic links. "ignoreSymbolicLinks" : True, - # If enabled, swap higher resolution duplicate files to preferred path. - "swapHighRes" : True, - # If enabled, swap longer length media files to preferred path. Longer will be determine by significantLongerTime value. - "swapLongLength" : True, # If enabled, swap longer file name to preferred path. "swapLongFileName" : False, @@ -20,8 +16,6 @@ config_dev = { "keepLowerResolution" : False, # If enabled, keep duplicate media with high resolution over media with significant longer time. "keepHighResOverLen" : False, # Requires keepBothHighResAndLongerLen = False - # The threshold as to what percentage is consider a significant longer time. Default is 15% longer. - "significantLongerTime" : 15, # 15% longer time # If enabled, keep both duplicate files if the LOWER resolution file is significantly longer. 
"keepBothHighResAndLongerLen" : True, diff --git a/plugins/DupFileManager/StashPluginHelper.py b/plugins/DupFileManager/StashPluginHelper.py index eec93b6..9ba3c9f 100644 --- a/plugins/DupFileManager/StashPluginHelper.py +++ b/plugins/DupFileManager/StashPluginHelper.py @@ -5,6 +5,8 @@ import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ +_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" + # StashPluginHelper (By David Maisonave aka Axter) # See end of this file for example usage # Log Features: @@ -41,6 +43,7 @@ class StashPluginHelper(StashInterface): STASHPATHSCONFIG = None STASH_PATHS = [] API_KEY = None + excludeMergeTags = None # printTo argument LOG_TO_FILE = 1 @@ -62,6 +65,9 @@ class StashPluginHelper(StashInterface): logLinePreviousHits = [] thredPool = None STASH_INTERFACE_INIT = False + _mergeMetadata = None + encodeToUtf8 = False + convertToAscii = False # If set True, it takes precedence over encodeToUtf8 # Prefix message value LEV_TRACE = "TRACE: " @@ -84,7 +90,7 @@ class StashPluginHelper(StashInterface): debugTracing = None, # Set debugTracing to True so as to output debug and trace logging logFormat = LOG_FORMAT, # Plugin log line format dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file - maxbytes = 2*1024*1024, # Max size of plugin log file + maxbytes = 8*1024*1024, # Max size of plugin log file backupcount = 2, # Backup counts when log file size reaches max size logToWrnSet = 0, # Customize the target output set which will get warning logging logToErrSet = 0, # Customize the target output set which will get error logging @@ -126,12 +132,9 @@ class StashPluginHelper(StashInterface): if debugTracing: self.DEBUG_TRACING = debugTracing if config: self.pluginConfig = config - if 'apiKey' in self.pluginConfig and self.pluginConfig['apiKey'] != "": - self.FRAGMENT_SERVER['ApiKey'] = self.pluginConfig['apiKey'] - if DebugTraceFieldName in self.pluginConfig: - self.DEBUG_TRACING = 
self.pluginConfig[DebugTraceFieldName] - if DryRunFieldName in self.pluginConfig: - self.DRY_RUN = self.pluginConfig[DryRunFieldName] + if self.Setting('apiKey', "") != "": + self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey') + if apiKey and apiKey != "": self.FRAGMENT_SERVER['ApiKey'] = apiKey @@ -181,12 +184,11 @@ class StashPluginHelper(StashInterface): self.pluginSettings = settings if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) - if DebugTraceFieldName in self.pluginSettings: - self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName] - if DryRunFieldName in self.pluginSettings: - self.DRY_RUN = self.pluginSettings[DryRunFieldName] if 'apiKey' in self.STASH_CONFIGURATION: self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) @@ -197,7 +199,22 @@ class StashPluginHelper(StashInterface): def __del__(self): self.thredPool.shutdown(wait=False) - def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, 
toAscii = None): + if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg if printTo == 0: printTo = self.log_to_norm elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: @@ -238,7 +255,7 @@ class StashPluginHelper(StashInterface): if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) - def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno @@ -246,39 +263,39 @@ class StashPluginHelper(StashInterface): if self.DEBUG_TRACING or logAlways: if logMsg == "": logMsg = f"Line number {lineNo}..." - self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways) + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) # Log once per session. Only logs the first time called from a particular line number in the code. - def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False): + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): lineNo = inspect.currentframe().f_back.f_lineno if self.DEBUG_TRACING or logAlways: FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: return self.logLinePreviousHits.append(FuncAndLineNo) - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) # Log INFO on first call, then do Trace on remaining calls. 
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True): + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE lineNo = inspect.currentframe().f_back.f_lineno FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: if traceOnRemainingCalls: - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) else: self.logLinePreviousHits.append(FuncAndLineNo) - self.Log(logMsg, printTo, logging.INFO, lineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) - def Warn(self, logMsg, printTo = 0): + def Warn(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_wrn_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.WARN, lineNo) + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) - def Error(self, logMsg, printTo = 0): + def Error(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_err_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.ERROR, lineNo) + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): if printTo == 0: printTo = self.log_to_norm @@ -310,8 +327,26 @@ class StashPluginHelper(StashInterface): argsWithPython = [f"{PythonExe}"] + args return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) - def Submit(*args, **kwargs): - thredPool.submit(*args, **kwargs) + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging 
than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): @@ -372,3 +407,76 @@ class StashPluginHelper(StashInterface): variables = { "distance": distance, "duration_diff": duration_diff } result = self.call_GQL(query, variables) return result['findDuplicateScenes'] + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + 
self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git 
a/plugins/FileMonitor/StashPluginHelper.py b/plugins/FileMonitor/StashPluginHelper.py index eec93b6..9ba3c9f 100644 --- a/plugins/FileMonitor/StashPluginHelper.py +++ b/plugins/FileMonitor/StashPluginHelper.py @@ -5,6 +5,8 @@ import concurrent.futures from stashapi.stash_types import PhashDistance import __main__ +_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_" + # StashPluginHelper (By David Maisonave aka Axter) # See end of this file for example usage # Log Features: @@ -41,6 +43,7 @@ class StashPluginHelper(StashInterface): STASHPATHSCONFIG = None STASH_PATHS = [] API_KEY = None + excludeMergeTags = None # printTo argument LOG_TO_FILE = 1 @@ -62,6 +65,9 @@ class StashPluginHelper(StashInterface): logLinePreviousHits = [] thredPool = None STASH_INTERFACE_INIT = False + _mergeMetadata = None + encodeToUtf8 = False + convertToAscii = False # If set True, it takes precedence over encodeToUtf8 # Prefix message value LEV_TRACE = "TRACE: " @@ -84,7 +90,7 @@ class StashPluginHelper(StashInterface): debugTracing = None, # Set debugTracing to True so as to output debug and trace logging logFormat = LOG_FORMAT, # Plugin log line format dateFmt = "%y%m%d %H:%M:%S", # Date format when logging to plugin log file - maxbytes = 2*1024*1024, # Max size of plugin log file + maxbytes = 8*1024*1024, # Max size of plugin log file backupcount = 2, # Backup counts when log file size reaches max size logToWrnSet = 0, # Customize the target output set which will get warning logging logToErrSet = 0, # Customize the target output set which will get error logging @@ -126,12 +132,9 @@ class StashPluginHelper(StashInterface): if debugTracing: self.DEBUG_TRACING = debugTracing if config: self.pluginConfig = config - if 'apiKey' in self.pluginConfig and self.pluginConfig['apiKey'] != "": - self.FRAGMENT_SERVER['ApiKey'] = self.pluginConfig['apiKey'] - if DebugTraceFieldName in self.pluginConfig: - self.DEBUG_TRACING = self.pluginConfig[DebugTraceFieldName] - if DryRunFieldName in 
self.pluginConfig: - self.DRY_RUN = self.pluginConfig[DryRunFieldName] + if self.Setting('apiKey', "") != "": + self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey') + if apiKey and apiKey != "": self.FRAGMENT_SERVER['ApiKey'] = apiKey @@ -181,12 +184,11 @@ class StashPluginHelper(StashInterface): self.pluginSettings = settings if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION: self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID]) - if DebugTraceFieldName in self.pluginSettings: - self.DEBUG_TRACING = self.pluginSettings[DebugTraceFieldName] - if DryRunFieldName in self.pluginSettings: - self.DRY_RUN = self.pluginSettings[DryRunFieldName] if 'apiKey' in self.STASH_CONFIGURATION: self.API_KEY = self.STASH_CONFIGURATION['apiKey'] + + self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN) + self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING) if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH]) @@ -197,7 +199,22 @@ class StashPluginHelper(StashInterface): def __del__(self): self.thredPool.shutdown(wait=False) - def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False): + def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False): + if self.pluginSettings != None and name in self.pluginSettings: + if notEmpty == False or self.pluginSettings[name] != "": + return self.pluginSettings[name] + if self.pluginConfig != None and name in self.pluginConfig: + if notEmpty == False or self.pluginConfig[name] != "": + return self.pluginConfig[name] + if default == _ARGUMENT_UNSPECIFIED_ and raiseEx: + raise Exception(f"Missing {name} from both UI settings and config file settings.") + return default + + def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None): + if toAscii or (toAscii == None and 
(self.encodeToUtf8 or self.convertToAscii)): + logMsg = self.asc2(logMsg) + else: + logMsg = logMsg if printTo == 0: printTo = self.log_to_norm elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO: @@ -238,7 +255,7 @@ class StashPluginHelper(StashInterface): if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways): print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr) - def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1): + def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE if lineNo == -1: lineNo = inspect.currentframe().f_back.f_lineno @@ -246,39 +263,39 @@ class StashPluginHelper(StashInterface): if self.DEBUG_TRACING or logAlways: if logMsg == "": logMsg = f"Line number {lineNo}..." - self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways) + self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii) # Log once per session. Only logs the first time called from a particular line number in the code. - def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False): + def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None): lineNo = inspect.currentframe().f_back.f_lineno if self.DEBUG_TRACING or logAlways: FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: return self.logLinePreviousHits.append(FuncAndLineNo) - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) # Log INFO on first call, then do Trace on remaining calls. 
- def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True): + def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None): if printTo == 0: printTo = self.LOG_TO_FILE lineNo = inspect.currentframe().f_back.f_lineno FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}" if FuncAndLineNo in self.logLinePreviousHits: if traceOnRemainingCalls: - self.Trace(logMsg, printTo, logAlways, lineNo) + self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii) else: self.logLinePreviousHits.append(FuncAndLineNo) - self.Log(logMsg, printTo, logging.INFO, lineNo) + self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii) - def Warn(self, logMsg, printTo = 0): + def Warn(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_wrn_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.WARN, lineNo) + self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii) - def Error(self, logMsg, printTo = 0): + def Error(self, logMsg, printTo = 0, toAscii = None): if printTo == 0: printTo = self.log_to_err_set lineNo = inspect.currentframe().f_back.f_lineno - self.Log(logMsg, printTo, logging.ERROR, lineNo) + self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii) def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1): if printTo == 0: printTo = self.log_to_norm @@ -310,8 +327,26 @@ class StashPluginHelper(StashInterface): argsWithPython = [f"{PythonExe}"] + args return self.ExecuteProcess(argsWithPython,ExecDetach=ExecDetach) - def Submit(*args, **kwargs): - thredPool.submit(*args, **kwargs) + def Submit(self, *args, **kwargs): + return self.thredPool.submit(*args, **kwargs) + + def asc2(self, data, convertToAscii=None): + if convertToAscii or (convertToAscii == None and self.convertToAscii): + return ascii(data) + return str(str(data).encode('utf-8'))[2:-1] # This works better for logging 
than ascii function + # data = str(data).encode('ascii','ignore') # This works better for logging than ascii function + # return str(data)[2:-1] # strip out b'str' + + def init_mergeMetadata(self, excludeMergeTags=None): + self.excludeMergeTags = excludeMergeTags + self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags) + + # Must call init_mergeMetadata, before calling merge_metadata + def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata + if type(SrcData) is int: + SrcData = self.find_scene(SrcData) + DestData = self.find_scene(DestData) + return self._mergeMetadata.merge(SrcData, DestData) # Extends class StashInterface with functions which are not yet in the class def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]): @@ -372,3 +407,76 @@ class StashPluginHelper(StashInterface): variables = { "distance": distance, "duration_diff": duration_diff } result = self.call_GQL(query, variables) return result['findDuplicateScenes'] + +class mergeMetadata: # A class to merge scene metadata from source scene to destination scene + srcData = None + destData = None + stash = None + excludeMergeTags = None + dataDict = None + result = "Nothing To Merge" + def __init__(self, stash, excludeMergeTags=None): + self.stash = stash + self.excludeMergeTags = excludeMergeTags + + def merge(self, SrcData, DestData): + self.srcData = SrcData + self.destData = DestData + ORG_DATA_DICT = {'id' : self.destData['id']} + self.dataDict = ORG_DATA_DICT.copy() + self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags) + self.mergeItems('performers', 'performer_ids', []) + self.mergeItems('galleries', 'gallery_ids', []) + self.mergeItems('movies', 'movies', []) + self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL) + self.mergeItem('studio', 'studio_id', 'id') + self.mergeItem('title') + self.mergeItem('director') + self.mergeItem('date') + 
self.mergeItem('details') + self.mergeItem('rating100') + self.mergeItem('code') + if self.dataDict != ORG_DATA_DICT: + self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True) + self.result = self.stash.update_scene(self.dataDict) + return self.result + + def Nothing(self, Data): + if not Data or Data == "" or (type(Data) is str and Data.strip() == ""): + return True + return False + + def mergeItem(self,fieldName, updateFieldName=None, subField=None): + if updateFieldName == None: + updateFieldName = fieldName + if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]): + if subField == None: + self.dataDict.update({ updateFieldName : self.srcData[fieldName]}) + else: + self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]}) + def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None): + dataAdded = "" + for item in self.srcData[fieldName]: + if item not in self.destData[fieldName]: + if NotStartWith == None or not item.startswith(NotStartWith): + if excludeName == None or item['name'] not in excludeName: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + dataAdded += f"{item['movie']['id']} " + elif updateFieldName == None: + listToAdd += [item] + dataAdded += f"{item} " + else: + listToAdd += [item['id']] + dataAdded += f"{item['id']} " + if dataAdded != "": + if updateFieldName == None: + updateFieldName = fieldName + else: + for item in self.destData[fieldName]: + if fieldName == 'movies': + listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}] + else: + listToAdd += [item['id']] + self.dataDict.update({ updateFieldName : listToAdd}) + # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True) diff --git a/plugins/FileMonitor/filemonitor.py 
b/plugins/FileMonitor/filemonitor.py index 75398d3..dd534c1 100644 --- a/plugins/FileMonitor/filemonitor.py +++ b/plugins/FileMonitor/filemonitor.py @@ -48,6 +48,7 @@ stash = StashPluginHelper( config=config, logToErrSet=logToErrSet, logToNormSet=logToNormSet, + maxbytes=10*1024*1024, apiKey=parse_args.apikey ) stash.Status() diff --git a/plugins/RenameFile/renamefile.py b/plugins/RenameFile/renamefile.py index a8ab1fd..994145b 100644 --- a/plugins/RenameFile/renamefile.py +++ b/plugins/RenameFile/renamefile.py @@ -38,7 +38,7 @@ QUERY_ALL_SCENES = """ RFH = RotatingFileHandler( filename=LOG_FILE_PATH, mode='a', - maxBytes=2*1024*1024, # Configure logging for this script with max log file size of 2000K + maxBytes=8*1024*1024, # Configure logging for this script with max log file size of 8MB backupCount=2, encoding=None, delay=0 diff --git a/plugins/RenameFile/renamefile_settings.py b/plugins/RenameFile/renamefile_settings.py index 6a4445d..4eef5e5 100644 --- a/plugins/RenameFile/renamefile_settings.py +++ b/plugins/RenameFile/renamefile_settings.py @@ -38,7 +38,7 @@ config = { "date": '', }, # Add tags to exclude from RenameFile. - "excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"], + "excludeTags": ["DuplicateMarkForDeletion", "DuplicateMarkForSwap", "DuplicateWhitelistFile","_DuplicateMarkForDeletion","_DuplicateMarkForSwap", "_DuplicateWhitelistFile"], # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" "pathToExclude": "", # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"