")
+ fileHtmlReport.write(f"{stash.Setting('htmlReportTable')}\n")
+ fileHtmlReport.write(f"{htmlReportTableRow}{htmlReportTableHeader}Scene{htmlReportTableHeader}Duplicate to Delete{htmlReportTableHeader}Scene-ToKeep{htmlReportTableHeader}Duplicate to Keep\n")
+
+ if graylistTagging and stash.startsWithInList(graylist, DupFile['files'][0]['path']):
stash.addTag(DupFile, graylistMarkForDeletion, ignoreAutoTag=True)
if didAddTag:
QtyNewlyTag+=1
@@ -589,25 +775,78 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
if maxDupToProcess > 0 and QtyDup > maxDupToProcess:
break
+ if fileHtmlReport != None:
+ fileHtmlReport.write("\n")
+ if PaginateId > 0:
+ homeHtmReportLink = f"[Home]"
+ if PaginateId > 1:
+ prevHtmReport = htmlReportNameHomePage.replace(".html", f"_{PaginateId-1}.html")
+ else:
+ prevHtmReport = htmlReportNameHomePage
+ prevHtmReportLink = f"[Prev]"
+ fileHtmlReport.write(f"
{homeHtmReportLink}
{prevHtmReportLink}
")
+ fileHtmlReport.write(f"
Total Tagged for Deletion {QtyTagForDel}
\n")
+ # ToDo: Add a menu after the report with the following options:
+ # Delete all Dup tagged files (any match)
+ # Remove all Dup tagged files (Just remove from stash, and leave file)
+ # Delete Blacklist Dup tagged files
+ # Remove Blacklist Dup tagged files
+ # Delete all Dup tagged files (Exact Match)
+ # Remove all Dup tagged files (Exact Match)
+ # Delete all Dup tagged files (High Match)
+ # Remove all Dup tagged files (High Match)
+ # Delete all Dup tagged files (Medium Match)
+ # Remove all Dup tagged files (Medium Match)
+ # Delete all Dup tagged files (Low Match)
+ # Remove all Dup tagged files (Low Match)
+ # Clear dup tag from all scenes
+ # Delete dup tag
+ # Clear ExcludeDup tag
+ # Delete ExcludeDup tag
+ # Clear GraylistMarkForDel tag
+ # Delete GraylistMarkForDel tag
+ # Clear all DupFileManager created tags
+ # Delete all DupFileManager created tags
+ fileHtmlReport.write(f"{stash.Setting('htmlReportPostfix')}")
+ fileHtmlReport.close()
+ # ToDo: Add a better working method to open HTML page htmlReportName
+ stash.Log(f"Opening web page {htmlReportName}")
+ import webbrowser
+ webbrowser.open(htmlReportName, new=2, autoraise=True)
+ os.system(f"start file://{htmlReportName}")
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"View Stash duplicate report using the following link: file://{htmlReportName}", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+ stash.Log(f"************************************************************", printTo = stash.LogTo.STASH)
+
+
stash.Debug("#####################################################")
stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyDeleted={QtyDeleted}, QtySwap={QtySwap}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExcludeForDel={QtyExcludeForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}, QtyMerge={QtyMerge}, QtyRealTimeDiff={QtyRealTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
killScanningJobs()
- if cleanAfterDel:
+ if cleanAfterDel and deleteDup:
stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
stash.metadata_clean()
stash.metadata_clean_generated()
stash.optimise_database()
- if doNotGeneratePhash == False:
+ if doGeneratePhash:
stash.metadata_generate({"phashes": True})
+def findCurrentTagId(tagNames):
+ tagNames = [i for n, i in enumerate(tagNames) if i not in tagNames[:n]]
+ for tagName in tagNames:
+ tagId = stash.find_tags(q=tagName)
+ if len(tagId) > 0 and 'id' in tagId[0]:
+ stash.Debug(f"Using tag name {tagName} with Tag ID {tagId[0]['id']}")
+ return tagId[0]['id']
+ return "-1"
+
def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=False):
- tagId = stash.find_tags(q=duplicateMarkForDeletion)
- if len(tagId) > 0 and 'id' in tagId[0]:
- tagId = tagId[0]['id']
- else:
+ tagId = findCurrentTagId([duplicateMarkForDeletion, base1_duplicateWhitelistTag, base2_duplicateWhitelistTag, 'DuplicateMarkForDeletion', '_DuplicateMarkForDeletion'])
+ if int(tagId) < 0:
stash.Warn(f"Could not find tag ID for tag '{duplicateMarkForDeletion}'.")
return
-
+
excludedTags = [duplicateMarkForDeletion]
if clearAllDupfileManagerTags:
excludedTags = [duplicateMarkForDeletion, duplicateWhitelistTag, excludeDupFileDeleteTag, graylistMarkForDeletion, longerDurationLowerResolution]
@@ -683,8 +922,53 @@ def manageTagggedDuplicates(deleteScenes=False, clearTag=False, setGrayListTag=F
stash.Debug("#####################################################")
stash.Log(f"QtyDup={QtyDup}, QtyClearedTags={QtyClearedTags}, QtySetGraylistTag={QtySetGraylistTag}, QtyDeleted={QtyDeleted}, QtyFailedQuery={QtyFailedQuery}", printTo=LOG_STASH_N_PLUGIN)
killScanningJobs()
- # if doNotGeneratePhash == False and clearTag == False:
- # stash.metadata_generate({"phashes": True})
+ if deleteScenes:
+ if cleanAfterDel:
+ stash.Log("Adding clean jobs to the Task Queue", printTo=LOG_STASH_N_PLUGIN)
+ stash.metadata_clean()
+ stash.metadata_clean_generated()
+ stash.optimise_database()
+ if doGeneratePhash:
+ stash.metadata_generate({"phashes": True})
+
+def removeDupTag():
+ if 'removeDupTag' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find removeDupTag in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ sceneToRemoveTag = stash.JSON_INPUT['args']['removeDupTag']
+ stash.removeTag(sceneToRemoveTag, duplicateMarkForDeletion)
+ stash.Log(f"Done removing tag from scene {sceneToRemoveTag}.")
+
+def addExcludeForDelTag():
+ if 'addExcludeForDelTag' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find addExcludeForDelTag in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['addExcludeForDelTag']
+ stash.addTag(scene, excludeDupFileDeleteTag)
+ stash.Log(f"Done adding exclude tag to scene {scene}.")
+
+def removeExcludeForDelTag():
+ if 'removeExcludeForDelTag' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find removeExcludeForDelTag in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ scene = stash.JSON_INPUT['args']['removeExcludeForDelTag']
+ stash.removeTag(scene, excludeDupFileDeleteTag)
+ stash.Log(f"Done removing exclude tag from scene {scene}.")
+
+def mergeTags():
+ if 'mergeScenes' not in stash.JSON_INPUT['args']:
+ stash.Error(f"Could not find mergeScenes in JSON_INPUT ({stash.JSON_INPUT['args']})")
+ return
+ mergeScenes = stash.JSON_INPUT['args']['mergeScenes']
+ scenes = mergeScenes.split(":")
+ if len(scenes) < 2:
+ stash.Error(f"Could not get both scenes from string {mergeScenes}")
+ return
+ stash.Log(f"Merging tags for scene {scenes[0]} and scene {scenes[1]}")
+ scene1 = stash.find_scene(int(scenes[0]))
+ scene2 = stash.find_scene(int(scenes[1]))
+ stash.mergeMetadata(scene1, scene2)
+ stash.Log(f"Done merging scenes for scene {scenes[0]} and scene {scenes[1]}")
try:
if stash.PLUGIN_TASK_NAME == "tag_duplicates_task":
@@ -705,6 +989,18 @@ try:
elif stash.PLUGIN_TASK_NAME == "generate_phash_task":
stash.metadata_generate({"phashes": True})
stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "remove_a_duplicate_tag":
+ removeDupTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "add_an_exclude_tag":
+ addExcludeForDelTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "remove_an_exclude_tag":
+ removeExcludeForDelTag()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
+ elif stash.PLUGIN_TASK_NAME == "merge_tags":
+ mergeTags()
+ stash.Debug(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif parse_args.dup_tag:
stash.PLUGIN_TASK_NAME = "dup_tag"
mangeDupFiles(tagDuplicates=True, merge=mergeDupFilename)
@@ -731,8 +1027,4 @@ except Exception as e:
stash.convertToAscii = False
stash.Error(f"Error: {e}\nTraceBack={tb}")
-
-
-
-
stash.Log("\n*********************************\nEXITING ***********************\n*********************************")
diff --git a/plugins/DupFileManager/DupFileManager.yml b/plugins/DupFileManager/DupFileManager.yml
index d3b029e..3cdb045 100644
--- a/plugins/DupFileManager/DupFileManager.yml
+++ b/plugins/DupFileManager/DupFileManager.yml
@@ -3,38 +3,14 @@ description: Manages duplicate files.
version: 0.1.6
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
settings:
- clearAllDupfileManagerTags:
- displayName: Clear All Tags
- description: Clear Tags task clears scenes of all tags (DuplicateMarkForDeletion, _DuplicateWhite..., _ExcludeDup..., _Graylist..., _LongerDur...)
- type: BOOLEAN
- doNotGeneratePhash:
- displayName: Do Not Generate PHASH
- description: Do not generate PHASH after tag or delete task.
- type: BOOLEAN
mergeDupFilename:
displayName: Merge Duplicate Tags
description: Before deletion, merge metadata from duplicate. E.g. Tag names, performers, studios, title, galleries, rating, details, etc...
type: BOOLEAN
- permanentlyDelete:
- displayName: Permanent Delete
- description: Enable to permanently delete files, instead of moving files to trash can.
- type: BOOLEAN
whitelistDelDupInSameFolder:
displayName: Whitelist Delete In Same Folder
description: Allow whitelist deletion of duplicates within the same whitelist folder.
type: BOOLEAN
- whitelistDoTagLowResDup:
- displayName: Whitelist Duplicate Tagging
- description: Enable to tag whitelist duplicates of lower resolution or duration or same folder.
- type: BOOLEAN
- xGrayListTagging:
- displayName: Tag Graylist
- description: When adding tag DuplicateMarkForDeletion to graylist scene, also add tag _GraylistMarkForDeletion.
- type: BOOLEAN
- zCleanAfterDel:
- displayName: Run Clean After Delete
- description: After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
- type: BOOLEAN
zSwapBetterBitRate:
displayName: Swap Better Bit Rate
description: Swap better bit rate for duplicate files. Use with DupFileManager_config.py file option favorHighBitRate
@@ -73,7 +49,7 @@ settings:
type: NUMBER
zyMaxDupToProcess:
displayName: Max Dup Process
- description: Maximum number of duplicates to process. If 0, infinity
+ description: (Default=0) Maximum number of duplicates to process. If 0, infinity.
type: NUMBER
zzDebug:
displayName: Debug
diff --git a/plugins/DupFileManager/DupFileManager_config.py b/plugins/DupFileManager/DupFileManager_config.py
index c219ca1..7ae717b 100644
--- a/plugins/DupFileManager/DupFileManager_config.py
+++ b/plugins/DupFileManager/DupFileManager_config.py
@@ -12,6 +12,22 @@ config = {
"toRecycleBeforeSwap" : True,
# Character used to seperate items on the whitelist, blacklist, and graylist
"listSeparator" : ",",
+ # Enable to permanently delete files, instead of moving files to trash can.
+ "permanentlyDelete" : False,
+ # After running a 'Delete Duplicates' task, run Clean, Clean-Generated, and Optimize-Database.
+ "cleanAfterDel" : True,
+ # Generate PHASH after tag or delete task.
+ "doGeneratePhash" : False,
+ # If enabled, skip processing tagged scenes. This option is ignored if createHtmlReport is True
+ "skipIfTagged" : False,
+ # If enabled, stop multiple scanning jobs after processing duplicates
+ "killScanningPostProcess" : True,
+ # If enabled, tag scenes which have longer duration, but lower resolution
+ "tagLongDurationLowRes" : True,
+ # If enabled, bit-rate is used in important comparisons for function allThingsEqual
+ "bitRateIsImporantComp" : True,
+ # If enabled, codec is used in important comparisons for function allThingsEqual
+ "codecIsImporantComp" : True,
# Tag names **************************************************
# Tag used to tag duplicates with lower resolution, duration, and file name length.
@@ -25,6 +41,16 @@ config = {
# Tag name for scenes with significant longer duration but lower resolution
"longerDurationLowerResolution" : "_LongerDurationLowerResolution",
+ # Other tag related options **************************************************
+ # If enabled, when adding tag DuplicateMarkForDeletion to graylist scene, also add tag _GraylistMarkForDeletion.
+ "graylistTagging" : True,
+ # If enabled, the Clear Tags task clears scenes of all tags (DuplicateMarkForDeletion, _DuplicateWhite..., _ExcludeDup..., _Graylist..., _LongerDur...)
+ "clearAllDupfileManagerTags" : True,
+ # If enabled, append dup tag name with match duplicate distance number. I.E. (DuplicateMarkForDeletion_0) or (DuplicateMarkForDeletion_1)
+ "appendMatchDupDistance" : True,
+ # If enabled, start dup tag name with an underscore. I.E. (_DuplicateMarkForDeletion). Places tag at the end of tag list.
+ "underscoreDupFileTag" : True,
+
# Favor setings *********************************************
# If enabled, favor longer file name over shorter. If disabled, favor shorter file name.
"favorLongerFileName" : True,
@@ -51,16 +77,155 @@ config = {
# Determines which codecRankingSet to use when ranking codec. Default is 1 for codecRankingSet1
"codecRankingSetToUse" : 1,
- # If enabled, skip processing tagged scenes
- "skipIfTagged" : True,
- # If enabled, stop multiple scanning jobs after processing duplicates
- "killScanningPostProcess" : True,
- # If enabled, tag scenes which have longer duration, but lower resolution
- "tagLongDurationLowRes" : True,
- # If enabled, bit-rate is used in important comparisons for function allThingsEqual
- "bitRateIsImporantComp" : True,
- # If enabled, codec is used in important comparisons for function allThingsEqual
- "codecIsImporantComp" : True,
+ # HTML Report **************************************************
+ # If enabled, create an HTML report when tagging duplicate files
+ "createHtmlReport" : True,
+ # If enabled, report displays stream instead of preview for video
+ "streamOverPreview" : False, # This option works in Chrome, but does not work very well on firefox.
+ # File name of the HTML duplicate report created when tagging duplicate files
+ "htmlReportName" : "DuplicateTagScenes.html",
+ # HTML report prefix, before table listing
+ "htmlReportPrefix" : """
+
+
+Stash Duplicate Report
+
+
+
+
+
+