This commit is contained in:
David Maisonave
2024-08-20 20:35:06 -04:00
parent fb0760acde
commit 826e651a6e
15 changed files with 312 additions and 141 deletions

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler from logging.handlers import RotatingFileHandler
import inspect, sys, os, pathlib, logging, json import re, inspect, sys, os, pathlib, logging, json
import concurrent.futures import concurrent.futures
from stashapi.stash_types import PhashDistance from stashapi.stash_types import PhashDistance
import __main__ import __main__
@@ -30,7 +30,6 @@ class StashPluginHelper(StashInterface):
PLUGINS_PATH = None PLUGINS_PATH = None
pluginSettings = None pluginSettings = None
pluginConfig = None pluginConfig = None
STASH_INTERFACE_INIT = False
STASH_URL = None STASH_URL = None
STASH_CONFIGURATION = None STASH_CONFIGURATION = None
JSON_INPUT = None JSON_INPUT = None
@@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface):
pluginLog = None pluginLog = None
logLinePreviousHits = [] logLinePreviousHits = []
thredPool = None thredPool = None
STASH_INTERFACE_INIT = False
# Prefix message value # Prefix message value
LEV_TRACE = "TRACE: " LEV_TRACE = "TRACE: "
@@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface):
if logToNormSet: self.log_to_norm = logToNormSet if logToNormSet: self.log_to_norm = logToNormSet
if stash_url and len(stash_url): self.STASH_URL = stash_url if stash_url and len(stash_url): self.STASH_URL = stash_url
self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
# print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
@@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self): def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
# def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
# query = """ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
# query FindDuplicateScenes($distance: Int) { query = """
# findDuplicateScenes(distance: $distance) { query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
# ...SceneSlim findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
# } ...SceneSlim
# } }
# """ }
# if fragment: """
# query = re.sub(r'\.\.\.SceneSlim', fragment, query) if fragment:
# else: query = re.sub(r'\.\.\.SceneSlim', fragment, query)
# query = """ else:
# query FindDuplicateScenes($distance: Int) { query += "fragment SceneSlim on Scene { id }"
# findDuplicateScenes(distance: $distance)
# } variables = { "distance": distance, "duration_diff": duration_diff }
# """ result = self.call_GQL(query, variables)
# variables = { return result['findDuplicateScenes']
# "distance": distance
# }
# result = self.call_GQL(query, variables)
# return result['findDuplicateScenes']

View File

@@ -1,21 +1,14 @@
# Description: This is a Stash plugin which manages duplicate files. # Description: This is a Stash plugin which manages duplicate files.
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
# Note: To call this script outside of Stash, pass any argument. # Note: To call this script outside of Stash, pass argument --url
# Example: python DupFileManager.py start # Example: python DupFileManager.py --url http://localhost:9999 -a
# Research: # Research:
# Research following links to complete this plugin: # Research following links to complete this plugin:
# https://github.com/WithoutPants/stash-plugin-duplicate-finder
#
# Look at options in programs from the following link:
# https://video.stackexchange.com/questions/25302/how-can-i-find-duplicate-videos-by-content
#
# Python library for parse-reparsepoint # Python library for parse-reparsepoint
# https://pypi.org/project/parse-reparsepoint/ # https://pypi.org/project/parse-reparsepoint/
# pip install parse-reparsepoint # pip install parse-reparsepoint
#
# Look at stash API find_duplicate_scenes
import os, sys, time, pathlib, argparse, platform import os, sys, time, pathlib, argparse, platform
from StashPluginHelper import StashPluginHelper from StashPluginHelper import StashPluginHelper
from DupFileManager_config import config # Import config from DupFileManager_config.py from DupFileManager_config import config # Import config from DupFileManager_config.py
@@ -24,15 +17,18 @@ parser = argparse.ArgumentParser()
parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL') parser.add_argument('--url', '-u', dest='stash_url', type=str, help='Add Stash URL')
parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.') parser.add_argument('--trace', '-t', dest='trace', action='store_true', help='Enables debug trace mode.')
parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.') parser.add_argument('--remove_dup', '-r', dest='remove', action='store_true', help='Remove (delete) duplicate files.')
parser.add_argument('--dryrun', '-d', dest='dryrun', action='store_true', help='Do dryrun for deleting duplicate files. No files are deleted, and only logging occurs.') parser.add_argument('--add_dup_tag', '-a', dest='dup_tag', action='store_true', help='Set a tag to duplicate files.')
parse_args = parser.parse_args() parse_args = parser.parse_args()
settings = { settings = {
"mergeDupFilename": True, "mergeDupFilename": True,
"moveToTrashCan": False, "moveToTrashCan": False,
"whitelist": [], "dupFileTag": "DuplicateMarkForDeletion",
"dupWhiteListTag": "",
"zxgraylist": "",
"zwhitelist": "",
"zzblacklist": "",
"zzdebugTracing": False, "zzdebugTracing": False,
"zzdryRun": False,
} }
stash = StashPluginHelper( stash = StashPluginHelper(
stash_url=parse_args.stash_url, stash_url=parse_args.stash_url,
@@ -41,10 +37,13 @@ stash = StashPluginHelper(
config=config config=config
) )
stash.Status() stash.Status()
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************") stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
stash.Trace(f"(stashPaths={stash.STASH_PATHS})") stash.Trace(f"(stashPaths={stash.STASH_PATHS})")
listSeparator = stash.pluginConfig['listSeparator'] if stash.pluginConfig['listSeparator'] != "" else ','
addPrimaryDupPathToDetails = stash.pluginConfig['addPrimaryDupPathToDetails']
def realpath(path): def realpath(path):
""" """
get_symbolic_target for win get_symbolic_target for win
@@ -77,7 +76,7 @@ def isReparsePoint(path):
path = os.path.dirname(path) path = os.path.dirname(path)
return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT return win32api.GetFileAttributes(path) & win32con.FILE_ATTRIBUTE_REPARSE_POINT
def mangeDupFiles(merge=False, deleteDup=False, DryRun=False): def testReparsePointAndSymLink(merge=False, deleteDup=False):
stash.Trace(f"Debug Tracing (platform.system()={platform.system()})") stash.Trace(f"Debug Tracing (platform.system()={platform.system()})")
myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link myTestPath1 = r"B:\V\V\Tip\POV - Holly Molly petite ginger anal slut - RedTube.mp4" # not a reparse point or symbolic link
myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point myTestPath2 = r"B:\_\SpecialSet\Amateur Anal Attempts\BRCC test studio name.m2ts" # reparse point
@@ -117,22 +116,177 @@ def mangeDupFiles(merge=False, deleteDup=False, DryRun=False):
stash.Log(f"Not isSymLink '{myTestPath6}'") stash.Log(f"Not isSymLink '{myTestPath6}'")
return return
def createTagId(tagName, tagName_descp, deleteIfExist = False):
    """Return the id of the Stash tag named *tagName*, creating it if absent.

    If a matching tag already exists and deleteIfExist is True, the old tag
    is destroyed first and a brand-new one is created (so the description is
    refreshed); otherwise the existing tag's id is returned as-is.
    """
    matches = stash.find_tags(q=tagName)
    if matches:
        existing = matches[0]
        if not deleteIfExist:
            return existing['id']
        # Caller asked for a clean slate: drop the stale tag before recreating.
        stash.destroy_tag(int(existing['id']))
    created = stash.create_tag({"name":tagName, "description":tagName_descp, "ignore_auto_tag": True})
    stash.Log(f"Dup-tagId={created['id']}")
    return created['id']
def setTagId(tagId, tagName, sceneDetails, PrimeDuplicateScene = ""):
    """Attach tag *tagId* to a scene, optionally noting its primary duplicate.

    When *PrimeDuplicateScene* (the path of the duplicate being kept) is given
    and the addPrimaryDupPathToDetails config option is enabled, a
    "Primary Duplicate = <path>" prefix is written into the scene's details
    field (unless it is already there). If the scene already carries
    *tagName*, only the details are updated; otherwise the tag is added.

    Fix: the `elif sceneDetails['details'] == ""` branch was missing its
    trailing colon, which made this function a SyntaxError.
    """
    if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
        if sceneDetails['details'].startswith(f"Primary Duplicate = {PrimeDuplicateScene}"):
            # Details already record this primary duplicate; nothing to add.
            PrimeDuplicateScene = ""
        elif sceneDetails['details'] == "":  # fixed: colon was missing here
            PrimeDuplicateScene = f"Primary Duplicate = {PrimeDuplicateScene}"
        else:
            # Prepend the marker while preserving the existing details text.
            PrimeDuplicateScene = f"Primary Duplicate = {PrimeDuplicateScene}; {sceneDetails['details']}"
    for tag in sceneDetails['tags']:
        if tag['name'] == tagName:
            # Scene is already tagged; at most refresh the details field.
            if PrimeDuplicateScene != "" and addPrimaryDupPathToDetails:
                stash.update_scene({'id' : sceneDetails['id'], 'details' : PrimeDuplicateScene})
            return
    if PrimeDuplicateScene == "" or not addPrimaryDupPathToDetails:
        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId})
    else:
        stash.update_scene({'id' : sceneDetails['id'], 'tag_ids' : tagId, 'details' : PrimeDuplicateScene})
def isInList(listToCk, pathToCk):
    """Return True if *pathToCk* starts with any prefix in *listToCk*.

    The path is lowercased before comparison; entries in *listToCk* are
    expected to be lowercase already (callers normalize them at load time).
    """
    needle = pathToCk.lower()
    return any(needle.startswith(prefix) for prefix in listToCk)
def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
    """Find duplicate scenes and tag (or delete) the lesser copy of each set.

    For every duplicate set returned by find_duplicate_scenes_diff, the "best"
    scene is chosen by, in order: higher resolution, longer duration, larger
    file size, longer path, then whitelist/blacklist/graylist path preference.
    Every other scene in the set is tagged for deletion (tagDuplicates=True),
    deleted (deleteDup=True — deletion logic is still a ToDo), or skipped if
    its path is whitelisted.

    Fix: the Trace call previously did Scene.encode(...) on the dict returned
    by stash.find_scene, which raises AttributeError; it now stringifies first.
    """
    duration_diff = 10.00  # max duration difference (seconds) for a phash match
    duplicateMarkForDeletion = stash.pluginSettings['dupFileTag']
    duplicateMarkForDeletion_descp = 'Tag added to duplicate scenes so-as to tag them for deletion.'
    if duplicateMarkForDeletion == "":
        duplicateMarkForDeletion = 'DuplicateMarkForDeletion'
    stash.Log(f"duplicateMarkForDeletion = {duplicateMarkForDeletion}")
    dupTagId = createTagId(duplicateMarkForDeletion, duplicateMarkForDeletion_descp)
    stash.Trace(f"dupTagId={dupTagId} name={duplicateMarkForDeletion}")
    duplicateWhitelistTag = stash.pluginSettings['dupWhiteListTag']
    dupWhitelistTagId = None
    if duplicateWhitelistTag != "":
        stash.Log(f"duplicateWhitelistTag = {duplicateWhitelistTag}")
        duplicateWhitelistTag_descp = 'Tag added to duplicate scenes which are in the whitelist. This means there are two or more duplicates in the whitelist.'
        dupWhitelistTagId = createTagId(duplicateWhitelistTag, duplicateWhitelistTag_descp)
        stash.Trace(f"dupWhitelistTagId={dupWhitelistTagId} name={duplicateWhitelistTag}")
    # Load and normalize (lowercase) the three path-preference lists.
    graylist = stash.pluginSettings['zxgraylist'].split(listSeparator)
    graylist = [item.lower() for item in graylist]
    if graylist == [""] : graylist = []
    stash.Log(f"graylist = {graylist}")
    whitelist = stash.pluginSettings['zwhitelist'].split(listSeparator)
    whitelist = [item.lower() for item in whitelist]
    if whitelist == [""] : whitelist = []
    stash.Log(f"whitelist = {whitelist}")
    blacklist = stash.pluginSettings['zzblacklist'].split(listSeparator)
    blacklist = [item.lower() for item in blacklist]
    if blacklist == [""] : blacklist = []
    stash.Log(f"blacklist = {blacklist}")
    QtyDupSet = 0
    QtyDup = 0
    QtyExactDup = 0
    QtyAlmostDup = 0
    QtyTagForDel = 0
    QtySkipForDel = 0
    stash.Log("Waiting for find_duplicate_scenes_diff to return results...")
    DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
    stash.Log("#########################################################################")
    stash.Log("#########################################################################")
    for DupFileSet in DupFileSets:
        stash.Trace(f"DupFileSet={DupFileSet}")
        QtyDupSet+=1
        SepLine = "---------------------------"
        DupFileToKeep = ""
        DupToCopyFrom = ""
        DupFileDetailList = []
        for DupFile in DupFileSet:
            QtyDup+=1
            Scene = stash.find_scene(DupFile['id'])
            # Fixed: find_scene returns a dict; stringify before encoding.
            stash.Trace(f"Scene = {str(Scene).encode('ascii','ignore')}")
            DupFileDetailList = DupFileDetailList + [Scene]
            if DupFileToKeep != "":
                if DupFileToKeep['files'][0]['duration'] == Scene['files'][0]['duration']:
                    QtyExactDup+=1
                else:
                    QtyAlmostDup+=1
                    SepLine = "***************************"
                # Prefer the scene with better resolution, duration, size,
                # path length, then white/black/gray-list path preference.
                if int(DupFileToKeep['files'][0]['width']) < int(Scene['files'][0]['width']) or int(DupFileToKeep['files'][0]['height']) < int(Scene['files'][0]['height']):
                    DupFileToKeep = Scene
                elif int(DupFileToKeep['files'][0]['duration']) < int(Scene['files'][0]['duration']):
                    DupFileToKeep = Scene
                elif int(DupFileToKeep['files'][0]['size']) < int(Scene['files'][0]['size']):
                    DupFileToKeep = Scene
                elif len(DupFileToKeep['files'][0]['path']) < len(Scene['files'][0]['path']):
                    DupFileToKeep = Scene
                elif isInList(whitelist, Scene['files'][0]['path']) and not isInList(whitelist, DupFileToKeep['files'][0]['path']):
                    DupFileToKeep = Scene
                elif isInList(blacklist, DupFileToKeep['files'][0]['path']) and not isInList(blacklist, Scene['files'][0]['path']):
                    DupFileToKeep = Scene
                elif isInList(graylist, Scene['files'][0]['path']) and not isInList(graylist, DupFileToKeep['files'][0]['path']):
                    DupFileToKeep = Scene
            else:
                DupFileToKeep = Scene
            # stash.Log(f"DupFileToKeep = {DupFileToKeep}")
            stash.Trace(f"KeepID={DupFileToKeep['id']}, ID={DupFile['id']} duration=({Scene['files'][0]['duration']}), Size=({Scene['files'][0]['size']}), Res=({Scene['files'][0]['width']} x {Scene['files'][0]['height']}) Name={Scene['files'][0]['path'].encode('ascii','ignore')}")
        for DupFile in DupFileDetailList:
            if DupFile['id'] != DupFileToKeep['id']:
                if isInList(whitelist, DupFile['files'][0]['path']):
                    stash.Log(f"NOT tagging duplicate, because it's in whitelist. '{DupFile['files'][0]['path'].encode('ascii','ignore')}'")
                    if dupWhitelistTagId and tagDuplicates:
                        setTagId(dupWhitelistTagId, duplicateWhitelistTag, DupFile, DupFileToKeep['files'][0]['path'])
                    QtySkipForDel+=1
                else:
                    if deleteDup:
                        stash.Log(f"Deleting duplicate '{DupFile['files'][0]['path'].encode('ascii','ignore')}'")
                        # ToDo: Add logic to check if moving file to deletion folder, or doing full delete.
                        # ToDo: Add logic to check if tag merging is needed before performing deletion.
                    elif tagDuplicates:
                        if QtyTagForDel == 0:
                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path'].encode('ascii','ignore')} for deletion with tag {duplicateMarkForDeletion}.")
                        else:
                            stash.Log(f"Tagging duplicate {DupFile['files'][0]['path'].encode('ascii','ignore')} for deletion.")
                        setTagId(dupTagId, duplicateMarkForDeletion, DupFile, DupFileToKeep['files'][0]['path'])
                    QtyTagForDel+=1
        stash.Log(SepLine)
        # Safety valve while the feature is in development: stop after 200 dups.
        if QtyDup > 200:
            break
    stash.Log(f"QtyDupSet={QtyDupSet}, QtyDup={QtyDup}, QtyTagForDel={QtyTagForDel}, QtySkipForDel={QtySkipForDel}, QtyExactDup={QtyExactDup}, QtyAlmostDup={QtyAlmostDup}")
def testSetDupTagOnScene(sceneId):
    """Debug helper: add the duplicate tag to one scene, preserving its tags.

    Fetches the scene, logs it, then updates it with its existing tag ids
    plus dupTagId.
    """
    # NOTE(review): dupTagId is assigned inside mangeDupFiles, not at module
    # level, so calling this function raises NameError unless mangeDupFiles
    # is reworked to expose it — confirm intended scope.
    scene = stash.find_scene(sceneId)
    stash.Log(f"scene={scene}")
    stash.Log(f"scene tags={scene['tags']}")
    tag_ids = [dupTagId]
    for tag in scene['tags']:
        tag_ids = tag_ids + [tag['id']]
    stash.Log(f"tag_ids={tag_ids}")
    stash.update_scene({'id' : scene['id'], 'tag_ids' : tag_ids})
if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task": if stash.PLUGIN_TASK_NAME == "merge_dup_filename_task":
mangeDupFiles(merge=True) mangeDupFiles(merge=True)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "delete_duplicates": elif stash.PLUGIN_TASK_NAME == "delete_duplicates":
mangeDupFiles(deleteDup=True) mangeDupFiles(deleteDup=True)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT")
elif stash.PLUGIN_TASK_NAME == "dryrun_delete_duplicates": elif parse_args.dup_tag:
mangeDupFiles(deleteDup=True, DryRun=True) mangeDupFiles(tagDuplicates=True)
stash.Trace(f"{stash.PLUGIN_TASK_NAME} EXIT") stash.Trace(f"Tag duplicate EXIT")
elif parse_args.remove: elif parse_args.remove:
mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun) mangeDupFiles(deleteDup=True)
stash.Trace(f"Delete duplicate (DryRun={parse_args.dryrun}) EXIT") stash.Trace(f"Delete duplicate EXIT")
elif parse_args.dryrun:
mangeDupFiles(deleteDup=True, DryRun=parse_args.dryrun)
stash.Trace(f"Dryrun delete duplicate EXIT")
else: else:
stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE})") stash.Log(f"Nothing to do!!! (PLUGIN_ARGS_MODE={stash.PLUGIN_TASK_NAME})")
stash.Trace("\n*********************************\nEXITING ***********************\n*********************************") stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")

View File

@@ -4,38 +4,54 @@ version: 0.1.0
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
settings: settings:
mergeDupFilename: mergeDupFilename:
displayName: Before deletion, merge potential source in the duplicate file names for tag names, performers, and studios. displayName: Merge Duplicate Tags
description: Enable to description: Before deletion, merge potential source in the duplicate file names for tag names, performers, and studios.
type: BOOLEAN type: BOOLEAN
moveToTrashCan: moveToTrashCan:
displayName: Trash Can displayName: Trash Can
description: Enable to move files to trash can instead of permanently delete file. description: Enable to move files to trash can instead of permanently delete file.
type: BOOLEAN type: BOOLEAN
whitelist: dupFileTag:
displayName: Duplicate File Tag Name
description: (Default = DuplicateMarkForDeletion) Tag used to tag duplicates with lower resolution, duration, and file name length.
type: STRING
dupWhiteListTag:
displayName: Duplicate Whitelist Tag Name
description: If populated, a tag name used to tag duplicates in the whitelist. E.g. DuplicateWhitelistFile
type: STRING
zwhitelist:
displayName: White List displayName: White List
description: A comma separated list of preferential paths to determine which duplicate should be the primary. Listed in order of preference. description: A comma separated list of paths NOT to be deleted. E.g. C:\Favorite\,E:\MustKeep\
type: STRING
zxgraylist:
displayName: Gray List
description: List of preferential paths to determine which duplicate should be the primary. E.g. C:\2nd_Favorite\,H:\ShouldKeep\
type: STRING
zzblacklist:
displayName: Black List
description: List of LEAST preferential paths to determine primary candidates for deletion. E.g. C:\Downloads\,F:\DeleteMeFirst\
type: STRING type: STRING
zzdebugTracing: zzdebugTracing:
displayName: Debug Tracing displayName: Debug Tracing
description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log description: (Default=false) [***For Advanced Users***] Enable debug tracing. When enabled, additional tracing logging is added to Stash\plugins\DupFileManager\DupFileManager.log
type: BOOLEAN type: BOOLEAN
zzdryRun:
displayName: Dry Run
description: Enable to run script in [Dry Run] mode. In this mode, Stash does NOT call meta_scan, and only logs the action it would have taken.
type: BOOLEAN
exec: exec:
- python - python
- "{pluginDir}/DupFileManager.py" - "{pluginDir}/DupFileManager.py"
interface: raw interface: raw
tasks: tasks:
- name: Merge Duplicate Filename - name: Tag Duplicate Filename
description: Merge duplicate filename sourcetag names, performers, and studios. description: Set tag DuplicateMarkForDeletion to the duplicate with lower resolution, duration, file name length, and/or black list path.
defaultArgs: defaultArgs:
mode: merge_dup_filename_task mode: merge_dup_filename_task
- name: Delete Duplicates - name: Delete Duplicates
description: Delete duplicate files description: Delete duplicate files
defaultArgs: defaultArgs:
mode: delete_duplicates mode: delete_duplicates
- name: Merge Duplicate Filename
description: Merge duplicate filename sourcetag names, performers, and studios.
defaultArgs:
mode: merge_dup_filename_task
- name: Dry Run Delete Duplicates - name: Dry Run Delete Duplicates
description: Only perform a dry run (logging only) of duplicate file deletions. Dry Run setting is ignored when running this task. description: Only perform a dry run (logging only) of duplicate file deletions. Dry Run setting is ignored when running this task.
defaultArgs: defaultArgs:

View File

@@ -2,8 +2,10 @@
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/) # By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager # Get the latest developers version from following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/DupFileManager
config = { config = {
# Define black list to determine which duplicates should be deleted first. # Character used to seperate items on the whitelist, blacklist, and graylist
"blacklist_paths": [], #Example: "blacklist_paths": ['C:\\SomeMediaPath\\subpath', "E:\\YetAnotherPath\\subpath', "E:\\YetAnotherPath\\secondSubPath'] "listSeparator" : ",",
# If enabled, adds the primary duplicate path to the scene detail.
"addPrimaryDupPathToDetails" : True,
# If enabled, ignore reparsepoints. For Windows NT drives only. # If enabled, ignore reparsepoints. For Windows NT drives only.
"ignoreReparsepoints" : True, "ignoreReparsepoints" : True,

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler from logging.handlers import RotatingFileHandler
import inspect, sys, os, pathlib, logging, json import re, inspect, sys, os, pathlib, logging, json
import concurrent.futures import concurrent.futures
from stashapi.stash_types import PhashDistance from stashapi.stash_types import PhashDistance
import __main__ import __main__
@@ -30,7 +30,6 @@ class StashPluginHelper(StashInterface):
PLUGINS_PATH = None PLUGINS_PATH = None
pluginSettings = None pluginSettings = None
pluginConfig = None pluginConfig = None
STASH_INTERFACE_INIT = False
STASH_URL = None STASH_URL = None
STASH_CONFIGURATION = None STASH_CONFIGURATION = None
JSON_INPUT = None JSON_INPUT = None
@@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface):
pluginLog = None pluginLog = None
logLinePreviousHits = [] logLinePreviousHits = []
thredPool = None thredPool = None
STASH_INTERFACE_INIT = False
# Prefix message value # Prefix message value
LEV_TRACE = "TRACE: " LEV_TRACE = "TRACE: "
@@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface):
if logToNormSet: self.log_to_norm = logToNormSet if logToNormSet: self.log_to_norm = logToNormSet
if stash_url and len(stash_url): self.STASH_URL = stash_url if stash_url and len(stash_url): self.STASH_URL = stash_url
self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
# print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
@@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self): def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
# def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
# query = """ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
# query FindDuplicateScenes($distance: Int) { query = """
# findDuplicateScenes(distance: $distance) { query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
# ...SceneSlim findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
# } ...SceneSlim
# } }
# """ }
# if fragment: """
# query = re.sub(r'\.\.\.SceneSlim', fragment, query) if fragment:
# else: query = re.sub(r'\.\.\.SceneSlim', fragment, query)
# query = """ else:
# query FindDuplicateScenes($distance: Int) { query += "fragment SceneSlim on Scene { id }"
# findDuplicateScenes(distance: $distance)
# } variables = { "distance": distance, "duration_diff": duration_diff }
# """ result = self.call_GQL(query, variables)
# variables = { return result['findDuplicateScenes']
# "distance": distance
# }
# result = self.call_GQL(query, variables)
# return result['findDuplicateScenes']

View File

@@ -1,3 +1,3 @@
stashapp-tools >= 0.2.49 stashapp-tools >= 0.2.50
pyYAML pyYAML
watchdog watchdog

View File

@@ -1,6 +1,6 @@
from stashapi.stashapp import StashInterface from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler from logging.handlers import RotatingFileHandler
import inspect, sys, os, pathlib, logging, json import re, inspect, sys, os, pathlib, logging, json
import concurrent.futures import concurrent.futures
from stashapi.stash_types import PhashDistance from stashapi.stash_types import PhashDistance
import __main__ import __main__
@@ -30,7 +30,6 @@ class StashPluginHelper(StashInterface):
PLUGINS_PATH = None PLUGINS_PATH = None
pluginSettings = None pluginSettings = None
pluginConfig = None pluginConfig = None
STASH_INTERFACE_INIT = False
STASH_URL = None STASH_URL = None
STASH_CONFIGURATION = None STASH_CONFIGURATION = None
JSON_INPUT = None JSON_INPUT = None
@@ -62,6 +61,7 @@ class StashPluginHelper(StashInterface):
pluginLog = None pluginLog = None
logLinePreviousHits = [] logLinePreviousHits = []
thredPool = None thredPool = None
STASH_INTERFACE_INIT = False
# Prefix message value # Prefix message value
LEV_TRACE = "TRACE: " LEV_TRACE = "TRACE: "
@@ -106,7 +106,7 @@ class StashPluginHelper(StashInterface):
if logToNormSet: self.log_to_norm = logToNormSet if logToNormSet: self.log_to_norm = logToNormSet
if stash_url and len(stash_url): self.STASH_URL = stash_url if stash_url and len(stash_url): self.STASH_URL = stash_url
self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__ self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem.lower() self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
# print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr) # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log" self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
@@ -355,24 +355,20 @@ class StashPluginHelper(StashInterface):
def rename_generated_files(self): def rename_generated_files(self):
return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}") return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
# def find_duplicate_scenes(self, distance: PhashDistance=PhashDistance.EXACT, fragment=None):
# query = """ def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
# query FindDuplicateScenes($distance: Int) { query = """
# findDuplicateScenes(distance: $distance) { query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
# ...SceneSlim findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
# } ...SceneSlim
# } }
# """ }
# if fragment: """
# query = re.sub(r'\.\.\.SceneSlim', fragment, query) if fragment:
# else: query = re.sub(r'\.\.\.SceneSlim', fragment, query)
# query = """ else:
# query FindDuplicateScenes($distance: Int) { query += "fragment SceneSlim on Scene { id }"
# findDuplicateScenes(distance: $distance)
# } variables = { "distance": distance, "duration_diff": duration_diff }
# """ result = self.call_GQL(query, variables)
# variables = { return result['findDuplicateScenes']
# "distance": distance
# }
# result = self.call_GQL(query, variables)
# return result['findDuplicateScenes']

View File

@@ -12,15 +12,21 @@ config = {
# Note: Look at filemonitor_task_examples.py for many example task having more detailed usage. # Note: Look at filemonitor_task_examples.py for many example task having more detailed usage.
"task_scheduler": [ "task_scheduler": [
# To create a daily task, include each day of the week for the weekday field. # To create a daily task, include each day of the week for the weekday field.
{"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "06:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM) # Optional field for task "Auto Tag" is 'paths'. For detail usage, see example #A3: in filemonitor_task_examples.py
{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM) {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 6AM)
# Task "Create Tags" is a plugin task. All plugin task have a REQUIRED pluginId field and an optional validateDir field. For detail usage, see examples #B1 and #B2 in filemonitor_task_examples.py
{"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser",
"weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser] "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] - > [Path Parser] -> [Create Tags] (Daily at 5AM) : This task requires plugin [Path Parser]
{"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)
# The following tasks are scheduled weekly # The following tasks are scheduled weekly
{"task" : "Generate", "weekday" : "saturday", "time" : "07:00"}, # Generated Content-> [Generate] (Every saturday at 7AM) # Optional field for task "Scan", "Auto Tag", and "Clean" is 'paths'. For detail usage, see examples #A3: in filemonitor_task_examples.py
{"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM) {"task" : "Scan", "weekday" : "saturday", "time" : "03:00"}, # Library -> [Scan] (Weekly) (Every saturday at 3AM)
{"task" : "Auto Tag", "weekday" : "saturday", "time" : "03:30"}, # Auto Tag -> [Auto Tag] (Weekly) (Every saturday at 3:30AM)
{"task" : "Generate", "weekday" : "saturday", "time" : "04:00"}, # Generated Content-> [Generate] (Every saturday at 4AM)
{"task" : "Clean", "weekday" : "saturday", "time" : "04:30"}, # Maintenance -> [Clean] (Every saturday at 4:30AM)
{"task" : "Clean Generated Files", "weekday" : "saturday", "time" : "05:00"}, # Maintenance -> [Clean Generated Files] (Every saturday at 5AM)
{"task" : "Optimise Database", "weekday" : "saturday", "time" : "05:30"}, # Maintenance -> [Optimise Database] (Every saturday at 5:30AM)
# To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field. # To perform a task monthly, specify the day of the month as in the weekly schedule format, and add a monthly field.
# The monthly field value must be 1, 2, 3, or 4. # The monthly field value must be 1, 2, 3, or 4.
@@ -28,25 +34,25 @@ config = {
# 2 = 2nd specified weekday of the month. Example 2nd monday of the month. # 2 = 2nd specified weekday of the month. Example 2nd monday of the month.
# 3 = 3rd specified weekday of the month. # 3 = 3rd specified weekday of the month.
# 4 = 4th specified weekday of the month. # 4 = 4th specified weekday of the month.
# The following task is scheduled monthly # The Backup task is scheduled monthly
# Optional field for task "Backup" is maxBackup. For detail usage, see example #A5 in filemonitor_task_examples.py
{"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00) {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
{"task" : "Clean", "weekday" : "sunday", "time" : "01:00", "monthly" : 3}, # Maintenance -> [Clean]
{"task" : "Clean Generated Files", "weekday" : "sunday", "time" : "03:00", "monthly" : 3}, # Maintenance -> [Clean Generated Files]
# The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash. # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.
# This task only works if FileMonitor is started as a service or in command line mode. # This task only works if FileMonitor is started as a service or in command line mode.
# For more detailed usage, see examples #C1 and #C2 in filemonitor_task_examples.py # Optional fields are 'command' and 'RunAfter'. For detail usage, see examples #C1 and #C2 in filemonitor_task_examples.py
{"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes {"task" : "CheckStashIsRunning", "minutes" :5}, # Checks every 5 minutes
], ],
# ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line.
"apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw"
# Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue. # Timeout in seconds. This is how often FileMonitor will check the scheduler and (in-plugin mode) check if another job (Task) is in the queue.
"timeOut": 60, "timeOut": 60,
# ApiKey only needed when Stash credentials are set and while calling FileMonitor via command line.
"apiKey" : "", # Example: "eyJabccideJIUfg1NigRInD345I6dfpXVCfd.eyJ1abcDEfGheHRlHJiJklMonPQ32FsVewtsfSIsImlhdCI6MTcyMzg2NzkwOH0.5bkHU6sfs3532dsryu1ki3iFBwnd_4AHs325yHljsPw"
# Enable to run metadata clean task after file deletion. # Enable to run metadata clean task after file deletion.
"runCleanAfterDelete": False, "runCleanAfterDelete": False,
# Enable to run metadata_generate (Generate Content) after metadata scan. # Enable to run metadata_generate (Generate Content) after metadata scan.
"runGenerateContent": False, "runGenerateContent": False,
# When populated (comma separated list [lower-case]), only scan for changes for specified file extension # When populated (comma separated list [lower-case]), only scan for changes for specified file extension
"fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t" "fileExtTypes" : "", # Example: "mp4,mpg,mpeg,m2ts,wmv,avi,m4v,flv,mov,asf,mkv,divx,webm,ts,mp2t"
# When populated, only include file changes in specified paths. # When populated, only include file changes in specified paths.
@@ -54,7 +60,7 @@ config = {
# When populated, exclude file changes in paths that start with specified entries. # When populated, exclude file changes in paths that start with specified entries.
"excludePathChanges" :[], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"] "excludePathChanges" :[], # Example: ["C:\\MyVideos\\SomeSubFolder\\", "C:\\MyImages\\folder\\Sub\\"]
# The following fields are ONLY used when running FileMonitor in script mode. # The following fields are ONLY used when running FileMonitor in command line mode.
"endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server "endpoint_Scheme" : "http", # Define endpoint to use when contacting the Stash server
"endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server "endpoint_Host" : "0.0.0.0", # Define endpoint to use when contacting the Stash server
"endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server "endpoint_Port" : 9999, # Define endpoint to use when contacting the Stash server

View File

@@ -41,7 +41,7 @@ task_examples = {
# Example#B4: Task to execute a command with optional args field, and using keyword <plugin_path>, which gets replaced with filemonitor.py current directory. # Example#B4: Task to execute a command with optional args field, and using keyword <plugin_path>, which gets replaced with filemonitor.py current directory.
{"task" : "execute", "command" : "<plugin_path>HelloWorld.cmd", "args" : "--name David", "minutes" : 0}, {"task" : "execute", "command" : "<plugin_path>HelloWorld.cmd", "args" : "--name David", "minutes" : 0},
# Example#C1 Some OS may need the "command" field, which specifies the binary path # Example#C1 Some OS may need the "command" field, which specifies the binary path.
{"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-linux-arm64v8", "minutes" :0}, {"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-linux-arm64v8", "minutes" :0},
# Example#C2 RunAfter field can be used to specify task to run after starting Stash # Example#C2 RunAfter field can be used to specify task to run after starting Stash
{"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0}, {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0},

View File

@@ -1,3 +1,3 @@
stashapp-tools >= 0.2.49 stashapp-tools >= 0.2.50
pyYAML pyYAML
watchdog watchdog

View File

@@ -1,4 +1,4 @@
# RenameFile: Ver 0.4.1 (By David Maisonave) # RenameFile: Ver 0.4.2 (By David Maisonave)
RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks. RenameFile is a [Stash](https://github.com/stashapp/stash) plugin which performs the following tasks.
- **Rename Scene File Name** (On-The-Fly) - **Rename Scene File Name** (On-The-Fly)
- **Append tag names** to file name - **Append tag names** to file name

View File

@@ -117,6 +117,7 @@ if debugTracing: logger.info("Debug Tracing................")
exclude_paths = config["pathToExclude"] exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split() exclude_paths = exclude_paths.split()
if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................") if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................")
excluded_tags = config["excludeTags"]
# Extract tag whitelist from settings # Extract tag whitelist from settings
tag_whitelist = config["tagWhitelist"] tag_whitelist = config["tagWhitelist"]
if debugTracing: logger.info("Debug Tracing................") if debugTracing: logger.info("Debug Tracing................")
@@ -203,7 +204,9 @@ def form_filename(original_file_stem, scene_details):
if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................") if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)): if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)):
return # Skip adding more tags if the maximum limit is reached return # Skip adding more tags if the maximum limit is reached
if tag_name in excluded_tags:
if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})")
return
# Check if the tag name is in the whitelist # Check if the tag name is in the whitelist
if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist): if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist):
if WRAPPER_STYLES.get('tag'): if WRAPPER_STYLES.get('tag'):
@@ -261,8 +264,8 @@ def form_filename(original_file_stem, scene_details):
scene_date += POSTFIX_STYLES.get('date') scene_date += POSTFIX_STYLES.get('date')
if debugTracing: logger.info("Debug Tracing................") if debugTracing: logger.info("Debug Tracing................")
if WRAPPER_STYLES.get('date'): if WRAPPER_STYLES.get('date'):
filename_parts.append(f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}") scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}"
else: if scene_date not in title:
filename_parts.append(scene_date) filename_parts.append(scene_date)
elif key == 'resolution': elif key == 'resolution':
width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
@@ -270,40 +273,40 @@ def form_filename(original_file_stem, scene_details):
if width and height: if width and height:
resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution') resolution = width + POSTFIX_STYLES.get('width_height_seperator') + height + POSTFIX_STYLES.get('resolution')
if WRAPPER_STYLES.get('resolution'): if WRAPPER_STYLES.get('resolution'):
filename_parts.append(f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}") resolution = f"{WRAPPER_STYLES['resolution'][0]}{resolution}{WRAPPER_STYLES['width'][1]}"
else: if resolution not in title:
filename_parts.append(resolution) filename_parts.append(resolution)
elif key == 'width': elif key == 'width':
width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string width = str(scene_details.get('files', [{}])[0].get('width', '')) # Convert width to string
if width: if width:
width += POSTFIX_STYLES.get('width') width += POSTFIX_STYLES.get('width')
if WRAPPER_STYLES.get('width'): if WRAPPER_STYLES.get('width'):
filename_parts.append(f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}") width = f"{WRAPPER_STYLES['width'][0]}{width}{WRAPPER_STYLES['width'][1]}"
else: if width not in title:
filename_parts.append(width) filename_parts.append(width)
elif key == 'height': elif key == 'height':
height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string height = str(scene_details.get('files', [{}])[0].get('height', '')) # Convert height to string
if height: if height:
height += POSTFIX_STYLES.get('height') height += POSTFIX_STYLES.get('height')
if WRAPPER_STYLES.get('height'): if WRAPPER_STYLES.get('height'):
filename_parts.append(f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}") height = f"{WRAPPER_STYLES['height'][0]}{height}{WRAPPER_STYLES['height'][1]}"
else: if height not in title:
filename_parts.append(height) filename_parts.append(height)
elif key == 'video_codec': elif key == 'video_codec':
video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase video_codec = scene_details.get('files', [{}])[0].get('video_codec', '').upper() # Convert to uppercase
if video_codec: if video_codec:
video_codec += POSTFIX_STYLES.get('video_codec') video_codec += POSTFIX_STYLES.get('video_codec')
if WRAPPER_STYLES.get('video_codec'): if WRAPPER_STYLES.get('video_codec'):
filename_parts.append(f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}") video_codec = f"{WRAPPER_STYLES['video_codec'][0]}{video_codec}{WRAPPER_STYLES['video_codec'][1]}"
else: if video_codec not in title:
filename_parts.append(video_codec) filename_parts.append(video_codec)
elif key == 'frame_rate': elif key == 'frame_rate':
frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS' frame_rate = str(scene_details.get('files', [{}])[0].get('frame_rate', '')) + 'FPS' # Convert to string and append ' FPS'
if frame_rate: if frame_rate:
frame_rate += POSTFIX_STYLES.get('frame_rate') frame_rate += POSTFIX_STYLES.get('frame_rate')
if WRAPPER_STYLES.get('frame_rate'): if WRAPPER_STYLES.get('frame_rate'):
filename_parts.append(f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}") frame_rate = f"{WRAPPER_STYLES['frame_rate'][0]}{frame_rate}{WRAPPER_STYLES['frame_rate'][1]}"
else: if frame_rate not in title:
filename_parts.append(frame_rate) filename_parts.append(frame_rate)
elif key == 'galleries': elif key == 'galleries':
galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])] galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]

View File

@@ -1,6 +1,6 @@
name: RenameFile name: RenameFile
description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab. description: Renames video (scene) file names when the user edits the [Title] field located in the scene [Edit] tab.
version: 0.4.1 version: 0.4.2
url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile url: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
settings: settings:
performerAppend: performerAppend:

View File

@@ -37,6 +37,8 @@ config = {
"frame_rate": 'FR', "frame_rate": 'FR',
"date": '', "date": '',
}, },
# Add tags to exclude from RenameFile.
"excludeTags": ["DuplicateMarkForDeletion", "DuplicateWhitelistFile","_DuplicateMarkForDeletion", "_DuplicateWhitelistFile","_DuplicateMarkForDeletion_", "_DuplicateWhitelistFile_"],
# Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath" # Add path(s) to exclude from RenameFile. Example Usage: r"/path/to/exclude1" When entering multiple paths, use space. Example: r"/path_1_to/exclude" r"/someOtherPath2Exclude" r"/yetAnotherPath"
"pathToExclude": "", "pathToExclude": "",
# Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3" # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"

View File

@@ -1,3 +1,3 @@
stashapp-tools >= 0.2.49 stashapp-tools >= 0.2.50
pyYAML pyYAML
requests requests