forked from Github/Axter-Stash
refrag code for RenameFile plugin
@@ -42,10 +42,16 @@ stash = StashPluginHelper(
        config=config,
        maxbytes=10*1024*1024,
        )
if len(sys.argv) > 1:
    stash.Log(f"argv = {sys.argv}")
else:
    stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
stash.Status(logLevel=logging.DEBUG)
stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")

# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
# stash.encodeToUtf8 = True

LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
@@ -285,16 +291,20 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
    stash.Trace("#########################################################################")
    stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
    DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
    qtyResults = len(DupFileSets)
    stash.Trace("#########################################################################")
    for DupFileSet in DupFileSets:
        stash.Trace(f"DupFileSet={DupFileSet}")
        QtyDupSet+=1
        stash.Progress(QtyDupSet, qtyResults)
        SepLine = "---------------------------"
        DupFileToKeep = ""
        DupToCopyFrom = ""
        DupFileDetailList = []
        for DupFile in DupFileSet:
            QtyDup+=1
            stash.log.sl.progress(f"Scene ID = {DupFile['id']}")
            time.sleep(2)
            Scene = stash.find_scene(DupFile['id'])
            sceneData = f"Scene = {Scene}"
            stash.Trace(sceneData, toAscii=True)
@@ -392,10 +402,13 @@ def deleteTagggedDuplicates():
    QtyFailedQuery = 0
    stash.Trace("#########################################################################")
    sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
    stash.Trace(f"Found the following scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
    qtyResults = len(sceneIDs)
    stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
    for sceneID in sceneIDs:
        # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
        QtyDup += 1
        prgs = QtyDup / qtyResults
        stash.Progress(QtyDup, qtyResults)
        scene = stash.find_scene(sceneID['id'])
        if scene == None or len(scene) == 0:
            stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")
@@ -351,7 +351,65 @@ class StashPluginHelper(StashInterface):
            DestData = self.find_scene(DestData)
        return self._mergeMetadata.merge(SrcData, DestData)

    # Extends class StashInterface with functions which are not yet in the class
    def Progress(self, currentIndex, maxCount):
        progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
        self.log.progress(progress)

    def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
        """Runs a plugin operation.
           The operation is run immediately and does not use the job queue.
        Args:
            plugin_id (ID): plugin_id
            task_mode (str, optional): Plugin task mode to perform
            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
        Returns:
            A map of the result.
        """
        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
            runPluginOperation(plugin_id: $plugin_id, args: $args)
            }"""
        if task_mode != None:
            args.update({"mode" : task_mode})
        variables = {
            "plugin_id": plugin_id,
            "args": args,
        }
        if asyn:
            self.Submit(self.call_GQL, query, variables)
            return f"Made asynchronous call for plugin {plugin_id}"
        else:
            return self.call_GQL(query, variables)

    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"

        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']

    # #################################################################################################
    # The below functions extend class StashInterface with functions which are not yet in the class
    def get_all_scenes(self):
        query_all_scenes = """
            query AllScenes {
                allScenes {
                    id
                    updated_at
                }
            }
        """
        return self.call_GQL(query_all_scenes)

    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
        query = """
            mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
@@ -394,23 +452,6 @@ class StashPluginHelper(StashInterface):
    def rename_generated_files(self):
        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")

    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"

        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']

class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
    srcData = None
    destData = None
@@ -0,0 +1 @@
Hello, world#2 from batch script! arg =
@@ -0,0 +1 @@
Hello, world from batch script! arg = "--name David"
@@ -351,7 +351,65 @@ class StashPluginHelper(StashInterface):
            DestData = self.find_scene(DestData)
        return self._mergeMetadata.merge(SrcData, DestData)

    # Extends class StashInterface with functions which are not yet in the class
    def Progress(self, currentIndex, maxCount):
        progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
        self.log.progress(progress)

    def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
        """Runs a plugin operation.
           The operation is run immediately and does not use the job queue.
        Args:
            plugin_id (ID): plugin_id
            task_mode (str, optional): Plugin task mode to perform
            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
        Returns:
            A map of the result.
        """
        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
            runPluginOperation(plugin_id: $plugin_id, args: $args)
            }"""
        if task_mode != None:
            args.update({"mode" : task_mode})
        variables = {
            "plugin_id": plugin_id,
            "args": args,
        }
        if asyn:
            self.Submit(self.call_GQL, query, variables)
            return f"Made asynchronous call for plugin {plugin_id}"
        else:
            return self.call_GQL(query, variables)

    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"

        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']

    # #################################################################################################
    # The below functions extend class StashInterface with functions which are not yet in the class
    def get_all_scenes(self):
        query_all_scenes = """
            query AllScenes {
                allScenes {
                    id
                    updated_at
                }
            }
        """
        return self.call_GQL(query_all_scenes)

    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
        query = """
            mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
@@ -394,23 +452,6 @@ class StashPluginHelper(StashInterface):
    def rename_generated_files(self):
        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")

    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"

        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']

class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
    srcData = None
    destData = None
@@ -54,7 +54,7 @@ stash = StashPluginHelper(
        maxbytes=10*1024*1024,
        apiKey=parse_args.apikey
        )
stash.Status()
stash.Status(logLevel=logging.DEBUG)
stash.Log(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.DRY_RUN={stash.DRY_RUN}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")

exitMsg = "Change success!!"
@@ -162,29 +162,31 @@ class StashScheduler: # Stash Scheduler
        weekDays = task['weekday'].lower()
        if 'monthly' in task:
            stash.Log(f"Adding to scheduler task '{task['task']}' monthly on number {task['monthly']} {task['weekday']} at {task['time']}")
        elif task['weekday'] == "every":
            stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every day at {task['time']}")
        else:
            stash.Log(f"Adding to scheduler task '{task['task']}' (weekly) every {task['weekday']} at {task['time']}")

        hasValidDay = False
        if "monday" in weekDays:
        if "monday" in weekDays or "every" in weekDays:
            schedule.every().monday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "tuesday" in weekDays:
        if "tuesday" in weekDays or "every" in weekDays:
            schedule.every().tuesday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "wednesday" in weekDays:
        if "wednesday" in weekDays or "every" in weekDays:
            schedule.every().wednesday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "thursday" in weekDays:
        if "thursday" in weekDays or "every" in weekDays:
            schedule.every().thursday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "friday" in weekDays:
        if "friday" in weekDays or "every" in weekDays:
            schedule.every().friday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "saturday" in weekDays:
        if "saturday" in weekDays or "every" in weekDays or "weekend" in weekDays:
            schedule.every().saturday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
        if "sunday" in weekDays:
        if "sunday" in weekDays or "every" in weekDays or "weekend" in weekDays:
            schedule.every().sunday.at(task['time']).do(self.runTask, task)
            hasValidDay = True
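
        # Note: the seven per-day blocks above could equivalently be table-driven; a minimal sketch
        # of that alternative (same assumed behavior, not code from this commit):
        #   for day in ("monday","tuesday","wednesday","thursday","friday","saturday","sunday"):
        #       if day in weekDays or "every" in weekDays or (day in ("saturday","sunday") and "weekend" in weekDays):
        #           getattr(schedule.every(), day).at(task['time']).do(self.runTask, task)
        #           hasValidDay = True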

@@ -214,7 +216,7 @@ class StashScheduler: # Stash Scheduler
        if task['task'] == "Clean":
            result = self.jobIdOutput(stash.metadata_clean(paths=targetPaths, dry_run=stash.DRY_RUN))
        elif task['task'] == "Clean Generated Files":
            result = self.jobIdOutput(stash.metadata_clean_generated()))
            result = self.jobIdOutput(stash.metadata_clean_generated())
        elif task['task'] == "Generate":
            result = self.jobIdOutput(stash.metadata_generate())
        elif task['task'] == "Backup":
@@ -302,7 +304,6 @@ class StashScheduler: # Stash Scheduler

    def runPluginTask(self, task):
        try:
            if 'pluginId' in task and task['pluginId'] != "":
            invalidDir = False
            validDirMsg = ""
            if 'validateDir' in task and task['validateDir'] != "":
@@ -317,16 +318,29 @@ class StashScheduler: # Stash Scheduler
                        validDirMsg = f"Valid path in {basePluginPath}"
            if invalidDir:
                stash.Error(f"Could not run task '{task['task']}' because sub directory '{task['validateDir']}' does not exist under path '{stash.PLUGINS_PATH}'")
            else:
                if task['task'] == "Delete Duplicates" and not turnOnSchedulerDeleteDup:
                    return None
                if not turnOnSchedulerDeleteDup and (task['task'] == "Delete Duplicates" or ('taskName' in task and task['taskName'] == "Delete Duplicates") or ('taskMode' in task and task['taskMode'] == "delete_duplicates_task")):
                    stash.Warn(f"Not running task {task['task']}, because [Delete Duplicate Scheduler] is NOT enabled. See Stash UI option Settings->Plugins->Plugins->FileMonitor->[Delete Duplicate Scheduler]")
                    return None
                stash.Trace(f"Running plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
                # The pluginId field is only here for backward compatibility, and should not be used in future scheduler configurations
                if 'pluginId' in task and task['pluginId'] != "": # Obsolete method
                    stash.Trace(f"Adding to Task Queue plugin task pluginID={task['pluginId']}, task name = {task['task']}. {validDirMsg}")
                    return stash.run_plugin_task(plugin_id=task['pluginId'], task_name=task['task'])
                else:
                    stash.Error(f"Can not run task '{task['task']}', because it's an invalid task.")
                    stash.LogOnce(f"If task '{task['task']}' is supposed to be a built-in task, check for correct task name spelling.")
                    stash.LogOnce(f"If task '{task['task']}' is supposed to be a plugin, make sure to include the pluginId field in the task. task={task}")
                    taskName = None
                    taskMode = None
                    if 'taskName' in task:
                        taskName = task['taskName']
                    if 'taskMode' in task:
                        taskMode = task['taskMode']
                    if ('taskQue' in task and task['taskQue'] == False) or taskName == None:
                        stash.Log(f"Running plugin task pluginID={task['task']}, task mode = {taskMode}. {validDirMsg}")
                        # Asynchronous threading logic to call run_plugin, because it's a blocking call.
                        stash.run_plugin(plugin_id=task['task'], task_mode=taskMode, asyn=True)
                        return None
                    else:
                        stash.Trace(f"Adding to Task Queue plugin task pluginID={task['task']}, task name = {taskName}. {validDirMsg}")
                        return stash.run_plugin_task(plugin_id=task['task'], task_name=taskName)
        except Exception as e:
            stash.LogOnce(f"Failed to call plugin {task['task']} with plugin-ID {task['pluginId']}. Error: {e}")
            pass
@@ -720,7 +734,7 @@ elif stash.PLUGIN_TASK_NAME == StartFileMonitorAsAPluginTaskID:
elif not stash.CALLED_AS_STASH_PLUGIN:
    try:
        start_library_monitor()
        stash.Trace(f"Command line FileMonitor EXIT")
        stash.Trace("Command line FileMonitor EXIT")
    except Exception as e:
        tb = traceback.format_exc()
        stash.Error(f"Exception while running FileMonitor from the command line. Error: {e}\nTraceBack={tb}")

@@ -11,15 +11,15 @@ config = {
    # The hour section in time MUST be a two digit number, and use military time format. Example: 1PM = "13:00" and 1AM = "01:00"
    # Note: Look at filemonitor_task_examples.py for many example tasks with more detailed usage.
    "task_scheduler": [
        # To create a daily task, include each day of the week for the weekday field.
        # To create a daily task, include each day of the week for the weekday field or "every"
        # Optional field for task "Auto Tag" is 'paths'. For detailed usage, see example #A3 in filemonitor_task_examples.py
        {"task" : "Auto Tag", "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:00"}, # Auto Tag -> [Auto Tag] (Daily at 5AM)
        # Task "Create Tags" is a plugin task. All plugin tasks have a REQUIRED pluginId field and an optional validateDir field. For detailed usage, see examples #B1 and #B2 in filemonitor_task_examples.py
        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser",
            "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "05:30"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5:30AM) : This task requires plugin [Path Parser]
        # The following task runs plugin DupFileManager if the plugin is installed.
        {"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
            "weekday" : "monday,tuesday,wednesday,thursday,friday,saturday,sunday", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Daily at 2:30AM)
        # Task "Create Tags" is a plugin task. Optional fields are the taskName and validateDir fields. For detailed usage, see examples #B1, #B2, #B3, and #B4 in filemonitor_task_examples.py
        {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser",
            "weekday" : "every", "time" : "05:30"}, # [Plugin Tasks] -> [Path Parser] -> [Create Tags] (Daily at 5:30AM) : This task requires plugin [Path Parser]
        # The following task runs plugin DupFileManager (tag_duplicates_task) if the plugin is installed. The task runs in the background because of "taskQue" : False
        {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "validateDir" : "DupFileManager", "taskQue" : False,
            "weekday" : "every", "time" : "02:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates] (Daily at 2:30AM)
        {"task" : "Optimise Database", "weekday" : "monday,tuesday,wednesday,thursday,friday", "time" : "07:00"}, # Maintenance -> [Optimise Database] (Every weekday at 7AM)

        # The following tasks are scheduled weekly
@@ -41,7 +41,7 @@ config = {
        # Optional field for task "Backup" is maxBackup. For detailed usage, see example #A5 in filemonitor_task_examples.py
        {"task" : "Backup", "weekday" : "sunday", "time" : "01:00", "monthly" : 2}, # Backup -> [Backup] 2nd sunday of the month at 1AM (01:00)
        # The following task requires plugin DupFileManager and UI option [Delete Duplicate Scheduler] enabled.
        {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager",
        {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager",
            "weekday" : "sunday", "time" : "02:00", "monthly" : 2}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates] 2nd sunday of the month at 2AM (02:00)

        # The [CheckStashIsRunning] task checks if Stash is running. If not running, it will start up stash.

@@ -8,11 +8,9 @@ self_unit_test = {
        {"task" : "TestBadTaskNameError", "minutes" : 1}, # Test invalid task name
        {"task" : "execute", "minutes" : 1}, # Test invalid task (missing command)
        {"task" : "python", "minutes" : 1}, # Test invalid task (missing scripts)
        {"task" : "PluginWithOutID", "minutes" : 1}, # Test invalid task (missing pluginId)
        {"task" : "execute", "command" : "", "minutes" : 1}, # Test invalid task (missing command)
        {"task" : "python", "script" : "", "minutes" : 1}, # Test invalid task (missing scripts)
        {"task" : "PluginWithOutID", "pluginId" : "", "minutes" : 1}, # Test invalid task (missing pluginId)
        {"task" : "Foo","pluginId":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
        {"task" : "Foo","taskName":"foo","validateDir":"foo", "minutes" : 1}, # Test invalid task (missing plugin directory)
        {"task" : "Log", "msg" : "Testing Scheduled Log", "minutes" : 1}, # Test plugin log file
        {"task" : "Trace", "minutes" : 1}, # Test plugin trace logging
        {"task" : "LogOnce", "seconds" :15}, # Test LogOnce
@@ -23,24 +21,25 @@ self_unit_test = {
    ],
    "task_scheduler_set_time": [
        # Test [Delete Duplicates] with [Delete Duplicate Scheduler] disabled, and then with it enabled.
        {"task" : "Delete Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager","weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
        {"task" : "Generate", "weekday" : "sunday", "time" : "17:56"},
        {"task" : "Clean", "weekday" : "sunday", "time" : "17:56"},
        {"task" : "Auto Tag", "weekday" : "sunday", "time" : "17:56"},
        {"task" : "Optimise Database", "weekday" : "sunday", "time" : "17:56"},
        {"task" : "Create Tags", "pluginId" : "pathParser", "validateDir" : "pathParser", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Running plugin task: Create Tags
        {"task" : "Tag Duplicates", "pluginId" : "DupFileManager", "validateDir" : "DupFileManager", "weekday" : "sunday", "time" : "17:56"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
        {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "sunday", "time" : "17:56"},
        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Optimising database...
        {"task" : "Clean Generated Files", "weekday" : "sunday", "time" : "17:56"},
        {"task" : "RenameGeneratedFiles", "weekday" : "sunday", "time" : "17:56"}, # In task queue as -> Migrating scene hashes...
        {"task" : "Backup", "maxBackups" : 0, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
        {"task" : "python", "script" : "<plugin_path>test_hello_world2.py", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
        {"task" : "python", "script" : "<plugin_path>test_hello_world.py", "detach" : False, "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
        {"task" : "execute", "command" : "<plugin_path>test_hello_world2.cmd", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
        {"task" : "execute", "command" : "<plugin_path>test_hello_world.bat", "args" : "--name David", "weekday" : "sunday", "time" : "17:56"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
        {"task" : "DupFileManager", "taskName" : "Delete Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "09:05"}, # [Plugin Tasks] -> DupFileManager -> [Delete Duplicates]
        {"task" : "Generate", "weekday" : "every", "time" : "09:05"},
        {"task" : "Clean", "weekday" : "every", "time" : "09:05"},
        {"task" : "Auto Tag", "weekday" : "every", "time" : "09:05"},
        {"task" : "Optimise Database", "weekday" : "every", "time" : "09:05"},
        {"task" : "pathParser", "taskName" : "Create Tags", "validateDir" : "pathParser", "weekday" : "every", "time" : "09:05"}, # In task queue as -> Running plugin task: Create Tags
        {"task" : "DupFileManager", "taskMode" : "tag_duplicates_task", "taskQue":False, "weekday" : "every", "time" : "10:09"}, # Does NOT run in the task queue
        {"task" : "DupFileManager", "taskName" : "Tag Duplicates", "validateDir" : "DupFileManager", "weekday" : "every", "time" : "10:30"}, # [Plugin Tasks] -> DupFileManager -> [Tag Duplicates]
        {"task" : "Scan","paths": [r"B:\_\SpecialSet", r"C:\foo"], "weekday" : "every", "time" : "09:05"},
        {"task" : "GQL", "input" : "mutation OptimiseDatabase { optimiseDatabase }", "weekday" : "every", "time" : "09:05"}, # In task queue as -> Optimising database...
        {"task" : "Clean Generated Files", "weekday" : "every", "time" : "09:05"},
        {"task" : "RenameGeneratedFiles", "weekday" : "every", "time" : "09:05"}, # In task queue as -> Migrating scene hashes...
        {"task" : "Backup", "maxBackups" : 0, "weekday" : "every", "time" : "09:05"}, # Does NOT show up in the Task Queue. Must check STASH log file to verify run.
        {"task" : "python", "script" : "<plugin_path>test_hello_world2.py", "weekday" : "every", "time" : "09:05"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
        {"task" : "python", "script" : "<plugin_path>test_hello_world.py", "detach" : False, "weekday" : "every", "time" : "09:05"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'python' result=???
        {"task" : "execute", "command" : "<plugin_path>test_hello_world2.cmd", "weekday" : "every", "time" : "09:05"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
        {"task" : "execute", "command" : "<plugin_path>test_hello_world.bat", "args" : "--name David", "weekday" : "every", "time" : "09:05"}, # Does NOT show up in the Task Queue. Check FileMonitor log file, and look for -> Task 'execute' result=???
    ],
    # MUST ToDo: Always set selfUnitTest to False before checking in this code!!!
    "selfUnitTest_repeat" : False , # Enable to turn on self unit test.
    "selfUnitTest_set_time" : False , # Enable to turn on self unit test.
    "selfUnitTest_set_time" : True , # Enable to turn on self unit test.
}

@@ -30,20 +30,24 @@ task_examples = {
        # And days usage is discouraged, because it only works if FileMonitor is running for X many days non-stop.
        # The below example tasks are done using hours and minutes, however any of these task types can be converted to a daily, weekly, or monthly syntax.

        # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin name for the [task] field, and the plugin-ID for the [pluginId] field.
        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "hours" : 0}, # The zero frequency value makes this task disabled.
        # Example#B1: The following task is the syntax used for a plugin. A plugin task requires the plugin-ID for the [task] field. Optional fields are taskName, taskMode, validateDir, and taskQue.
        {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "hours" : 0}, # The zero frequency value makes this task disabled.
        # Example#B2: Optionally, the validateDir field can be included which is used to validate that the plugin is installed either under the plugins folder or under the plugins-community folder.
        {"task" : "PluginButtonName_Here", "pluginId" : "PluginId_Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0}, # The zero frequency value makes this task disabled.
        {"task" : "PluginId_Here", "taskName" : "Task Name or Plugin Button Name Here", "validateDir" : "UsuallySameAsPluginID", "hours" : 0},
        # Example#B3: To run a plugin WITHOUT using the Task Queue, use taskMode instead of taskName and/or add field "taskQue":False. The plugin will run immediately
        {"task" : "PluginId_Here", "taskMode" : "Plugin_Task_MODE", "taskQue" : False, "hours" : 0}, # Do NOT use taskName when including "taskQue":False
        # Example#B4: When the taskName field is missing, it will always run the task without using the Task Queue. The plugin will run immediately
        {"task" : "PluginId_Here", "hours" : 0},

        # Example#B3: Task to execute a command
        # Example#C1: Task to execute a command
        {"task" : "execute", "command" : "C:\\MyPath\\HelloWorld.bat", "hours" : 0},

        # Example#B4: Task to execute a command with optional args field, and using keyword <plugin_path>, which gets replaced with filemonitor.py current directory.
        # Example#C2: Task to execute a command with optional args field, and using keyword <plugin_path>, which gets replaced with filemonitor.py current directory.
        {"task" : "execute", "command" : "<plugin_path>HelloWorld.cmd", "args" : "--name David", "minutes" : 0},

        # Example#C1 Some OS may need the "command" field, which specifies the binary path.
        # Example#D1 Some OS may need the "command" field, which specifies the binary path.
        {"task" : "CheckStashIsRunning", "command" : "<stash_path>stash-linux-arm64v8", "minutes" :0},
        # Example#C2 RunAfter field can be used to specify tasks to run after starting Stash
        # Example#D2 RunAfter field can be used to specify tasks to run after starting Stash
        {"task" : "CheckStashIsRunning", "RunAfter" : [{"task" : "Scan"},{"task" : "Backup", "maxBackup" : 0},{"task" : "Clean"}], "minutes" :0},
    ],
}

526  plugins/RenameFile/StashPluginHelper.py  Normal file
@@ -0,0 +1,526 @@
from stashapi.stashapp import StashInterface
from logging.handlers import RotatingFileHandler
import re, inspect, sys, os, pathlib, logging, json
import concurrent.futures
from stashapi.stash_types import PhashDistance
import __main__

_ARGUMENT_UNSPECIFIED_ = "_ARGUMENT_UNSPECIFIED_"

# StashPluginHelper (By David Maisonave aka Axter)
    # See end of this file for example usage
    # Log Features:
        # Can optionally log out to multiple outputs for each Log or Trace call.
        # Logging includes source code line number
        # Sets a maximum plugin log file size
    # Stash Interface Features:
        # Gets STASH_URL value from command line argument and/or from STDIN_READ
        # Sets FRAGMENT_SERVER based on command line arguments or STDIN_READ
        # Sets PLUGIN_ID based on the main script file name (in lower case)
        # Gets PLUGIN_TASK_NAME value
        # Sets pluginSettings (The plugin UI settings)
    # Misc Features:
        # Gets DRY_RUN value from command line argument and/or from UI and/or from config file
        # Gets DEBUG_TRACING value from command line argument and/or from UI and/or from config file
        # Sets RUNNING_IN_COMMAND_LINE_MODE to True if detects multiple arguments
        # Sets CALLED_AS_STASH_PLUGIN to True if it's able to read from STDIN_READ
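# Minimal usage sketch (assuming a plugin script with a matching plugin YAML; the settings
# and config values below are illustrative only):
#   from StashPluginHelper import StashPluginHelper
#   stash = StashPluginHelper(settings={"zzdebugTracing": False}, config=config)
#   stash.Status(logLevel=logging.DEBUG)
#   stash.Log("Hello from plugin")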
class StashPluginHelper(StashInterface):
    # Primary Members for external reference
    PLUGIN_TASK_NAME = None
    PLUGIN_ID = None
    PLUGIN_CONFIGURATION = None
    PLUGINS_PATH = None
    pluginSettings = None
    pluginConfig = None
    STASH_URL = None
    STASH_CONFIGURATION = None
    JSON_INPUT = None
    DEBUG_TRACING = False
    DRY_RUN = False
    CALLED_AS_STASH_PLUGIN = False
    RUNNING_IN_COMMAND_LINE_MODE = False
    FRAGMENT_SERVER = None
    STASHPATHSCONFIG = None
    STASH_PATHS = []
    API_KEY = None
    excludeMergeTags = None

    # printTo argument
    LOG_TO_FILE = 1
    LOG_TO_CONSOLE = 2 # Note: Only see output when running in command line mode. In plugin mode, this output is lost.
    LOG_TO_STDERR = 4 # Note: In plugin mode, output to StdErr ALWAYS gets sent to stash logging as an error.
    LOG_TO_STASH = 8
    LOG_TO_WARN = 16
    LOG_TO_ERROR = 32
    LOG_TO_CRITICAL = 64
    LOG_TO_ALL = LOG_TO_FILE + LOG_TO_CONSOLE + LOG_TO_STDERR + LOG_TO_STASH
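    # The printTo values above are bit flags; they can be summed to target several outputs in one
    # call, e.g. (illustrative): stash.Log("msg", printTo = stash.LOG_TO_FILE + stash.LOG_TO_STASH)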

    # Misc class variables
    MAIN_SCRIPT_NAME = None
    LOG_LEVEL = logging.INFO
    LOG_FILE_DIR = None
    LOG_FILE_NAME = None
    STDIN_READ = None
    pluginLog = None
    logLinePreviousHits = []
    thredPool = None
    STASH_INTERFACE_INIT = False
    _mergeMetadata = None
    encodeToUtf8 = False
    convertToAscii = False # If set True, it takes precedence over encodeToUtf8

    # Prefix message value
    LEV_TRACE = "TRACE: "
    LEV_DBG = "DBG: "
    LEV_INF = "INF: "
    LEV_WRN = "WRN: "
    LEV_ERR = "ERR: "
    LEV_CRITICAL = "CRITICAL: "

    # Default format
    LOG_FORMAT = "[%(asctime)s] %(message)s"

    # Externally modifiable variables
    log_to_err_set = LOG_TO_FILE + LOG_TO_STDERR # This can be changed by the calling source in order to customize what targets get error messages
    log_to_norm = LOG_TO_FILE + LOG_TO_CONSOLE # Can be changed so as to set the target output for normal logging
    # Warn message goes to both plugin log file and stash when sent to Stash log file.
    log_to_wrn_set = LOG_TO_STASH # This can be changed by the calling source in order to customize what targets get warning messages

    def __init__(self,
                 debugTracing = None,           # Set debugTracing to True so as to output debug and trace logging
                 logFormat = LOG_FORMAT,        # Plugin log line format
                 dateFmt = "%y%m%d %H:%M:%S",   # Date format when logging to plugin log file
                 maxbytes = 8*1024*1024,        # Max size of plugin log file
                 backupcount = 2,               # Backup counts when log file size reaches max size
                 logToWrnSet = 0,               # Customize the target output set which will get warning logging
                 logToErrSet = 0,               # Customize the target output set which will get error logging
                 logToNormSet = 0,              # Customize the target output set which will get normal logging
                 logFilePath = "",              # Plugin log file. If empty, the log file name will be set based on current python file name and path
                 mainScriptName = "",           # The main plugin script file name (full path)
                 pluginID = "",
                 settings = None,               # Default settings for UI fields
                 config = None,                 # From pluginName_config.py or pluginName_setting.py
                 fragmentServer = None,
                 stash_url = None,              # Stash URL (endpoint URL) Example: http://localhost:9999
                 apiKey = None,                 # API Key only needed when username and password set while running script via command line
                 DebugTraceFieldName = "zzdebugTracing",
                 DryRunFieldName = "zzdryRun",
                 setStashLoggerAsPluginLogger = False):
        self.thredPool = concurrent.futures.ThreadPoolExecutor(max_workers=2)
        if logToWrnSet: self.log_to_wrn_set = logToWrnSet
        if logToErrSet: self.log_to_err_set = logToErrSet
        if logToNormSet: self.log_to_norm = logToNormSet
        if stash_url and len(stash_url): self.STASH_URL = stash_url
        self.MAIN_SCRIPT_NAME = mainScriptName if mainScriptName != "" else __main__.__file__
        self.PLUGIN_ID = pluginID if pluginID != "" else pathlib.Path(self.MAIN_SCRIPT_NAME).stem
        # print(f"self.MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME}, self.PLUGIN_ID={self.PLUGIN_ID}", file=sys.stderr)
        self.LOG_FILE_NAME = logFilePath if logFilePath != "" else f"{pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}{os.sep}{pathlib.Path(self.MAIN_SCRIPT_NAME).stem}.log"
        self.LOG_FILE_DIR = pathlib.Path(self.LOG_FILE_NAME).resolve().parent
        RFH = RotatingFileHandler(
            filename=self.LOG_FILE_NAME,
            mode='a',
            maxBytes=maxbytes,
            backupCount=backupcount,
            encoding=None,
            delay=0
        )
        if fragmentServer:
            self.FRAGMENT_SERVER = fragmentServer
        else:
            self.FRAGMENT_SERVER = {'Scheme': 'http', 'Host': '0.0.0.0', 'Port': '9999', 'SessionCookie': {'Name': 'session', 'Value': '', 'Path': '', 'Domain': '', 'Expires': '0001-01-01T00:00:00Z', 'RawExpires': '', 'MaxAge': 0, 'Secure': False, 'HttpOnly': False, 'SameSite': 0, 'Raw': '', 'Unparsed': None}, 'Dir': os.path.dirname(pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent), 'PluginDir': pathlib.Path(self.MAIN_SCRIPT_NAME).resolve().parent}

        if debugTracing: self.DEBUG_TRACING = debugTracing
        if config:
            self.pluginConfig = config
            if self.Setting('apiKey', "") != "":
                self.FRAGMENT_SERVER['ApiKey'] = self.Setting('apiKey')

        if apiKey and apiKey != "":
            self.FRAGMENT_SERVER['ApiKey'] = apiKey

        if len(sys.argv) > 1:
            self.RUNNING_IN_COMMAND_LINE_MODE = True
            if not debugTracing or not stash_url:
                for argValue in sys.argv[1:]:
                    if argValue.lower() == "--trace":
                        self.DEBUG_TRACING = True
                    elif argValue.lower() == "--dry_run" or argValue.lower() == "--dryrun":
                        self.DRY_RUN = True
                    elif ":" in argValue and not self.STASH_URL:
                        self.STASH_URL = argValue
            if self.STASH_URL:
                endpointUrlArr = self.STASH_URL.split(":")
                if len(endpointUrlArr) == 3:
                    self.FRAGMENT_SERVER['Scheme'] = endpointUrlArr[0]
                    self.FRAGMENT_SERVER['Host'] = endpointUrlArr[1][2:]
                    self.FRAGMENT_SERVER['Port'] = endpointUrlArr[2]
            super().__init__(self.FRAGMENT_SERVER)
            self.STASH_INTERFACE_INIT = True
        else:
            try:
                self.STDIN_READ = sys.stdin.read()
                self.CALLED_AS_STASH_PLUGIN = True
            except:
                pass
        if self.STDIN_READ:
            self.JSON_INPUT = json.loads(self.STDIN_READ)
            if "args" in self.JSON_INPUT and "mode" in self.JSON_INPUT["args"]:
                self.PLUGIN_TASK_NAME = self.JSON_INPUT["args"]["mode"]
            self.FRAGMENT_SERVER = self.JSON_INPUT["server_connection"]
            self.STASH_URL = f"{self.FRAGMENT_SERVER['Scheme']}://{self.FRAGMENT_SERVER['Host']}:{self.FRAGMENT_SERVER['Port']}"
            super().__init__(self.FRAGMENT_SERVER)
            self.STASH_INTERFACE_INIT = True

        if self.STASH_URL.startswith("http://0.0.0.0:"):
            self.STASH_URL = self.STASH_URL.replace("http://0.0.0.0:", "http://localhost:")

        if self.STASH_INTERFACE_INIT:
            self.PLUGIN_CONFIGURATION = self.get_configuration()["plugins"]
            self.STASH_CONFIGURATION = self.get_configuration()["general"]
            self.STASHPATHSCONFIG = self.STASH_CONFIGURATION['stashes']
            if 'pluginsPath' in self.STASH_CONFIGURATION:
                self.PLUGINS_PATH = self.STASH_CONFIGURATION['pluginsPath']
            for item in self.STASHPATHSCONFIG:
                self.STASH_PATHS.append(item["path"])
            if settings:
                self.pluginSettings = settings
                if self.PLUGIN_ID in self.PLUGIN_CONFIGURATION:
                    self.pluginSettings.update(self.PLUGIN_CONFIGURATION[self.PLUGIN_ID])
            if 'apiKey' in self.STASH_CONFIGURATION:
                self.API_KEY = self.STASH_CONFIGURATION['apiKey']

        self.DRY_RUN = self.Setting(DryRunFieldName, self.DRY_RUN)
        self.DEBUG_TRACING = self.Setting(DebugTraceFieldName, self.DEBUG_TRACING)
        if self.DEBUG_TRACING: self.LOG_LEVEL = logging.DEBUG

        logging.basicConfig(level=self.LOG_LEVEL, format=logFormat, datefmt=dateFmt, handlers=[RFH])
        self.pluginLog = logging.getLogger(pathlib.Path(self.MAIN_SCRIPT_NAME).stem)
        if setStashLoggerAsPluginLogger:
            self.log = self.pluginLog

    def __del__(self):
        self.thredPool.shutdown(wait=False)

    def Setting(self, name, default=_ARGUMENT_UNSPECIFIED_, raiseEx=True, notEmpty=False):
        if self.pluginSettings != None and name in self.pluginSettings:
            if notEmpty == False or self.pluginSettings[name] != "":
                return self.pluginSettings[name]
        if self.pluginConfig != None and name in self.pluginConfig:
            if notEmpty == False or self.pluginConfig[name] != "":
                return self.pluginConfig[name]
        if default == _ARGUMENT_UNSPECIFIED_ and raiseEx:
            raise Exception(f"Missing {name} from both UI settings and config file settings.")
        return default
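
    # Example usage (from the plugins above): fall back to a default when the setting is unset or empty:
    #   listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)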

    def Log(self, logMsg, printTo = 0, logLevel = logging.INFO, lineNo = -1, levelStr = "", logAlways = False, toAscii = None):
        if toAscii or (toAscii == None and (self.encodeToUtf8 or self.convertToAscii)):
            logMsg = self.asc2(logMsg)
        else:
            logMsg = logMsg
        if printTo == 0:
            printTo = self.log_to_norm
        elif printTo == self.LOG_TO_ERROR and logLevel == logging.INFO:
            logLevel = logging.ERROR
            printTo = self.log_to_err_set
        elif printTo == self.LOG_TO_CRITICAL and logLevel == logging.INFO:
            logLevel = logging.CRITICAL
            printTo = self.log_to_err_set
        elif printTo == self.LOG_TO_WARN and logLevel == logging.INFO:
            logLevel = logging.WARN
            printTo = self.log_to_wrn_set
        if lineNo == -1:
            lineNo = inspect.currentframe().f_back.f_lineno
        LN_Str = f"[LN:{lineNo}]"
        # print(f"{LN_Str}, {logAlways}, {self.LOG_LEVEL}, {logging.DEBUG}, {levelStr}, {logMsg}")
        if logLevel == logging.DEBUG and (logAlways == False or self.LOG_LEVEL == logging.DEBUG):
            if levelStr == "": levelStr = self.LEV_DBG
            if printTo & self.LOG_TO_FILE: self.pluginLog.debug(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: self.log.debug(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.INFO or logLevel == logging.DEBUG:
            if levelStr == "": levelStr = self.LEV_INF if logLevel == logging.INFO else self.LEV_DBG
            if printTo & self.LOG_TO_FILE: self.pluginLog.info(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: self.log.info(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.WARN:
            if levelStr == "": levelStr = self.LEV_WRN
            if printTo & self.LOG_TO_FILE: self.pluginLog.warning(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: self.log.warning(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.ERROR:
            if levelStr == "": levelStr = self.LEV_ERR
            if printTo & self.LOG_TO_FILE: self.pluginLog.error(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
        elif logLevel == logging.CRITICAL:
            if levelStr == "": levelStr = self.LEV_CRITICAL
            if printTo & self.LOG_TO_FILE: self.pluginLog.critical(f"{LN_Str} {levelStr}{logMsg}")
            if printTo & self.LOG_TO_STASH: self.log.error(f"{LN_Str} {levelStr}{logMsg}")
        if (printTo & self.LOG_TO_CONSOLE) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
            print(f"{LN_Str} {levelStr}{logMsg}")
        if (printTo & self.LOG_TO_STDERR) and (logLevel != logging.DEBUG or self.DEBUG_TRACING or logAlways):
            print(f"StdErr: {LN_Str} {levelStr}{logMsg}", file=sys.stderr)

    def Trace(self, logMsg = "", printTo = 0, logAlways = False, lineNo = -1, toAscii = None):
        if printTo == 0: printTo = self.LOG_TO_FILE
        if lineNo == -1:
            lineNo = inspect.currentframe().f_back.f_lineno
        logLev = logging.INFO if logAlways else logging.DEBUG
        if self.DEBUG_TRACING or logAlways:
            if logMsg == "":
                logMsg = f"Line number {lineNo}..."
            self.Log(logMsg, printTo, logLev, lineNo, self.LEV_TRACE, logAlways, toAscii=toAscii)

    # Log once per session. Only logs the first time called from a particular line number in the code.
    def TraceOnce(self, logMsg = "", printTo = 0, logAlways = False, toAscii = None):
        lineNo = inspect.currentframe().f_back.f_lineno
        if self.DEBUG_TRACING or logAlways:
            FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
            if FuncAndLineNo in self.logLinePreviousHits:
                return
            self.logLinePreviousHits.append(FuncAndLineNo)
            self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)

    # Log INFO on first call, then do Trace on remaining calls.
    def LogOnce(self, logMsg = "", printTo = 0, logAlways = False, traceOnRemainingCalls = True, toAscii = None):
        if printTo == 0: printTo = self.LOG_TO_FILE
        lineNo = inspect.currentframe().f_back.f_lineno
        FuncAndLineNo = f"{inspect.currentframe().f_back.f_code.co_name}:{lineNo}"
        if FuncAndLineNo in self.logLinePreviousHits:
            if traceOnRemainingCalls:
                self.Trace(logMsg, printTo, logAlways, lineNo, toAscii=toAscii)
        else:
            self.logLinePreviousHits.append(FuncAndLineNo)
            self.Log(logMsg, printTo, logging.INFO, lineNo, toAscii=toAscii)

    def Warn(self, logMsg, printTo = 0, toAscii = None):
        if printTo == 0: printTo = self.log_to_wrn_set
        lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(logMsg, printTo, logging.WARN, lineNo, toAscii=toAscii)

    def Error(self, logMsg, printTo = 0, toAscii = None):
        if printTo == 0: printTo = self.log_to_err_set
        lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)

    def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
        if printTo == 0: printTo = self.log_to_norm
        if lineNo == -1:
            lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(f"StashPluginHelper Status: (CALLED_AS_STASH_PLUGIN={self.CALLED_AS_STASH_PLUGIN}), (RUNNING_IN_COMMAND_LINE_MODE={self.RUNNING_IN_COMMAND_LINE_MODE}), (DEBUG_TRACING={self.DEBUG_TRACING}), (DRY_RUN={self.DRY_RUN}), (PLUGIN_ID={self.PLUGIN_ID}), (PLUGIN_TASK_NAME={self.PLUGIN_TASK_NAME}), (STASH_URL={self.STASH_URL}), (MAIN_SCRIPT_NAME={self.MAIN_SCRIPT_NAME})",
                 printTo, logLevel, lineNo)

    def ExecuteProcess(self, args, ExecDetach=False):
        import platform, subprocess
        is_windows = any(platform.win32_ver())
        pid = None
        self.Trace(f"is_windows={is_windows} args={args}")
        if is_windows:
            if ExecDetach:
                self.Trace("Executing process using Windows DETACHED_PROCESS")
                DETACHED_PROCESS = 0x00000008
                pid = subprocess.Popen(args, creationflags=DETACHED_PROCESS, shell=True).pid
            else:
                pid = subprocess.Popen(args, shell=True).pid
        else:
            self.Trace("Executing process using normal Popen")
            pid = subprocess.Popen(args).pid
        self.Trace(f"pid={pid}")
        return pid

    def ExecutePythonScript(self, args, ExecDetach=True):
        PythonExe = f"{sys.executable}"
        argsWithPython = [f"{PythonExe}"] + args
        return self.ExecuteProcess(argsWithPython, ExecDetach=ExecDetach)
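
    # Example (illustrative script name): launch a helper script via the same interpreter, detached
    # from the plugin process:
    #   stash.ExecutePythonScript(["test_hello_world.py", "--name", "David"], ExecDetach=True)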

    def Submit(self, *args, **kwargs):
        return self.thredPool.submit(*args, **kwargs)

    def asc2(self, data, convertToAscii=None):
        if convertToAscii or (convertToAscii == None and self.convertToAscii):
            return ascii(data)
        return str(str(data).encode('utf-8'))[2:-1] # This works better for logging than the ascii function
        # data = str(data).encode('ascii','ignore') # This works better for logging than the ascii function
        # return str(data)[2:-1] # strip out b'str'

    def init_mergeMetadata(self, excludeMergeTags=None):
        self.excludeMergeTags = excludeMergeTags
        self._mergeMetadata = mergeMetadata(self, self.excludeMergeTags)

    # Must call init_mergeMetadata before calling merge_metadata
    def merge_metadata(self, SrcData, DestData): # Input arguments can be scene ID or scene metadata
        if type(SrcData) is int:
            SrcData = self.find_scene(SrcData)
            DestData = self.find_scene(DestData)
        return self._mergeMetadata.merge(SrcData, DestData)
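
    # Example (illustrative scene IDs): copy missing metadata from scene 123 onto scene 456:
    #   stash.init_mergeMetadata(excludeMergeTags=excludedTags)  # excludedTags: list of tag names to skip
    #   stash.merge_metadata(123, 456)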

    def Progress(self, currentIndex, maxCount):
        progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
        self.log.progress(progress)

    def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
        """Runs a plugin operation.
           The operation is run immediately and does not use the job queue.
        Args:
            plugin_id (ID): plugin_id
            task_mode (str, optional): Plugin task mode to perform
            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
        Returns:
            A map of the result.
        """
        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
            runPluginOperation(plugin_id: $plugin_id, args: $args)
            }"""
        if task_mode != None:
            args.update({"mode" : task_mode})
        variables = {
            "plugin_id": plugin_id,
            "args": args,
        }
        if asyn:
            self.Submit(self.call_GQL, query, variables)
            return f"Made asynchronous call for plugin {plugin_id}"
        else:
            return self.call_GQL(query, variables)
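
    # Example (illustrative): run a DupFileManager task immediately; with asyn=True the GraphQL call
    # is handed to the helper's thread pool and this call returns without blocking:
    #   stash.run_plugin("DupFileManager", task_mode="tag_duplicates_task", asyn=True)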

    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"

        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']
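
    # Example (illustrative fragment): override the default 'id' fragment to pull extra scene fields:
    #   DupFileSets = stash.find_duplicate_scenes_diff(fragment="id files {path}", duration_diff=10.00)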

    # #################################################################################################
    # The below functions extend class StashInterface with functions which are not yet in the class
    def get_all_scenes(self):
        query_all_scenes = """
            query AllScenes {
                allScenes {
                    id
                    updated_at
                }
            }
        """
        return self.call_GQL(query_all_scenes)

    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
        query = """
            mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
                metadataAutoTag(input: $input)
            }
        """
        metadata_autotag_input = {
            "paths":paths,
            "performers": performers,
            "studios":studios,
            "tags":tags,
        }
        result = self.call_GQL(query, {"input": metadata_autotag_input})
        return result

    def backup_database(self):
        return self.call_GQL("mutation { backupDatabase(input: {download: false})}")

    def optimise_database(self):
        return self.call_GQL("mutation OptimiseDatabase { optimiseDatabase }")

    def metadata_clean_generated(self, blobFiles=True, dryRun=False, imageThumbnails=True, markers=True, screenshots=True, sprites=True, transcodes=True):
        query = """
            mutation MetadataCleanGenerated($input: CleanGeneratedInput!) {
                metadataCleanGenerated(input: $input)
            }
        """
        clean_metadata_input = {
            "blobFiles": blobFiles,
            "dryRun": dryRun,
            "imageThumbnails": imageThumbnails,
            "markers": markers,
            "screenshots": screenshots,
            "sprites": sprites,
            "transcodes": transcodes,
        }
        result = self.call_GQL(query, {"input": clean_metadata_input})
        return result

    def rename_generated_files(self):
        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")

class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
    srcData = None
    destData = None
    stash = None
    excludeMergeTags = None
    dataDict = None
    result = "Nothing To Merge"
    def __init__(self, stash, excludeMergeTags=None):
        self.stash = stash
        self.excludeMergeTags = excludeMergeTags

    def merge(self, SrcData, DestData):
        self.srcData = SrcData
        self.destData = DestData
        ORG_DATA_DICT = {'id' : self.destData['id']}
        self.dataDict = ORG_DATA_DICT.copy()
        self.mergeItems('tags', 'tag_ids', [], excludeName=self.excludeMergeTags)
        self.mergeItems('performers', 'performer_ids', [])
        self.mergeItems('galleries', 'gallery_ids', [])
        self.mergeItems('movies', 'movies', [])
        self.mergeItems('urls', listToAdd=self.destData['urls'], NotStartWith=self.stash.STASH_URL)
        self.mergeItem('studio', 'studio_id', 'id')
        self.mergeItem('title')
        self.mergeItem('director')
        self.mergeItem('date')
        self.mergeItem('details')
        self.mergeItem('rating100')
        self.mergeItem('code')
        if self.dataDict != ORG_DATA_DICT:
            self.stash.Trace(f"Updating scene ID({self.destData['id']}) with {self.dataDict}; path={self.destData['files'][0]['path']}", toAscii=True)
            self.result = self.stash.update_scene(self.dataDict)
        return self.result

    def Nothing(self, Data):
        if not Data or Data == "" or (type(Data) is str and Data.strip() == ""):
            return True
        return False

    def mergeItem(self, fieldName, updateFieldName=None, subField=None):
        if updateFieldName == None:
            updateFieldName = fieldName
        if self.Nothing(self.destData[fieldName]) and not self.Nothing(self.srcData[fieldName]):
            if subField == None:
                self.dataDict.update({ updateFieldName : self.srcData[fieldName]})
            else:
                self.dataDict.update({ updateFieldName : self.srcData[fieldName][subField]})
    def mergeItems(self, fieldName, updateFieldName=None, listToAdd=[], NotStartWith=None, excludeName=None):
        dataAdded = ""
        for item in self.srcData[fieldName]:
            if item not in self.destData[fieldName]:
                if NotStartWith == None or not item.startswith(NotStartWith):
                    if excludeName == None or item['name'] not in excludeName:
                        if fieldName == 'movies':
                            listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
                            dataAdded += f"{item['movie']['id']} "
                        elif updateFieldName == None:
                            listToAdd += [item]
                            dataAdded += f"{item} "
                        else:
                            listToAdd += [item['id']]
                            dataAdded += f"{item['id']} "
        if dataAdded != "":
            if updateFieldName == None:
                updateFieldName = fieldName
            else:
                for item in self.destData[fieldName]:
                    if fieldName == 'movies':
                        listToAdd += [{"movie_id" : item['movie']['id'], "scene_index" : item['scene_index']}]
                    else:
                        listToAdd += [item['id']]
            self.dataDict.update({ updateFieldName : listToAdd})
            # self.stash.Trace(f"Added {fieldName} ({dataAdded}) to scene ID({self.destData['id']})", toAscii=True)
@@ -2,30 +2,17 @@
# By David Maisonave (aka Axter) Jul-2024 (https://www.axter.com/)
# Get the latest developer version from the following link: https://github.com/David-Maisonave/Axter-Stash/tree/main/plugins/RenameFile
# Based on source code from https://github.com/Serechops/Serechops-Stash/tree/main/plugins/Renamer
import os
import sys
import shutil
import hashlib
import json
import os, sys, shutil, json, requests, hashlib, pathlib, logging
from pathlib import Path
import requests
import logging
from logging.handlers import RotatingFileHandler
import stashapi.log as log # Importing stashapi.log as log for critical events ONLY
from stashapi.stashapp import StashInterface
from StashPluginHelper import StashPluginHelper
from renamefile_settings import config # Import settings from renamefile_settings.py

# **********************************************************************
# Constant global variables --------------------------------------------
LOG_FILE_PATH = f"{Path(__file__).resolve().parent}\\{Path(__file__).stem}.log"
FORMAT = "[%(asctime)s - LN:%(lineno)s] %(message)s"
DEFAULT_FIELD_KEY_LIST = "title,performers,studio,tags" # Default Field Key List with the desired order
PLUGIN_ID = Path(__file__).stem.lower()
DEFAULT_SEPERATOR = "-"
PLUGIN_ARGS = False
PLUGIN_ARGS_MODE = False
WRAPPER_STYLES = config["wrapper_styles"]
POSTFIX_STYLES = config["postfix_styles"]
# GraphQL query to fetch all scenes
QUERY_ALL_SCENES = """
query AllScenes {
@@ -35,133 +22,81 @@ QUERY_ALL_SCENES = """
    }
}
"""
RFH = RotatingFileHandler(
    filename=LOG_FILE_PATH,
    mode='a',
    maxBytes=8*1024*1024, # Configure logging for this script with a max log file size of 8 MB
    backupCount=2,
    encoding=None,
    delay=0
)

# **********************************************************************
# Global variables --------------------------------------------
inputToUpdateScenePost = False
exitMsg = "Change success!!"

# Configure local log file for plugin within plugin folder having a limited max log file size
logging.basicConfig(level=logging.INFO, format=FORMAT, datefmt="%y%m%d %H:%M:%S", handlers=[RFH])
logger = logging.getLogger(PLUGIN_ID)

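# Example only (not part of the commit): with FORMAT and datefmt as configured
# above, a record logged from line 57 on 2024-07-15 renders roughly as:
#   [240715 14:30:02 - LN:57] Dry run mode is enabled.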
# **********************************************************************
# ----------------------------------------------------------------------
# Code section to fetch variables from Plugin UI and from renamefile_settings.py
json_input = json.loads(sys.stdin.read())
FRAGMENT_SERVER = json_input['server_connection']
stash = StashInterface(FRAGMENT_SERVER)
pluginConfiguration = stash.get_configuration()["plugins"]

settings = {
    "performerAppend": False,
    "studioAppend": False,
    "tagAppend": False,
    "z_keyFIeldsIncludeInFileName": False,
    "zafileRenameViaRaname": False,
    "zafileRenameViaMove": False,
    "zfieldKeyList": DEFAULT_FIELD_KEY_LIST,
    "zmaximumTagKeys": 12,
    "zseparators": DEFAULT_SEPERATOR,
    "zzdebugTracing": False,
    "zzdryRun": False,
}
if PLUGIN_ID in pluginConfiguration:
    settings.update(pluginConfiguration[PLUGIN_ID])
stash = StashPluginHelper(
    settings=settings,
    config=config,
    maxbytes=10*1024*1024,
)
stash.Status(logLevel=logging.DEBUG)
if stash.PLUGIN_ID in stash.PLUGIN_CONFIGURATION:
    stash.pluginSettings.update(stash.PLUGIN_CONFIGURATION[stash.PLUGIN_ID])
# ----------------------------------------------------------------------
debugTracing = settings["zzdebugTracing"]
WRAPPER_STYLES = config["wrapper_styles"]
POSTFIX_STYLES = config["postfix_styles"]

# Extract dry_run setting from settings
dry_run = settings["zzdryRun"]
dry_run = stash.pluginSettings["zzdryRun"]
dry_run_prefix = ''
try:
    PLUGIN_ARGS = json_input['args']
    PLUGIN_ARGS_MODE = json_input['args']["mode"]
    if stash.JSON_INPUT['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling the rename logic twice
except:
    pass
try:
    if json_input['args']['hookContext']['input']: inputToUpdateScenePost = True # This avoids calling the rename logic twice
except:
    pass
logger.info(f"\nStarting (debugTracing={debugTracing}) (dry_run={dry_run}) (PLUGIN_ARGS_MODE={PLUGIN_ARGS_MODE}) (inputToUpdateScenePost={inputToUpdateScenePost})************************************************")
if debugTracing: logger.info("settings: %s " % (settings,))

if PLUGIN_ID in pluginConfiguration:
    if debugTracing: logger.info(f"Debug Tracing (pluginConfiguration[PLUGIN_ID]={pluginConfiguration[PLUGIN_ID]})................")
    # if 'zmaximumTagKeys' not in pluginConfiguration[PLUGIN_ID]:
    #     if debugTracing: logger.info("Debug Tracing................")
    #     try:
    #         stash.configure_plugin(PLUGIN_ID, settings)
    #         stash.configure_plugin("renamefile", {"zmaximumTagKeys": 12})
    #     except Exception as e:
    #         logger.error(f"configure_plugin failed!!! Error: {e}")
    #         logger.exception('Got exception on main handler')
    #         pass
    # #     stash.configure_plugin(PLUGIN_ID, settings) # , init_defaults=True
    # if debugTracing: logger.info("Debug Tracing................")
stash.Trace("settings: %s " % (stash.pluginSettings,))

if dry_run:
    logger.info("Dry run mode is enabled.")
    stash.Log("Dry run mode is enabled.")
    dry_run_prefix = "Would've "
if debugTracing: logger.info("Debug Tracing................")
max_tag_keys = settings["zmaximumTagKeys"] if settings["zmaximumTagKeys"] != 0 else 12 # Need this in case the user explicitly sets the value to zero in the UI
if debugTracing: logger.info("Debug Tracing................")
max_tag_keys = stash.pluginSettings["zmaximumTagKeys"] if stash.pluginSettings["zmaximumTagKeys"] != 0 else 12 # Need this in case the user explicitly sets the value to zero in the UI
# ToDo: Add split logic here to split a possible string array into an array
exclude_paths = config["pathToExclude"]
exclude_paths = exclude_paths.split()
if debugTracing: logger.info(f"Debug Tracing (exclude_paths={exclude_paths})................")
stash.Trace(f"(exclude_paths={exclude_paths})")
excluded_tags = config["excludeTags"]
# Extract tag whitelist from settings
tag_whitelist = config["tagWhitelist"]
if debugTracing: logger.info("Debug Tracing................")
if not tag_whitelist:
    tag_whitelist = ""
if debugTracing: logger.info(f"Debug Tracing (tag_whitelist={tag_whitelist})................")
stash.Trace(f"(tag_whitelist={tag_whitelist})")

endpointHost = json_input['server_connection']['Host']
endpointHost = stash.JSON_INPUT['server_connection']['Host']
if endpointHost == "0.0.0.0":
    endpointHost = "localhost"
endpoint = f"{json_input['server_connection']['Scheme']}://{endpointHost}:{json_input['server_connection']['Port']}/graphql"
endpoint = f"{stash.JSON_INPUT['server_connection']['Scheme']}://{endpointHost}:{stash.JSON_INPUT['server_connection']['Port']}/graphql"

if debugTracing: logger.info(f"Debug Tracing (endpoint={endpoint})................")
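# Example only (not part of the commit): on a default local install these values
# typically resolve to:
#   endpointHost = "localhost"   # rewritten from "0.0.0.0" above
#   endpoint     = "http://localhost:9999/graphql"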
# Extract rename_files and move_files settings from renamefile_settings.py
rename_files = config["rename_files"]
move_files = False if settings["zafileRenameViaRaname"] else True
if debugTracing: logger.info("Debug Tracing................")
fieldKeyList = settings["zfieldKeyList"] # Default Field Key List with the desired order
stash.Trace(f"(endpoint={endpoint})")
move_files = stash.pluginSettings["zafileRenameViaMove"]
fieldKeyList = stash.pluginSettings["zfieldKeyList"] # Default Field Key List with the desired order
if not fieldKeyList or fieldKeyList == "":
    fieldKeyList = DEFAULT_FIELD_KEY_LIST
fieldKeyList = fieldKeyList.replace(" ", "")
fieldKeyList = fieldKeyList.replace(";", ",")
fieldKeyList = fieldKeyList.split(",")
if debugTracing: logger.info(f"Debug Tracing (fieldKeyList={fieldKeyList})................")
separator = settings["zseparators"]
stash.Trace(f"(fieldKeyList={fieldKeyList})")
separator = stash.pluginSettings["zseparators"]
# ----------------------------------------------------------------------
# **********************************************************************

double_separator = separator + separator
if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ARGS={PLUGIN_ARGS}) (WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})................")
if debugTracing: logger.info(f"Debug Tracing (PLUGIN_ID=\"{PLUGIN_ID}\")................")
if debugTracing: logger.info("Debug Tracing................")

# Function to make GraphQL requests
def graphql_request(query, variables=None):
    if debugTracing: logger.info("Debug Tracing................%s", query)
    data = {'query': query}
    if variables:
        data['variables'] = variables
    if debugTracing: logger.info("Debug Tracing................")
    if debugTracing: logger.info("Debug Tracing................")
    response = requests.post(endpoint, json=data)
    if debugTracing: logger.info("Debug Tracing................")
    return response.json()
stash.Trace(f"(WRAPPER_STYLES={WRAPPER_STYLES}) (POSTFIX_STYLES={POSTFIX_STYLES})")

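# Hypothetical usage sketch (not part of the commit) for graphql_request() above;
# the query text and the scene ID 42 are invented for illustration.
#   reply = graphql_request(
#       "query FindScene($id: ID!) { findScene(id: $id) { id title } }",
#       variables={"id": 42},
#   )
#   title = reply.get('data', {}).get('findScene', {}).get('title')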
# Function to replace illegal characters in filenames
def replace_illegal_characters(filename):
@@ -179,12 +114,11 @@ def should_exclude_path(scene_details):

# Function to form the new filename based on scene details and user settings
def form_filename(original_file_stem, scene_details):
    if debugTracing: logger.info("Debug Tracing................")
    filename_parts = []
    tag_keys_added = 0
    default_title = ''
    if_notitle_use_org_filename = config["if_notitle_use_org_filename"]
    include_keyField_if_in_name = settings["z_keyFIeldsIncludeInFileName"]
    include_keyField_if_in_name = stash.pluginSettings["z_keyFIeldsIncludeInFileName"]
    if if_notitle_use_org_filename:
        default_title = original_file_stem
    # ...................
@@ -195,44 +129,39 @@ def form_filename(original_file_stem, scene_details):
    title = default_title
    # ...................

    if debugTracing: logger.info(f"Debug Tracing (title=\"{title}\")................")
    stash.Trace(f"(title=\"{title}\")")

    # Function to add tag to filename
    def add_tag(tag_name):
        nonlocal tag_keys_added
        nonlocal filename_parts
        if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
        stash.Trace(f"(tag_name={tag_name})")
        if max_tag_keys == -1 or (max_tag_keys is not None and tag_keys_added >= int(max_tag_keys)):
            return # Skip adding more tags if the maximum limit is reached
        if tag_name in excluded_tags:
            if debugTracing: logger.info(f"Debug Tracing EXCLUDING (tag_name={tag_name})")
            stash.Trace(f"EXCLUDING (tag_name={tag_name})")
            return
        # Check if the tag name is in the whitelist
        if tag_whitelist == "" or tag_whitelist == None or (tag_whitelist and tag_name in tag_whitelist):
            if WRAPPER_STYLES.get('tag'):
                filename_parts.append(f"{WRAPPER_STYLES['tag'][0]}{tag_name}{WRAPPER_STYLES['tag'][1]}")
                if debugTracing: logger.info("Debug Tracing................")
            else:
                filename_parts.append(tag_name)
                if debugTracing: logger.info("Debug Tracing................")
            tag_keys_added += 1
            if debugTracing: logger.info("Debug Tracing................")
        else:
            logger.info(f"Skipping tag not in whitelist: {tag_name}")
            if debugTracing: logger.info(f"Debug Tracing (tag_keys_added={tag_keys_added})................")
            stash.Log(f"Skipping tag not in whitelist: {tag_name}")
        stash.Trace(f"(tag_keys_added={tag_keys_added})")

    for key in fieldKeyList:
        if key == 'studio':
            if settings["studioAppend"]:
            if debugTracing: logger.info("Debug Tracing................")
            if stash.pluginSettings["studioAppend"]:
                studio_name = scene_details.get('studio', {})
                if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
                stash.Trace(f"(studio_name={studio_name})")
                if studio_name:
                    studio_name = scene_details.get('studio', {}).get('name', '')
                    if debugTracing: logger.info(f"Debug Tracing (studio_name={studio_name})................")
                    stash.Trace(f"(studio_name={studio_name})")
                    if studio_name:
                        studio_name += POSTFIX_STYLES.get('studio')
                        if debugTracing: logger.info("Debug Tracing................")
                        if include_keyField_if_in_name or studio_name.lower() not in title.lower():
                            if WRAPPER_STYLES.get('studio'):
                                filename_parts.append(f"{WRAPPER_STYLES['studio'][0]}{studio_name}{WRAPPER_STYLES['studio'][1]}")
@@ -246,23 +175,21 @@ def form_filename(original_file_stem, scene_details):
            else:
                filename_parts.append(title)
        elif key == 'performers':
            if settings["performerAppend"]:
            if stash.pluginSettings["performerAppend"]:
                performers = '-'.join([performer.get('name', '') for performer in scene_details.get('performers', [])])
                if performers:
                    performers += POSTFIX_STYLES.get('performers')
                    if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name})................")
                    stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name})")
                    if include_keyField_if_in_name or performers.lower() not in title.lower():
                        if debugTracing: logger.info(f"Debug Tracing (performers={performers})................")
                        stash.Trace(f"(performers={performers})")
                        if WRAPPER_STYLES.get('performers'):
                            filename_parts.append(f"{WRAPPER_STYLES['performers'][0]}{performers}{WRAPPER_STYLES['performers'][1]}")
                        else:
                            filename_parts.append(performers)
        elif key == 'date':
            scene_date = scene_details.get('date', '')
            if debugTracing: logger.info("Debug Tracing................")
            if scene_date:
                scene_date += POSTFIX_STYLES.get('date')
                if debugTracing: logger.info("Debug Tracing................")
                if WRAPPER_STYLES.get('date'):
                    scene_date = f"{WRAPPER_STYLES['date'][0]}{scene_date}{WRAPPER_STYLES['date'][1]}"
                if scene_date not in title:
@@ -310,200 +237,53 @@ def form_filename(original_file_stem, scene_details):
                            filename_parts.append(frame_rate)
        elif key == 'galleries':
            galleries = [gallery.get('title', '') for gallery in scene_details.get('galleries', [])]
            if debugTracing: logger.info("Debug Tracing................")
            for gallery_name in galleries:
                if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})................")
                stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (gallery_name={gallery_name})")
                if include_keyField_if_in_name or gallery_name.lower() not in title.lower():
                    gallery_name += POSTFIX_STYLES.get('galleries')
                    if WRAPPER_STYLES.get('galleries'):
                        filename_parts.append(f"{WRAPPER_STYLES['galleries'][0]}{gallery_name}{WRAPPER_STYLES['galleries'][1]}")
                        if debugTracing: logger.info("Debug Tracing................")
                    else:
                        filename_parts.append(gallery_name)
                        if debugTracing: logger.info("Debug Tracing................")
                if debugTracing: logger.info(f"Debug Tracing (gallery_name={gallery_name})................")
                if debugTracing: logger.info("Debug Tracing................")
                stash.Trace(f"(gallery_name={gallery_name})")
        elif key == 'tags':
            if settings["tagAppend"]:
            if stash.pluginSettings["tagAppend"]:
                tags = [tag.get('name', '') for tag in scene_details.get('tags', [])]
                if debugTracing: logger.info("Debug Tracing................")
                for tag_name in tags:
                    if debugTracing: logger.info(f"Debug Tracing (include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})................")
                    stash.Trace(f"(include_keyField_if_in_name={include_keyField_if_in_name}) (tag_name={tag_name})")
                    if include_keyField_if_in_name or tag_name.lower() not in title.lower():
                        add_tag(tag_name + POSTFIX_STYLES.get('tag'))
                        if debugTracing: logger.info(f"Debug Tracing (tag_name={tag_name})................")
                    if debugTracing: logger.info("Debug Tracing................")
                    stash.Trace(f"(tag_name={tag_name})")

    if debugTracing: logger.info(f"Debug Tracing (filename_parts={filename_parts})................")
    stash.Trace(f"(filename_parts={filename_parts})")
    new_filename = separator.join(filename_parts).replace(double_separator, separator)
    if debugTracing: logger.info(f"Debug Tracing (new_filename={new_filename})................")
    stash.Trace(f"(new_filename={new_filename})")

    # Check if the scene's path matches any of the excluded paths
    if exclude_paths and should_exclude_path(scene_details):
        logger.info(f"Scene belongs to an excluded path. Skipping filename modification.")
        stash.Log(f"Scene belongs to an excluded path. Skipping filename modification.")
        return Path(scene_details['files'][0]['path']).name # Return the original filename

    return replace_illegal_characters(new_filename)

def find_scene_by_id(scene_id):
    query_find_scene = """
    query FindScene($scene_id: ID!) {
        findScene(id: $scene_id) {
            id
            title
            date
            files {
                path
                width
                height
                video_codec
                frame_rate
            }
            galleries {
                title
            }
            studio {
                name
            }
            performers {
                name
            }
            tags {
                name
            }
        }
    }
    """
    scene_result = graphql_request(query_find_scene, variables={"scene_id": scene_id})
    return scene_result.get('data', {}).get('findScene')

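# Hypothetical comparison (not part of the commit): this commit replaces calls to
# the direct GraphQL helper above with the StashInterface wrapper; both should
# return the same scene fragment. The scene ID 42 is invented for the example.
#   scene_old = find_scene_by_id(42)   # removed implementation
#   scene_new = stash.find_scene(42)   # replacement used below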
def move_or_rename_files(scene_details, new_filename, original_parent_directory):
def rename_scene(scene_id):
    global exitMsg
    studio_directory = None
    for file_info in scene_details['files']:
        path = file_info['path']
        original_path = Path(path)

        # Check if the file's path matches any of the excluded paths
        if exclude_paths and any(original_path.match(exclude_path) for exclude_path in exclude_paths):
            logger.info(f"File {path} belongs to an excluded path. Skipping modification.")
            continue

        new_path = original_parent_directory if not move_files else original_parent_directory / scene_details['studio']['name']
        if rename_files:
            new_path = new_path / (new_filename + original_path.suffix)
        try:
            if move_files:
                if studio_directory is None:
                    studio_directory = original_parent_directory / scene_details['studio']['name']
                    studio_directory.mkdir(parents=True, exist_ok=True)
                if rename_files: # Check if rename_files is True
                    if not dry_run:
                        shutil.move(original_path, new_path)
                    logger.info(f"{dry_run_prefix}Moved and renamed file: {path} -> {new_path}")
                else:
                    if not dry_run:
                        shutil.move(original_path, new_path)
                    logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}")
            else:
                if rename_files: # Check if rename_files is True
                    if not dry_run:
                        original_path.rename(new_path)
                    logger.info(f"{dry_run_prefix}Renamed file: {path} -> {new_path}")
                else:
                    if not dry_run:
                        shutil.move(original_path, new_path)
                    logger.info(f"{dry_run_prefix}Moved file: {path} -> {new_path}")
        except FileNotFoundError:
            log.error(f"File not found: {path}. Skipping...")
            logger.error(f"File not found: {path}. Skipping...")
            exitMsg = "File not found"
            continue
        except OSError as e:
            log.error(f"Failed to move or rename file: {path}. Error: {e}")
            logger.error(f"Failed to move or rename file: {path}. Error: {e}")
            exitMsg = "Failed to move or rename file"
            continue
    return new_path # Return the new_path variable after the loop

def perform_metadata_scan(metadata_scan_path):
    metadata_scan_path_windows = metadata_scan_path.resolve().as_posix()
    mutation_metadata_scan = """
    mutation {
        metadataScan(input: { paths: "%s" })
    }
    """ % metadata_scan_path_windows
    if debugTracing:
        logger.info(f"Attempting metadata scan mutation with path: {metadata_scan_path_windows}")
        logger.info(f"Mutation string: {mutation_metadata_scan}")
    graphql_request(mutation_metadata_scan)

def rename_scene(scene_id, stash_directory):
    global exitMsg
    scene_details = find_scene_by_id(scene_id)
    if debugTracing: logger.info(f"Debug Tracing (scene_details={scene_details})................")
    scene_details = stash.find_scene(scene_id)
    stash.Trace(f"(scene_details1={scene_details})")
    if not scene_details:
        log.error(f"Scene with ID {scene_id} not found.")
        logger.error(f"Scene with ID {scene_id} not found.")
        return

    if debugTracing: logger.info(f"Debug Tracing................")

        stash.Error(f"Scene with ID {scene_id} not found.")
        return None
    original_file_path = scene_details['files'][0]['path']
    original_parent_directory = Path(original_file_path).parent
    if debugTracing: logger.info(f"Debug Tracing (original_file_path={original_file_path})................")

    stash.Trace(f"(original_file_path={original_file_path})")
    # Check if the scene's path matches any of the excluded paths
    if exclude_paths and any(Path(original_file_path).match(exclude_path) for exclude_path in exclude_paths):
        logger.info(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.")
        return

    if debugTracing: logger.info(f"Debug Tracing................")
    original_path_info = {'original_file_path': original_file_path,
                          'original_parent_directory': original_parent_directory}

    new_path_info = None
        stash.Log(f"Scene with ID {scene_id} belongs to an excluded path. Skipping modifications.")
        return None

    original_file_stem = Path(original_file_path).stem
    original_file_name = Path(original_file_path).name
    new_filename = form_filename(original_file_stem, scene_details)
    newFilenameWithExt = new_filename + Path(original_file_path).suffix
    if debugTracing: logger.info(f"Debug Tracing (original_file_name={original_file_name})(newFilenameWithExt={newFilenameWithExt})................")
    if original_file_name == newFilenameWithExt:
        logger.info(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})")
        return
    if debugTracing: logger.info(f"Debug Tracing................")

    if rename_files:
        new_path = original_parent_directory / (newFilenameWithExt)
        new_path_info = {'new_file_path': new_path}
        if debugTracing: logger.info(f"{dry_run_prefix}New filename: {new_path}")

    studioName = ""
    if 'studio' in scene_details and scene_details['studio'] != None and 'name' in scene_details['studio']:
        studioName = scene_details['studio']['name']
    if move_files and studioName != "" and original_parent_directory.name != studioName:
        new_path = original_parent_directory / scene_details['studio']['name'] / (new_filename + Path(original_file_path).suffix)
        new_path_info = {'new_file_path': new_path}
        move_or_rename_files(scene_details, new_filename, original_parent_directory)
        logger.info(f"{dry_run_prefix}Moved to directory: '{new_path}'")

    # If rename_files is True, attempt renaming even if move_files is False
    if rename_files:
        new_file_path = original_parent_directory / (new_filename + Path(original_file_name).suffix)
        if original_file_name != new_filename:
            try:
                if not dry_run:
                    os.rename(original_file_path, new_file_path)
                logger.info(f"{dry_run_prefix}Renamed file: {original_file_path} -> {new_file_path}")
            except Exception as e:
                exitMsg = "Failed to rename file"
                log.error(f"Failed to rename file: {original_file_path}. Error: {e}")
                logger.error(f"Failed to rename file: {original_file_path}. Error: {e}")

    metadata_scan_path = original_parent_directory
    perform_metadata_scan(metadata_scan_path)

    max_filename_length = int(config["max_filename_length"])
    if len(new_filename) > max_filename_length:
        extension_length = len(Path(original_file_path).suffix)
@@ -511,61 +291,61 @@ def rename_scene(scene_id, stash_directory):
        truncated_filename = new_filename[:max_base_filename_length]
        hash_suffix = hashlib.md5(new_filename.encode()).hexdigest()
        new_filename = truncated_filename + '_' + hash_suffix + Path(original_file_path).suffix
    newFilenameWithExt = new_filename + Path(original_file_path).suffix
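    # Worked example (not part of the commit), assuming max_filename_length = 255:
    # an MD5 hex digest is always 32 characters, so an over-long name is first cut
    # to max_base_filename_length, then '_', the 32 hash characters, and the file
    # extension are appended, keeping the result unique within the 255-char limit.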
new_file_path = f"{original_parent_directory}{os.sep}{new_filename}{Path(original_file_name).suffix}"
|
||||
stash.Trace(f"(original_file_name={original_file_name})(new_file_path={new_file_path})")
|
||||
if original_file_name == newFilenameWithExt or original_file_name == new_filename:
|
||||
stash.Log(f"Nothing to do, because new file name matches original file name: (newFilenameWithExt={newFilenameWithExt})")
|
||||
return None
|
||||
targetDidExist = True if os.path.isfile(new_file_path) else False
|
||||
try:
|
||||
if move_files:
|
||||
if not dry_run:
|
||||
shutil.move(original_file_path, new_file_path)
|
||||
exitMsg = f"{dry_run_prefix}Moved file to '{new_file_path}' from '{original_file_path}'"
|
||||
else:
|
||||
if not dry_run:
|
||||
os.rename(original_file_path, new_file_path)
|
||||
exitMsg = f"{dry_run_prefix}Renamed file to '{new_file_path}' from '{original_file_path}'"
|
||||
except OSError as e:
|
||||
exitMsg = f"Failed to move/rename file: From {original_file_path} to {new_file_path}. Error: {e}"
|
||||
stash.Error(exitMsg)
|
||||
if not targetDidExist and os.path.isfile(new_file_path):
|
||||
if os.path.isfile(original_file_path):
|
||||
os.remove(original_file_path)
|
||||
pass
|
||||
else:
|
||||
raise
|
||||
|
||||
if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")
|
||||
return new_filename, original_path_info, new_path_info
|
||||
stash.metadata_scan(paths=[original_parent_directory.resolve().as_posix()])
|
||||
stash.Log(exitMsg)
|
||||
return new_filename
|
||||
|
||||
# Main default function for rename scene
def rename_files_task():
    if debugTracing: logger.info("Debug Tracing................")
    # Execute the GraphQL query to fetch all scenes
    scene_result = graphql_request(QUERY_ALL_SCENES)
    if debugTracing: logger.info("Debug Tracing................")
    all_scenes = scene_result.get('data', {}).get('allScenes', [])
    if debugTracing: logger.info("Debug Tracing................")
    scene_result = stash.get_all_scenes()
    all_scenes = scene_result['allScenes']
    if not all_scenes:
        if debugTracing: logger.info("Debug Tracing................")
        log.error("No scenes found.")
        logger.error("No scenes found.")
        stash.Error("No scenes found.")
        exit()
    if debugTracing: logger.info("Debug Tracing................")

    # Find the scene with the latest updated_at timestamp
    latest_scene = max(all_scenes, key=lambda scene: scene['updated_at'])

    # Extract the ID of the latest scene
    latest_scene_id = latest_scene.get('id')

    # Read stash directory from renamefile_settings.py
    stash_directory = config.get('stash_directory', '')
    if debugTracing: logger.info("Debug Tracing................")

    # Rename the latest scene and trigger metadata scan
    new_filename = rename_scene(latest_scene_id, stash_directory)
    if debugTracing: logger.info(f"Debug Tracing (exitMsg={exitMsg})................")

    new_filename = rename_scene(latest_scene_id)
    # Log dry run state and indicate if no changes were made
    if dry_run:
        log.info("Dry run: Script executed in dry run mode. No changes were made.")
        logger.info("Dry run: Script executed in dry run mode. No changes were made.")
        stash.Log("Dry run: Script executed in dry run mode. No changes were made.")
    elif not new_filename:
        logger.info("No changes were made.")
    else:
        logger.info(f"{exitMsg}")
        stash.Log("No changes were made.")
    return

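# Hypothetical plugin input (not part of the commit) that selects the task above
# when Stash invokes the plugin over stdin; the server_connection content is elided.
#   {"server_connection": {...}, "args": {"mode": "rename_files_task"}}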
def fetch_dup_filename_tags(): # Placeholder for a new implementation
    return

if PLUGIN_ARGS_MODE == "fetch_dup_filename_tags":
    fetch_dup_filename_tags()
elif PLUGIN_ARGS_MODE == "rename_files_task":
if stash.PLUGIN_TASK_NAME == "rename_files_task":
    rename_files_task()
elif inputToUpdateScenePost:
    rename_files_task()

if debugTracing: logger.info("\n*********************************\nEXITING ***********************\n*********************************")
stash.Trace("\n*********************************\nEXITING ***********************\n*********************************")

# ToDo: Wish List
#       Add logic to update the SQLite DB on file name change, instead of perform_metadata_scan.
#       Add code to get tags from duplicate filenames
@@ -19,9 +19,9 @@ settings:
    displayName: Include Existing Key Field
    description: Enable to append performer, tags, studios, & galleries even if the name already exists in the original file name.
    type: BOOLEAN
  zafileRenameViaRaname:
    displayName: Rename Instead of Move
    description: Enable to rename the file instead of moving the file. (Not recommended for Windows OS)
  zafileRenameViaMove:
    displayName: Move Instead of Rename
    description: Enable to move the file instead of renaming the file. (Not recommended for Windows OS)
    type: BOOLEAN
  zfieldKeyList:
    displayName: Key Fields

@@ -43,8 +43,6 @@ config = {
    "pathToExclude": "",
    # Define a whitelist of allowed tags or EMPTY to allow all tags. Example Usage: "tag1", "tag2", "tag3"
    "tagWhitelist": "",
    # Define whether files should be renamed when moved
    "rename_files": True,
    # Define whether the original file name should be used if title is empty
    "if_notitle_use_org_filename": True, # Warning: Setting this to False is not recommended.
    # Current Stash DB schema only allows a maximum base file name length of 255
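    # Hypothetical example (not part of the commit): pathToExclude is split on
    # whitespace by the plugin, so multiple excluded paths can be listed as:
    #   "pathToExclude": "B:\\_\\SpecialSet C:\\Temp\\Videos",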