Refactor code for RenameFile plugin

David Maisonave
2024-08-27 19:24:28 -04:00
parent 7063168536
commit aba11888ce
15 changed files with 848 additions and 430 deletions

View File

@@ -42,10 +42,16 @@ stash = StashPluginHelper(
        config=config,
        maxbytes=10*1024*1024,
        )
if len(sys.argv) > 1:
    stash.Log(f"argv = {sys.argv}")
else:
    stash.Trace(f"No command line arguments. JSON_INPUT['args'] = {stash.JSON_INPUT['args']}")
stash.Status(logLevel=logging.DEBUG)
stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
# stash.Trace(f"\nStarting (__file__={__file__}) (stash.CALLED_AS_STASH_PLUGIN={stash.CALLED_AS_STASH_PLUGIN}) (stash.DEBUG_TRACING={stash.DEBUG_TRACING}) (stash.PLUGIN_TASK_NAME={stash.PLUGIN_TASK_NAME})************************************************")
# stash.encodeToUtf8 = True
LOG_STASH_N_PLUGIN = stash.LOG_TO_STASH if stash.CALLED_AS_STASH_PLUGIN else stash.LOG_TO_CONSOLE + stash.LOG_TO_FILE
listSeparator = stash.Setting('listSeparator', ',', notEmpty=True)
addPrimaryDupPathToDetails = stash.Setting('addPrimaryDupPathToDetails')
@@ -285,16 +291,20 @@ def mangeDupFiles(merge=False, deleteDup=False, tagDuplicates=False):
stash.Trace("#########################################################################")
stash.Log(f"Waiting for find_duplicate_scenes_diff to return results; duration_diff={duration_diff}; significantTimeDiff={significantTimeDiff}", printTo=LOG_STASH_N_PLUGIN)
DupFileSets = stash.find_duplicate_scenes_diff(duration_diff=duration_diff)
qtyResults = len(DupFileSets)
stash.Trace("#########################################################################")
for DupFileSet in DupFileSets:
stash.Trace(f"DupFileSet={DupFileSet}")
QtyDupSet+=1
stash.Progress(QtyDupSet, qtyResults)
SepLine = "---------------------------"
DupFileToKeep = ""
DupToCopyFrom = ""
DupFileDetailList = []
for DupFile in DupFileSet:
QtyDup+=1
stash.log.sl.progress(f"Scene ID = {DupFile['id']}")
time.sleep(2)
Scene = stash.find_scene(DupFile['id'])
sceneData = f"Scene = {Scene}"
stash.Trace(sceneData, toAscii=True)
@@ -392,10 +402,13 @@ def deleteTagggedDuplicates():
    QtyFailedQuery = 0
    stash.Trace("#########################################################################")
    sceneIDs = stash.find_scenes(f={"tags": {"value":tagId, "modifier":"INCLUDES"}}, fragment='id')
    stash.Trace(f"Found the following scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
    qtyResults = len(sceneIDs)
    stash.Trace(f"Found {qtyResults} scenes with tag ({duplicateMarkForDeletion}): sceneIDs = {sceneIDs}")
    for sceneID in sceneIDs:
        # stash.Trace(f"Getting scene data for scene ID {sceneID['id']}.")
        QtyDup += 1
        prgs = QtyDup / qtyResults
        stash.Progress(QtyDup, qtyResults)
        scene = stash.find_scene(sceneID['id'])
        if scene == None or len(scene) == 0:
            stash.Warn(f"Could not get scene data for scene ID {sceneID['id']}.")

View File

@@ -299,7 +299,7 @@ class StashPluginHelper(StashInterface):
        if printTo == 0: printTo = self.log_to_err_set
        lineNo = inspect.currentframe().f_back.f_lineno
        self.Log(logMsg, printTo, logging.ERROR, lineNo, toAscii=toAscii)
    def Status(self, printTo = 0, logLevel = logging.INFO, lineNo = -1):
        if printTo == 0: printTo = self.log_to_norm
        if lineNo == -1:
@@ -351,7 +351,65 @@ class StashPluginHelper(StashInterface):
            DestData = self.find_scene(DestData)
        return self._mergeMetadata.merge(SrcData, DestData)
    # Extends class StashInterface with functions which are not yet in the class
    def Progress(self, currentIndex, maxCount):
        progress = (currentIndex / maxCount) if currentIndex < maxCount else (maxCount / currentIndex)
        self.log.progress(progress)
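    # Usage sketch (illustrative only, not part of this commit): assuming "stash" is an
    # initialized StashPluginHelper instance, Progress reports a 0.0-1.0 fraction to the Stash UI,
    # swapping numerator and denominator if the index overshoots the total so the value stays <= 1.0:
    #   stash.Progress(25, 100)   # reports progress 0.25
    #   stash.Progress(120, 100)  # reports ~0.83 instead of 1.2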
    def run_plugin(self, plugin_id, task_mode=None, args:dict={}, asyn=False):
        """Runs a plugin operation.
           The operation is run immediately and does not use the job queue.
        Args:
            plugin_id (ID): plugin_id
            task_mode (str, optional): Plugin task to perform
            args (dict, optional): Arguments to pass to plugin. Plugin access via JSON_INPUT['args']
        Returns:
            A map of the result.
        """
        query = """mutation RunPluginOperation($plugin_id: ID!, $args: Map!) {
            runPluginOperation(plugin_id: $plugin_id, args: $args)
            }"""
        if task_mode != None:
            args.update({"mode" : task_mode})
        variables = {
            "plugin_id": plugin_id,
            "args": args,
        }
        if asyn:
            self.Submit(self.call_GQL, query, variables)
            return f"Made asynchronous call for plugin {plugin_id}"
        else:
            return self.call_GQL(query, variables)
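    # Usage sketch (illustrative only, not part of this commit): assuming "stash" is an
    # initialized StashPluginHelper instance, "DupFileManager" is an installed plugin ID, and
    # "tag_duplicates_task" plus the args shown are hypothetical values defined by that plugin:
    #   result = stash.run_plugin("DupFileManager", task_mode="tag_duplicates_task",
    #                             args={"dry_run": True})
    #   stash.Log(f"runPluginOperation returned: {result}")
    # With asyn=True the call is handed to self.Submit and only a status string is returned.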
    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"
        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']
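    # Usage sketch (illustrative only, not part of this commit): assuming "stash" is an
    # initialized StashPluginHelper instance; the fragment argument replaces ...SceneSlim in
    # the query, so a caller can request extra scene fields in one call, e.g.:
    #   dupSets = stash.find_duplicate_scenes_diff(fragment='id title', duration_diff=3.0)
    #   for dupSet in dupSets:
    #       stash.Trace(f"Duplicate set: {dupSet}")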
    # #################################################################################################
    # The below functions extend class StashInterface with functions which are not yet in the class
    def get_all_scenes(self):
        query_all_scenes = """
            query AllScenes {
                allScenes {
                    id
                    updated_at
                }
            }
        """
        return self.call_GQL(query_all_scenes)
    def metadata_autotag(self, paths:list=[], performers:list=[], studios:list=[], tags:list=[]):
        query = """
        mutation MetadataAutoTag($input:AutoTagMetadataInput!) {
@@ -393,23 +451,6 @@ class StashPluginHelper(StashInterface):
    def rename_generated_files(self):
        return self.call_GQL("mutation MigrateHashNaming {migrateHashNaming}")
    def find_duplicate_scenes_diff(self, distance: PhashDistance=PhashDistance.EXACT, fragment='id', duration_diff: float=10.00 ):
        query = """
            query FindDuplicateScenes($distance: Int, $duration_diff: Float) {
                findDuplicateScenes(distance: $distance, duration_diff: $duration_diff) {
                    ...SceneSlim
                }
            }
        """
        if fragment:
            query = re.sub(r'\.\.\.SceneSlim', fragment, query)
        else:
            query += "fragment SceneSlim on Scene { id }"
        variables = { "distance": distance, "duration_diff": duration_diff }
        result = self.call_GQL(query, variables)
        return result['findDuplicateScenes']
class mergeMetadata: # A class to merge scene metadata from source scene to destination scene
    srcData = None