Initial commit
This commit is contained in:
291
Deprecated/POC/Blender-MaterialsFromNodegroups.py
Normal file
291
Deprecated/POC/Blender-MaterialsFromNodegroups.py
Normal file
@@ -0,0 +1,291 @@
|
||||
import bpy
|
||||
from pathlib import Path
|
||||
import time
|
||||
import os
|
||||
import math
|
||||
|
||||
# Try importing NumPy. The numpy_available flag is consulted by
# calculate_value_from_image(): without NumPy the median statistic is
# unavailable and the mean falls back to a pure-Python loop.
try:
    import numpy as np
    numpy_available = True
    # print("NumPy module found.") # Less verbose
except ImportError:
    print("Warning: NumPy module not found. Median calc disabled, mean uses loop.")
    numpy_available = False
|
||||
|
||||
# --- Configuration ---
# Constants consumed by create_materials_for_library_assets() below.
ASSET_LIBRARY_NAME = "Nodes-Linked" # <<< Name of Asset Library in Prefs
TEMPLATE_MATERIAL_NAME = "Template_PBRMaterial" # <<< Name of template Material in current file
PLACEHOLDER_NODE_LABEL = "PBRSET_PLACEHOLDER" # <<< Label of placeholder node in template mat
ASSET_NAME_PREFIX = "PBRSET_" # <<< Prefix of Node Group assets to process
MATERIAL_NAME_PREFIX = "Mat_" # <<< Prefix for created Materials
THUMBNAIL_PROPERTY_NAME = "thumbnail_filepath" # <<< Custom property name on Node Groups
VALID_EXTENSIONS = {".jpg", ".jpeg", ".png", ".tif", ".tiff"}  # accepted thumbnail image types
DERIVED_MAP_EXTENSIONS = ['.png', '.jpg', '.jpeg', '.tif', '.tiff']  # extensions probed for ROUGH/METAL maps
VIEWPORT_GAMMA = 0.4  # 1/VIEWPORT_GAMMA is the exponent applied when mapping linear medians to viewport sRGB
SCALED_SIZE = (32, 32) # Downscale target size for calculations

# --- >>> SET MATERIAL CREATION LIMIT HERE <<< ---
# Max number of *new* materials created per run (0 = no limit)
MATERIAL_CREATION_LIMIT = 900
# ------------------------------------------------
|
||||
|
||||
# --- Helper Functions ---
|
||||
def find_node_by_label(node_tree, label, node_type=None):
    """Return the first node in *node_tree* whose label equals *label*.

    If *node_type* is given, the node's ``type`` must match it as well.
    Returns None when node_tree is falsy or nothing matches.
    """
    if not node_tree:
        return None
    labelled = (n for n in node_tree.nodes if n.label and n.label == label)
    for candidate in labelled:
        if node_type is None or candidate.type == node_type:
            return candidate
    return None
|
||||
|
||||
def calculate_value_from_image(image, target_size=(64, 64), mode='color', method='median'):
    """Estimate a representative value from a downscaled copy of a Blender image.

    Args:
        image: bpy Image datablock (may be unloaded; data load is forced).
        target_size: (w, h) the temporary copy is scaled to before sampling.
        mode: 'color' -> RGBA tuple from the first three channels;
              'grayscale' -> single float from the first channel.
        method: 'median' (requires NumPy) or 'mean'.

    Returns:
        RGBA tuple, float, or None on any failure. Color results are mapped
        from linear to viewport sRGB via the 1/VIEWPORT_GAMMA exponent.

    The temporary image copy is always removed in the finally block.
    """
    temp_img = None
    if not image: return None
    try:
        if not image.has_data:
            # Touching the pixel buffer forces Blender to load the file data.
            try: _ = len(image.pixels); image.update()
            except Exception: pass
            if not image.has_data: return None # Cannot proceed
        temp_img = image.copy()
        if not temp_img: return None
        temp_img.scale(target_size[0], target_size[1])
        try: _ = len(temp_img.pixels); temp_img.update()
        except Exception: pass # Ignore access error, check has_data
        if not temp_img.has_data: return None
        width = temp_img.size[0]; height = temp_img.size[1]; channels = temp_img.channels
        if width == 0 or height == 0 or channels == 0: return None
        pixels = temp_img.pixels[:]
        result_value = None
        if numpy_available: # Use NumPy
            np_pixels = np.array(pixels)
            num_pixels_actual = len(np_pixels) // channels
            if num_pixels_actual == 0: return None
            pixels_reshaped = np_pixels[:num_pixels_actual * channels].reshape((num_pixels_actual, channels))
            if mode == 'color': # Color Median/Mean (NumPy)
                if channels < 3: return None
                calc_linear = np.median(pixels_reshaped[:, :3], axis=0) if method == 'median' else np.mean(pixels_reshaped[:, :3], axis=0)
                inv_gamma = 1.0 / VIEWPORT_GAMMA
                calc_srgb_clamped = np.clip(np.power(np.clip(calc_linear, 0.0, None), inv_gamma), 0.0, 1.0)
                result_value = (calc_srgb_clamped[0], calc_srgb_clamped[1], calc_srgb_clamped[2], 1.0)
            elif mode == 'grayscale': # Grayscale Median/Mean (NumPy)
                calc_val = np.median(pixels_reshaped[:, 0]) if method == 'median' else np.mean(pixels_reshaped[:, 0])
                result_value = min(max(0.0, calc_val), 1.0)
        elif method == 'mean': # Fallback mean loop (no NumPy)
            actual_len = len(pixels)
            if actual_len == 0: return None
            # BUGFIX: these assignments previously sat on the `return None`
            # line above, making them part of that if-suite, so they never
            # ran on the non-empty path and the next line raised NameError.
            num_pixels_in_buffer = actual_len // channels
            max_elements = num_pixels_in_buffer * channels
            if num_pixels_in_buffer == 0: return None
            if mode == 'color':
                sum_r, sum_g, sum_b = 0.0, 0.0, 0.0
                for i in range(0, max_elements, channels):
                    if i + 2 >= actual_len: break
                    # BUGFIX: the accumulation previously followed `break` on
                    # the same line, so the sums stayed 0.0 (always black).
                    sum_r += pixels[i]; sum_g += pixels[i+1]; sum_b += pixels[i+2]
                avg_r_lin = sum_r / num_pixels_in_buffer
                avg_g_lin = sum_g / num_pixels_in_buffer
                avg_b_lin = sum_b / num_pixels_in_buffer
                inv_gamma = 1.0 / VIEWPORT_GAMMA
                def _to_srgb(v):
                    # Clamp, apply the viewport exponent, clamp to [0, 1].
                    return min(max(0.0, pow(max(0.0, v), inv_gamma)), 1.0)
                result_value = (_to_srgb(avg_r_lin), _to_srgb(avg_g_lin), _to_srgb(avg_b_lin), 1.0)
            elif mode == 'grayscale':
                sum_val = 0.0
                for i in range(0, max_elements, channels):
                    sum_val += pixels[i]
                result_value = min(max(0.0, sum_val / num_pixels_in_buffer), 1.0)
        else:
            print(" Error: NumPy required for median calculation."); return None
        return result_value
    except Exception as e:
        print(f" Error during value calculation for '{image.name}': {e}"); return None
    finally: # Cleanup: always drop the temporary downscaled copy
        if temp_img:
            try: bpy.data.images.remove(temp_img, do_unlink=True)
            except Exception: pass # Ignore cleanup errors
|
||||
|
||||
|
||||
# --- Main Function ---
|
||||
def create_materials_for_library_assets(library_name):
    """Create one material per 'PBRSET_*' node group in an asset library.

    For each .blend file found under the asset library *library_name*:
    link every node group whose name starts with ASSET_NAME_PREFIX, copy
    the template material, swap its placeholder group node for the linked
    group, estimate viewport color/roughness/metallic from the thumbnail
    and derived ROUGH/METAL maps, mark the material as an asset, copy the
    node group's asset tags, and load a custom preview. Stops early after
    MATERIAL_CREATION_LIMIT new materials (0 = unlimited).
    """
    start_time = time.time(); print(f"--- Starting Material Creation for Library '{library_name}' ---")
    print(f"Material Creation Limit per run: {'Unlimited' if MATERIAL_CREATION_LIMIT <= 0 else MATERIAL_CREATION_LIMIT}")
    # (Prerequisite checks: template material with a GROUP placeholder node,
    #  and a registered asset library whose path exists on disk.)
    template_mat=bpy.data.materials.get(TEMPLATE_MATERIAL_NAME); #... etc ...
    if not template_mat or not template_mat.use_nodes or not find_node_by_label(template_mat.node_tree, PLACEHOLDER_NODE_LABEL, 'GROUP'): print("Template Prereq Failed."); return
    library=bpy.context.preferences.filepaths.asset_libraries.get(library_name); #... etc ...
    if not library or not Path(bpy.path.abspath(library.path)).exists(): print("Library Prereq Failed."); return
    print(f"Found template material and library path...")

    # (File scanning: collect .blend files; the library path may be a
    #  directory of .blend files or a single .blend file.)
    materials_created=0; materials_skipped=0; nodegroups_processed=0; link_errors=0; files_to_process=[]; library_path_obj=Path(bpy.path.abspath(library.path))
    #... (populate files_to_process) ...
    if library_path_obj.is_dir():
        for item in library_path_obj.iterdir():
            if item.is_file() and item.suffix.lower() == '.blend': files_to_process.append(str(item))
        if not files_to_process: print(f"Warning: No .blend files found in dir: {library_path_obj}")
    elif library_path_obj.is_file() and library_path_obj.suffix.lower() == '.blend':
        files_to_process.append(str(library_path_obj))
    else: print(f"Error: Library path not dir or .blend: {library_path_obj}"); return
    print(f"Found {len(files_to_process)} .blend file(s) to inspect.")

    # Initialize counters and flag for limit
    created_in_this_run = 0
    limit_reached_flag = False

    for blend_file_path in files_to_process: # ... (inspect loop) ...
        print(f"\nInspecting library file: {os.path.basename(blend_file_path)}...")
        potential_nodegroups = [] # names of node groups available in this file
        try:
            # link=False inside the context manager only *lists* datablocks;
            # nothing is appended because data_to is never assigned here.
            with bpy.data.libraries.load(blend_file_path, link=False) as (data_from, data_to): potential_nodegroups = list(data_from.node_groups)
        except Exception as e_load_inspect: print(f" Error inspecting file '{blend_file_path}': {e_load_inspect}"); continue
        print(f" Found {len(potential_nodegroups)} NGs. Checking for '{ASSET_NAME_PREFIX}'...")

        for asset_nodegroup_name in potential_nodegroups: # ... (NG loop) ...
            if not asset_nodegroup_name.startswith(ASSET_NAME_PREFIX): continue
            nodegroups_processed += 1
            base_name = asset_nodegroup_name.removeprefix(ASSET_NAME_PREFIX)
            material_name = f"{MATERIAL_NAME_PREFIX}{base_name}"
            # Idempotency: skip node groups whose material already exists.
            if bpy.data.materials.get(material_name): materials_skipped += 1; continue

            linked_nodegroup = None; preview_path = None
            try: # --- Start Main Processing Block for NG ---
                # (Linking logic: reuse an already-linked group from the same
                #  library file, otherwise link it now.)
                existing_group = bpy.data.node_groups.get(asset_nodegroup_name); #... etc linking ...
                # NOTE(review): this equality assumes both sides normalize the
                # path identically — confirm on mixed-separator setups.
                is_correctly_linked = (existing_group and existing_group.library and bpy.path.abspath(existing_group.library.filepath) == blend_file_path)
                if is_correctly_linked: linked_nodegroup = existing_group
                else: # Link it
                    with bpy.data.libraries.load(blend_file_path, link=True, relative=False) as (data_from, data_to):
                        if asset_nodegroup_name in data_from.node_groups: data_to.node_groups = [asset_nodegroup_name]
                        else: print(f" Error: NG '{asset_nodegroup_name}' not found during link."); continue # Skip NG
                    linked_nodegroup = bpy.data.node_groups.get(asset_nodegroup_name)
                    if not linked_nodegroup or not linked_nodegroup.library: print(f" Error: NG '{asset_nodegroup_name}' link failed."); linked_nodegroup = None; link_errors += 1
                if not linked_nodegroup: print(f" Failed link NG '{asset_nodegroup_name}'. Skip."); continue # Skip NG

                # Custom property written by a previous script; presumably the
                # path of the 1K COL-1 thumbnail — verify against Script 1.
                preview_path = linked_nodegroup.get(THUMBNAIL_PROPERTY_NAME) # Path to COL-1 1K

                # (Duplicate template, rename, swap placeholder group node.)
                new_material = template_mat.copy(); #... checks ...
                if not new_material: print(f" Error: Failed copy template mat. Skip."); continue
                new_material.name = material_name
                if not new_material.use_nodes or not new_material.node_tree: print(f" Error: New mat '{material_name}' no nodes."); continue
                placeholder_node = find_node_by_label(new_material.node_tree, PLACEHOLDER_NODE_LABEL, 'GROUP'); #... checks ...
                if not placeholder_node: print(f" Error: Placeholder '{PLACEHOLDER_NODE_LABEL}' not found."); continue
                placeholder_node.node_tree = linked_nodegroup
                print(f" Created material '{material_name}' and linked NG '{linked_nodegroup.name}'.")

                # --- Load base COL-1 image once ---
                thumbnail_image = None
                if preview_path and Path(preview_path).is_file():
                    try: thumbnail_image = bpy.data.images.load(preview_path, check_existing=True)
                    except Exception as e_load_base: print(f" Error loading base thumbnail '{preview_path}': {e_load_base}")

                # --- Set Viewport Color (Median) ---
                median_color = None
                if thumbnail_image: median_color = calculate_value_from_image(thumbnail_image, target_size=SCALED_SIZE, mode='color', method='median')
                if median_color: new_material.diffuse_color = median_color; print(f" Set viewport color: {median_color[:3]}")
                else: print(f" Warn: Could not set viewport color.")

                # --- Determine Paths and Metal Map Existence ---
                # ROUGH/METAL siblings are derived by substituting "_COL-1" in
                # the thumbnail stem and probing DERIVED_MAP_EXTENSIONS.
                roughness_path = None; metallic_path = None; metal_map_found = False; #... etc ...
                if preview_path and "_COL-1" in preview_path:
                    try: # ... path derivation logic ...
                        base_path_obj=Path(preview_path); directory=base_path_obj.parent; base_stem=base_path_obj.stem
                        if "_COL-1" in base_stem:
                            rough_stem=base_stem.replace("_COL-1", "_ROUGH")
                            for ext in DERIVED_MAP_EXTENSIONS:
                                potential_path=directory/f"{rough_stem}{ext}"
                                if potential_path.is_file(): roughness_path=str(potential_path); break
                            metal_stem=base_stem.replace("_COL-1", "_METAL")
                            for ext in DERIVED_MAP_EXTENSIONS:
                                potential_path=directory/f"{metal_stem}{ext}"
                                if potential_path.is_file(): metallic_path=str(potential_path); metal_map_found=True; break
                    except Exception as e_derive: print(f" Error deriving paths: {e_derive}")
                if not metal_map_found: print(f" Info: No METAL map found. Assuming Spec/Gloss.")

                # --- Set Viewport Roughness (Median, Conditional Inversion) ---
                median_roughness = None; # ... etc ...
                if roughness_path:
                    # NOTE(review): if the load raises, rough_img is unbound on
                    # the next line; the NameError is swallowed by the outer
                    # generic except for this NG. Same pattern for metal below.
                    try: rough_img = bpy.data.images.load(roughness_path, check_existing=True)
                    except Exception as e_load_rough: print(f" Error loading rough image: {e_load_rough}")
                    if rough_img: median_roughness = calculate_value_from_image(rough_img, target_size=SCALED_SIZE, mode='grayscale', method='median')
                    else: print(f" Error: load None for rough path.")
                if median_roughness is not None:
                    final_roughness_value = median_roughness
                    # Without a METAL map the set is treated as Spec/Gloss, so
                    # the ROUGH channel is really gloss and must be inverted.
                    if not metal_map_found: final_roughness_value = 1.0 - median_roughness; print(f" Inverting ROUGH->Gloss: {median_roughness:.3f} -> {final_roughness_value:.3f}")
                    new_material.roughness = min(max(0.0, final_roughness_value), 1.0); print(f" Set viewport roughness: {new_material.roughness:.3f}")
                else: print(f" Warn: Could not set viewport roughness.")

                # --- Set Viewport Metallic (Median) ---
                median_metallic = None; # ... etc ...
                if metal_map_found:
                    try: metal_img = bpy.data.images.load(metallic_path, check_existing=True)
                    except Exception as e_load_metal: print(f" Error loading metal image: {e_load_metal}")
                    if metal_img: median_metallic = calculate_value_from_image(metal_img, target_size=SCALED_SIZE, mode='grayscale', method='median')
                    else: print(f" Error: load None for metal path.")
                if median_metallic is not None: new_material.metallic = median_metallic; print(f" Set viewport metallic: {median_metallic:.3f}")
                else:
                    new_material.metallic = 0.0 # Default
                    if metal_map_found: print(f" Warn: Could not calc viewport metallic. Set 0.0.")
                    else: print(f" Set viewport metallic to default: 0.0")

                # --- Mark Material as Asset ---
                mat_asset_data = None; # ... (logic remains same) ...
                try: # ... asset marking ...
                    if not new_material.asset_data: new_material.asset_mark(); print(f" Marked material as asset.")
                    mat_asset_data = new_material.asset_data
                except Exception as e_asset: print(f" Error marking mat asset: {e_asset}")

                # --- Copy Asset Tags (node group -> material, skip duplicates) ---
                if mat_asset_data and linked_nodegroup.asset_data: # ... (logic remains same) ...
                    try: # ... tag copying ...
                        source_tags=linked_nodegroup.asset_data.tags; target_tags=mat_asset_data.tags
                        tags_copied_count=0; existing_target_tag_names={t.name for t in target_tags}
                        for src_tag in source_tags:
                            if src_tag.name not in existing_target_tag_names: target_tags.new(name=src_tag.name); tags_copied_count += 1
                        if tags_copied_count > 0: print(f" Copied {tags_copied_count} asset tags.")
                    except Exception as e_tags: print(f" Error copying tags: {e_tags}")

                # --- Set Custom Preview for Material ---
                if preview_path and Path(preview_path).is_file(): # ... (logic remains same) ...
                    try: # ... preview setting via operator with an ID override ...
                        with bpy.context.temp_override(id=new_material): bpy.ops.ed.lib_id_load_custom_preview(filepath=preview_path)
                    except RuntimeError as e_op: print(f" Error running preview op for mat '{new_material.name}': {e_op}")
                    except Exception as e_prev: print(f" Unexpected preview error for mat: {e_prev}")
                elif preview_path: print(f" Warn: Thumb path not found for preview step: '{preview_path}'")

                # --- Increment Counters & Check Limit ---
                materials_created += 1 # Overall counter for summary
                created_in_this_run += 1 # Counter for this run's limit

                # Check limit AFTER successful creation
                if MATERIAL_CREATION_LIMIT > 0 and created_in_this_run >= MATERIAL_CREATION_LIMIT:
                    print(f"\n--- Material Creation Limit ({MATERIAL_CREATION_LIMIT}) Reached ---")
                    limit_reached_flag = True
                    break # Exit inner loop

            except Exception as e: # Catch errors for the whole NG processing block
                print(f" An unexpected error occurred processing NG '{asset_nodegroup_name}': {e}")
            # --- End Main Processing Block for NG ---

        # Check flag to stop outer loop
        if limit_reached_flag:
            print("Stopping library file iteration due to limit.")
            break # Exit outer loop

    # (Completion summary...)
    end_time = time.time(); duration = end_time - start_time; print("\n--- Material Creation Finished ---"); # ... etc ...
    print(f"Duration: {duration:.2f} seconds")
    print(f"Summary: Processed {nodegroups_processed} NGs. Created {materials_created} Mats this run. Skipped {materials_skipped}. Link Errors {link_errors}.")
    if limit_reached_flag: print(f"NOTE: Script stopped early due to creation limit ({MATERIAL_CREATION_LIMIT}). Run again to process more.")
|
||||
|
||||
# --- How to Run ---
|
||||
# 1. Rerun Script 1 to add "thumbnail_filepath" property.
|
||||
# 2. Setup Asset Library in Prefs. Set ASSET_LIBRARY_NAME below.
|
||||
# 3. In current file, create "Template_PBRMaterial" with "PBRSET_PLACEHOLDER" node.
|
||||
# 4. Set MATERIAL_CREATION_LIMIT in Config section above (0 for unlimited).
|
||||
# 5. Paste script & Run (Alt+P).
|
||||
|
||||
if __name__ == "__main__":
    # Only need ASSET_LIBRARY_NAME configuration here now
    # NOTE(review): ASSET_LIBRARY_NAME is set to "Nodes-Linked" above, so this
    # default-value guard can never fire with the current configuration.
    if ASSET_LIBRARY_NAME == "My Asset Library": # Default check
        print("\nERROR: Please update the 'ASSET_LIBRARY_NAME' variable in the script's Configuration section.")
        print(" Set it to the name of your asset library in Blender Preferences before running.\n")
    elif not bpy.data.materials.get(TEMPLATE_MATERIAL_NAME):
        # The template material must already exist in the currently open file.
        print(f"\nERROR: Template material '{TEMPLATE_MATERIAL_NAME}' not found in current file.\n")
    else:
        create_materials_for_library_assets(ASSET_LIBRARY_NAME)
|
||||
988
Deprecated/POC/Blender-NodegroupsFromPBRSETS.py
Normal file
988
Deprecated/POC/Blender-NodegroupsFromPBRSETS.py
Normal file
@@ -0,0 +1,988 @@
|
||||
# Full script - PBR Texture Importer V4 (Manifest, Auto-Save/Reload, Aspect Ratio, Asset Tags)
|
||||
|
||||
import bpy
|
||||
import os # For auto-save rename/remove
|
||||
from pathlib import Path
|
||||
import time
|
||||
import base64
|
||||
import numpy as np # For stats calculation
|
||||
import json # For manifest handling
|
||||
import re # For parsing scaling string
|
||||
|
||||
# --- USER CONFIGURATION ---
# Constants consumed by the importer helpers defined below.
# File Paths & Templates
texture_root_directory = r"G:\02 Content\10-19 Content\13 Textures Power of Two\13.00" # <<< CHANGE THIS PATH!
PARENT_TEMPLATE_NAME = "Template_PBRSET" # Name of the parent node group template
CHILD_TEMPLATE_NAME = "Template_PBRTYPE" # Name of the child node group template

# Processing Limits & Intervals
MAX_NEW_GROUPS_PER_RUN = 1000 # Max NEW parent groups created per run before stopping
SAVE_INTERVAL = 25 # Auto-save interval during NEW group creation (every N groups)

# Features & Behavior
AUTO_SAVE_ENABLED = True # Enable periodic auto-saving (main file + manifest) during processing?
AUTO_RELOAD_ON_FINISH = True # Save and reload the blend file upon successful script completion?

# Naming & Structure Conventions
VALID_EXTENSIONS = {".jpg", ".jpeg", ".png", ".tif", ".tiff"} # Allowed texture file types
RESOLUTION_LABELS = ["1k", "2k", "4k", "8k"] # Expected resolution labels (LOWEST FIRST for aspect/tag calc)
SG_VALUE_NODE_LABEL = "SpecularGlossy" # Label for the Specular/Glossy value node in parent template
HISTOGRAM_NODE_PREFIX = "Histogram-" # Prefix for Combine XYZ nodes storing stats (e.g., "Histogram-ROUGH")
ASPECT_RATIO_NODE_LABEL = "AspectRatioCorrection" # Label for the Value node storing the aspect ratio correction factor

# Texture Map Properties
PBR_COLOR_SPACE_MAP = { # Map PBR type (from filename) to Blender color space
    "AO": "sRGB", "COL-1": "sRGB", "COL-2": "sRGB", "COL-3": "sRGB",
    "DISP": "Non-Color", "NRM": "Non-Color", "REFL": "Non-Color", "ROUGH": "Non-Color",
    "METAL": "Non-Color", "FUZZ": "Non-Color", "MASK": "Non-Color", "SSS": "sRGB",
}
DEFAULT_COLOR_SPACE = "sRGB" # Fallback color space if PBR type not in map

# --- END USER CONFIGURATION ---
|
||||
|
||||
|
||||
# --- Helper Functions ---
|
||||
|
||||
def parse_texture_filename(filename_stem):
    """Split a texture filename stem into its five convention fields.

    Expected form: Tag_Groupname_Resolution_Scaling_PBRType.
    Returns a dict with those keys, or None (with a warning) when the
    stem does not have exactly five underscore-separated parts.
    """
    parts = filename_stem.split('_')
    if len(parts) != 5:
        print(f" Warn: Skip '{filename_stem}' - Expected 5 parts, found {len(parts)}.");
        return None
    field_names = ("Tag", "Groupname", "Resolution", "Scaling", "PBRType")
    return dict(zip(field_names, parts))
|
||||
|
||||
def find_nodes_by_label(node_tree, label, node_type=None):
    """Collect every node in *node_tree* whose label equals *label*.

    When *node_type* is supplied the node's ``type`` must match too.
    Returns an empty list for a falsy node_tree or no matches.
    """
    if not node_tree:
        return []
    return [
        node
        for node in node_tree.nodes
        if node.label and node.label == label
        and (node_type is None or node.type == node_type)
    ]
|
||||
|
||||
def encode_name_b64(name_str):
    """Encode *name_str* as URL-safe Base64 (ASCII) for node group names.

    Falls back to returning the input unchanged if encoding fails.
    """
    try:
        raw = name_str.encode('utf-8')
        return base64.urlsafe_b64encode(raw).decode('ascii')
    except Exception as e:
        print(f" Error base64 encoding '{name_str}': {e}");
        return name_str # Fallback to original name on error
|
||||
|
||||
def calculate_image_stats(image):
    """Return (min, max, median) of the first channel of a Blender image.

    Reads the raw ``image.pixels`` buffer; prints a warning and returns
    None when the image is falsy, has invalid dimensions, or the buffer
    length does not match width * height * channels.
    """
    if not image: return None
    result = None
    try:
        width, height = image.size[0], image.size[1]
        channels = image.channels
        if width == 0 or height == 0 or channels == 0:
            print(f" Warn: Invalid dims for '{image.name}'. Skip stats."); return None
        actual_len = len(image.pixels)
        if width * height * channels != actual_len:
            print(f" Warn: Pixel buffer mismatch for '{image.name}'. Skip stats."); return None
        if actual_len == 0: return None

        # Pull the whole buffer into a flat float32 array in one pass.
        flat = np.fromiter(image.pixels, dtype=np.float32, count=actual_len)

        if channels == 1:
            values = flat
        elif channels >= 2:
            values = flat[0::channels]  # first channel only, via striding
        else:
            return None

        if values is None or values.size == 0:
            print(f" Warn: No value channel for '{image.name}'. Skip stats."); return None

        result = (float(np.min(values)), float(np.max(values)), float(np.median(values)))
    except MemoryError:
        print(f" Error: Not enough memory for stats calc on '{image.name}'.")
    except Exception as e:
        print(f" Error during stats calc for '{image.name}': {e}");
        import traceback; traceback.print_exc()
    return result
|
||||
|
||||
def calculate_aspect_ratio_factor(image_width, image_height, scaling_string):
    """Compute the X-axis UV correction factor for a texture.

    * "EVEN" (any case): the factor is just the image's width/height ratio.
    * "Xnnn" / "Ynnn": non-uniform scaling of nnn percent was applied on
      that axis, so the ratio is divided (X) or multiplied (Y) by nnn/100.
    Invalid input falls back to the plain ratio (or 1.0 for height <= 0),
    with a printed warning.
    """
    if image_height <= 0:
        print(" Warn: Image height is zero, cannot calculate aspect ratio. Returning 1.0.")
        return 1.0

    ratio = image_width / image_height

    if scaling_string.upper() == "EVEN":
        # Uniform scaling (or none): the image's own ratio is the factor.
        return ratio

    match = re.match(r"([XY])(\d+)", scaling_string, re.IGNORECASE)
    if not match:
        print(f" Warn: Invalid Scaling string format '{scaling_string}'. Returning current ratio {ratio:.4f} as fallback.")
        return ratio

    axis = match.group(1).upper()
    try:
        amount = int(match.group(2))
        if amount <= 0:
            print(f" Warn: Zero or negative Amount in Scaling string '{scaling_string}'. Returning current ratio {ratio:.4f}.")
            return ratio
    except ValueError:
        print(f" Warn: Invalid Amount in Scaling string '{scaling_string}'. Returning current ratio {ratio:.4f}.")
        return ratio

    percent = amount / 100.0
    try:
        if axis == 'X':
            if percent == 0: raise ZeroDivisionError
            return ratio / percent
        # Only 'X' or 'Y' can match the pattern, so this is the Y branch.
        return ratio * percent
    except ZeroDivisionError:
        print(f" Warn: Division by zero during factor calculation. Returning current ratio {ratio:.4f}.")
        return ratio
|
||||
|
||||
# --- Manifest Helper Functions ---
|
||||
def get_manifest_path(context_filepath):
    """Path of the companion manifest JSON for a blend file.

    The manifest lives next to the .blend file and is named
    ``<stem>_manifest.json``. Returns None when *context_filepath*
    is empty (unsaved blend file).
    """
    if not context_filepath:
        return None
    blend = Path(context_filepath)
    return blend.parent / f"{blend.stem}_manifest.json"
|
||||
|
||||
def load_manifest(manifest_path):
    """Load manifest data from JSON, returning {} on any failure.

    Missing path, corrupted JSON, and I/O errors all degrade to an
    empty manifest (with a printed error for the latter two).
    """
    if not manifest_path or not manifest_path.exists():
        return {}
    try:
        with open(manifest_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except json.JSONDecodeError:
        print(f"!!! ERROR: Manifest file '{manifest_path.name}' is corrupted. Starting fresh. !!!")
        return {}
    except Exception as e:
        print(f"!!! ERROR: Could not load manifest file '{manifest_path.name}': {e} !!!")
        return {}
    print(f" Loaded manifest from: {manifest_path.name}")
    return data
|
||||
|
||||
def save_manifest(manifest_path, data):
    """Serialize *data* to the manifest JSON file.

    Returns True on success, False when the path is falsy or the
    write fails (failure is loudly printed).
    """
    if not manifest_path:
        return False
    try:
        with open(manifest_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)
    except Exception as e:
        print(f"!!!!!!!!!!!!!!!!!!!\n!!! Manifest save FAILED: {e} !!!\n!!!!!!!!!!!!!!!!!!!")
        return False
    return True
|
||||
|
||||
# --- Auto-Save Helper Function ---
|
||||
def perform_safe_autosave(manifest_path, manifest_data):
    """Safely auto-save the current .blend file together with its manifest.

    Strategy: remove stale ``.bak`` files, rename the current blend and
    manifest to ``.bak``, then write fresh copies. On rename or save
    failure it attempts to roll back so a valid on-disk state survives.

    Args:
        manifest_path: path of the manifest JSON beside the blend file.
        manifest_data: dict serialized via save_manifest().

    Returns:
        True when the blend file was saved (even if only the manifest
        write failed — that case just warns); False when skipped/aborted.
    """
    blend_filepath = bpy.data.filepath
    if not blend_filepath or not manifest_path:
        # An unsaved blend file has no path to back up or overwrite.
        print(" Skipping auto-save: Blend file is not saved.")
        return False

    print(f"\n--- Attempting Auto-Save ({time.strftime('%H:%M:%S')}) ---")
    blend_path = Path(blend_filepath)
    manifest_path_obj = Path(manifest_path) # Ensure it's a Path object

    blend_bak_path = blend_path.with_suffix('.blend.bak')
    manifest_bak_path = manifest_path_obj.with_suffix('.json.bak')

    # 1. Delete old backups if they exist
    try:
        if blend_bak_path.exists():
            blend_bak_path.unlink()
        if manifest_bak_path.exists():
            manifest_bak_path.unlink()
    except OSError as e:
        print(f" Warn: Could not delete old backup file: {e}")
        # Continue anyway, renaming might still work

    # 2. Rename current files to backup
    renamed_blend = False
    renamed_manifest = False
    try:
        if blend_path.exists():
            os.rename(blend_path, blend_bak_path)
            renamed_blend = True
            # print(f" Renamed '{blend_path.name}' to '{blend_bak_path.name}'") # Optional verbose log
        if manifest_path_obj.exists():
            os.rename(manifest_path_obj, manifest_bak_path)
            renamed_manifest = True
            # print(f" Renamed '{manifest_path_obj.name}' to '{manifest_bak_path.name}'") # Optional verbose log
    except OSError as e:
        print(f"!!! ERROR: Failed to rename files for backup: {e} !!!")
        # Attempt to roll back renames if only one succeeded
        if renamed_blend and not renamed_manifest and blend_bak_path.exists():
            print(f" Attempting rollback: Renaming {blend_bak_path.name} back...")
            try:
                os.rename(blend_bak_path, blend_path)
            except OSError as rb_e:
                print(f" Rollback rename of blend file FAILED: {rb_e}")
        if renamed_manifest and not renamed_blend and manifest_bak_path.exists():
            print(f" Attempting rollback: Renaming {manifest_bak_path.name} back...")
            try:
                os.rename(manifest_bak_path, manifest_path_obj)
            except OSError as rb_e:
                print(f" Rollback rename of manifest file FAILED: {rb_e}")
        print("--- Auto-Save ABORTED ---")
        return False

    # 3. Save new main blend file
    save_blend_success = False
    try:
        bpy.ops.wm.save_mainfile()
        print(f" Saved main blend file: {blend_path.name}")
        save_blend_success = True
    except Exception as e:
        print(f"!!!!!!!!!!!!!!!!!!!!!!!!!\n!!! Auto-Save FAILED (Blend File Save): {e} !!!\n!!!!!!!!!!!!!!!!!!!!!!!!!")
        # Attempt to restore from backup
        print(" Attempting to restore from backup...")
        try:
            if blend_bak_path.exists():
                os.rename(blend_bak_path, blend_path)
            if manifest_bak_path.exists():
                os.rename(manifest_bak_path, manifest_path_obj)
            print(" Restored from backup.")
        except OSError as re:
            # NOTE(review): `re` here shadows the module-level `import re`;
            # harmless in this scope, but worth renaming eventually.
            print(f"!!! CRITICAL: Failed to restore from backup after save failure: {re} !!!")
            print(f"!!! Please check for '.bak' files manually in: {blend_path.parent} !!!")
        print("--- Auto-Save ABORTED ---")
        return False

    # 4. Save new manifest file (only if blend save succeeded)
    if save_blend_success:
        if save_manifest(manifest_path, manifest_data):
            print(f" Saved manifest file: {manifest_path_obj.name}")
            print("--- Auto-Save Successful ---")
            return True
        else:
            # Manifest save failed, but blend file is okay. Warn user.
            print("!!! WARNING: Auto-save completed for blend file, but manifest save FAILED. Manifest may be out of sync. !!!")
            return True # Still counts as 'completed' in terms of blend file safety

    return False # Should not be reached
|
||||
|
||||
# --- Asset Tagging Helper Functions ---
|
||||
|
||||
def add_tag_if_new(asset_data, tag_name):
    """Add ``tag_name`` to an asset's tag collection if valid and not present.

    Args:
        asset_data: Blender ``AssetMetaData``-like object exposing ``.tags``
            (iterable of objects with ``.name``, plus ``.new(name)``).
            Falsy values are rejected.
        tag_name: Candidate tag string; surrounding whitespace is stripped.

    Returns:
        bool: True only when a new tag was actually added. False for invalid
        input, an already-existing tag, or a Blender API error.
    """
    # Single validity gate. The previous version tested emptiness twice
    # (once via ``tag_name.strip() == ""`` in the guard, then again after
    # stripping); one strip + one check is equivalent.
    if not asset_data or not isinstance(tag_name, str):
        return False  # Invalid input
    cleaned_tag_name = tag_name.strip()  # Remove leading/trailing whitespace
    if not cleaned_tag_name:
        return False  # Don't add empty tags

    # Check if tag already exists
    if cleaned_tag_name not in [t.name for t in asset_data.tags]:
        try:
            asset_data.tags.new(cleaned_tag_name)
            print(f" + Added Asset Tag: '{cleaned_tag_name}'")
            return True
        except Exception as e:
            print(f" Error adding tag '{cleaned_tag_name}': {e}")
            return False
    else:
        # print(f" Tag '{cleaned_tag_name}' already exists.") # Optional info
        return False  # Not added because it existed
|
||||
|
||||
def get_supplier_tag_from_path(file_path_str, groupname):
    """
    Determine the supplier tag for a texture from its directory structure.

    Assumes the layout is ``.../Supplier/Groupname/file.ext`` (returns the
    grandparent directory name) or ``.../Supplier/file.ext`` (returns the
    parent directory name). The two cases are told apart by comparing the
    file's directory name (case-insensitively) against ``groupname``.

    Args:
        file_path_str: Path string of an existing texture file.
        groupname: Texture set name parsed from the filename.

    Returns:
        str | None: The supplier directory name, or None when the path does
        not point to a file, is too shallow, or an error occurs.
    """
    try:
        file_path = Path(file_path_str).resolve()
        groupname_lower = groupname.lower()

        if not file_path.is_file():
            print(f" Warn (get_supplier_tag): Input path is not a file: {file_path_str}")
            return None

        current_dir = file_path.parent  # Directory containing the file
        parent_dir = current_dir.parent  # Directory potentially containing the 'supplier' name

        # Path objects are always truthy, so the old ``if not current_dir``
        # style guards were dead code. At the filesystem root, ``.parent``
        # returns the same path - that is the real "too shallow" signal.
        if parent_dir == current_dir:
            # Returning current_dir.name here might be unexpected; None is safer.
            print(f" Warn (get_supplier_tag): File path too shallow to determine supplier reliably: {file_path_str}")
            return None

        # Compare the file's directory name with the groupname
        if current_dir.name.lower() == groupname_lower:
            # Structure is likely .../Supplier/Groupname/file.ext
            # Return the name of the directory ABOVE the groupname directory
            return parent_dir.name
        # Structure is likely .../Supplier/file.ext
        # Return the name of the directory CONTAINING the file
        return current_dir.name

    except Exception as e:
        print(f" Error getting supplier tag for {groupname} from path {file_path_str}: {e}")
        return None
|
||||
|
||||
def apply_asset_tags(parent_group, groupname, group_info):
    """Ensure *parent_group* is a marked asset and attach metadata tags.

    Today only the supplier tag is applied, derived from the directory of
    the lowest-resolution texture on disk; additional tag categories (PBR
    types present, filename tags, ...) can be slotted in at the end later.
    """
    if not parent_group:
        return

    # 1. Make sure the group is an asset so asset_data is reachable.
    try:
        if not parent_group.asset_data:
            print(f" Marking '{parent_group.name}' as asset for tagging.")
            parent_group.asset_mark()

        if not parent_group.asset_data:
            # asset_mark() did not give us metadata - nothing more we can do.
            print(f" Error: Could not access asset_data for '{parent_group.name}' after marking.")
            return

        asset_data = parent_group.asset_data
    except Exception as e_mark:
        print(f" Error marking group '{parent_group.name}' as asset: {e_mark}")
        return

    # 2. Supplier tag, derived from the on-disk location of a texture.
    try:
        pbr_maps = group_info.get("pbr_types", {})
        # Walk RESOLUTION_LABELS low-to-high and, at each resolution, every
        # PBR type; the first path found wins (same search order as the old
        # explicit double loop with break flags).
        lowest_res_path = next(
            (
                res_maps[res_label]
                for res_label in RESOLUTION_LABELS
                for res_maps in pbr_maps.values()
                if res_label in res_maps
            ),
            None,
        )

        if lowest_res_path:
            supplier_tag = get_supplier_tag_from_path(lowest_res_path, groupname)
            if supplier_tag:
                add_tag_if_new(asset_data, supplier_tag)
        else:
            print(f" Warn (apply_asset_tags): No image path found for group '{groupname}' to determine supplier tag.")
    except Exception as e_supp:
        print(f" Error during supplier tag processing for '{groupname}': {e_supp}")

    # 3. Placeholder: future tag categories go here, e.g.
    #    add_tag_if_new(asset_data, f"PBR_{pbr_type}") for each type present,
    #    or tags derived from the filename Tag field when it is not a
    #    default like 'T-MR' / 'T-SG'.
|
||||
|
||||
|
||||
# --- Main Processing Function ---
|
||||
def process_textures_to_groups(root_directory):
    """Scans textures, creates/updates node groups based on templates and manifest.

    Walks ``root_directory`` recursively for texture files matching the
    project filename convention, then for each unique Groupname:

    * copies the parent template into a ``PBRSET_<Groupname>`` node group
      (bounded per run by ``MAX_NEW_GROUPS_PER_RUN``, with periodic
      auto-saves every ``SAVE_INTERVAL`` new groups),
    * copies/updates one child group per PBR type, assigns it to the
      matching placeholder node and links it to the parent output socket,
    * loads image datablocks per resolution, sets color spaces, stores
      aspect-ratio / SG flags / histogram stats in helper Value nodes,
    * marks the parent as an asset (thumbnail property, preview, tags),
    * records each completed group/PBR/resolution combo in a JSON manifest
      so re-runs skip already-finished work.

    :param root_directory: Root path (str) of the texture library to scan.
    :return: True on success - including "nothing to do" and "stopped at
        the new-group limit" - False on setup failure (missing directory
        or missing template node groups).
    """
    start_time = time.time()
    print(f"--- Starting Texture Processing ---")
    print(f"Scanning directory: {root_directory}")
    root_path = Path(root_directory)
    if not root_path.is_dir():
        print(f"Error: Directory not found: {root_directory}")
        return False  # Indicate failure

    # --- Manifest Setup ---
    # Manifest tracking is only available when the blend file has been
    # saved (the manifest path is derived from bpy.data.filepath).
    current_blend_filepath = bpy.data.filepath
    manifest_path = get_manifest_path(current_blend_filepath)
    manifest_data = {}
    manifest_enabled = False
    if manifest_path:
        manifest_data = load_manifest(manifest_path)
        manifest_enabled = True
    # Flag will be True if any change requires saving the manifest
    manifest_needs_saving = False
    # --- End Manifest Setup ---

    # --- Load Templates ---
    template_parent = bpy.data.node_groups.get(PARENT_TEMPLATE_NAME)
    template_child = bpy.data.node_groups.get(CHILD_TEMPLATE_NAME)
    if not template_parent:
        print(f"Error: Parent template '{PARENT_TEMPLATE_NAME}' not found.")
        return False
    if not template_child:
        print(f"Error: Child template '{CHILD_TEMPLATE_NAME}' not found.")
        return False
    print(f"Found templates: '{PARENT_TEMPLATE_NAME}', '{CHILD_TEMPLATE_NAME}'")
    # --- End Load Templates ---

    # --- Initialize Data Structures ---
    # Stores {"GroupName": {"pbr_types": {...}, "scaling": "...", "sg": False, "thumb": "..."}}
    texture_data = {}
    file_count, processed_files = 0, 0
    groups_created, groups_updated, child_groups_created, child_groups_updated = 0, 0, 0, 0
    nodes_updated, links_created = 0, 0
    # Cache for image datablocks loaded in THIS RUN only
    # (maps path str -> Image datablock, or None for a cached failure).
    loaded_images_this_run = {}
    # --- End Initialize Data Structures ---

    print("Scanning files...")
    # --- File Scanning ---
    for dirpath, _, filenames in os.walk(root_directory):
        for filename in filenames:
            file_path = Path(dirpath) / filename
            # Check extension
            if file_path.suffix.lower() not in VALID_EXTENSIONS:
                continue

            file_count += 1
            filename_stem = file_path.stem
            parsed = parse_texture_filename(filename_stem)
            if not parsed:
                continue  # Skip if filename doesn't match format

            # Extract parts
            groupname = parsed["Groupname"]
            pbr_type = parsed["PBRType"]
            resolution_label = parsed["Resolution"].lower()
            scaling_str = parsed["Scaling"]
            tag_str = parsed["Tag"].upper()
            file_path_str = str(file_path)

            # Validate resolution label
            # NOTE(review): the '(unknown)' literal below looks garbled -
            # it presumably should interpolate the filename; confirm against
            # the original source.
            if resolution_label not in RESOLUTION_LABELS:
                print(f"Warn: Skip '(unknown)' - Invalid Res '{resolution_label}'. Expected one of {RESOLUTION_LABELS}")
                continue

            # Ensure base structure for group exists in texture_data
            group_entry = texture_data.setdefault(groupname, {
                "pbr_types": {}, "scaling": None, "sg": False, "thumb": None
            })

            # Store texture path under the specific PBR type and resolution
            group_entry["pbr_types"].setdefault(pbr_type, {})[resolution_label] = file_path_str

            # Store scaling string ONCE per groupname (first encountered wins)
            if group_entry["scaling"] is None:
                group_entry["scaling"] = scaling_str
            elif group_entry["scaling"] != scaling_str:
                # Warn only once per group if inconsistency found
                if not group_entry.get("scaling_warning_printed", False):
                    print(f" Warn: Inconsistent 'Scaling' string found for group '{groupname}'. "
                          f"Using first encountered: '{group_entry['scaling']}'.")
                    group_entry["scaling_warning_printed"] = True

            # Track SG status and thumbnail path
            if tag_str == "T-SG":
                group_entry["sg"] = True
            # Use 1k COL-1 as the potential thumbnail source
            if resolution_label == "1k" and pbr_type == "COL-1":
                group_entry["thumb"] = file_path_str

            processed_files += 1
    # --- End File Scanning ---

    print(f"\nFile Scan Complete. Found {file_count} files, parsed {processed_files} valid textures.")
    total_groups_found = len(texture_data)
    print(f"Total unique Groupnames found: {total_groups_found}")
    if not texture_data:
        print("No valid textures found. Exiting.")
        return True  # No work needed is considered success

    print("\n--- Processing Node Groups ---")

    all_groupnames = sorted(list(texture_data.keys()))
    processing_stopped_early = False

    # --- Main Processing Loop ---
    for groupname in all_groupnames:
        group_info = texture_data[groupname]  # Get pre-scanned info
        pbr_types_data = group_info.get("pbr_types", {})
        scaling_string_for_group = group_info.get("scaling")
        sg_status_for_group = group_info.get("sg", False)
        thumbnail_path_for_group = group_info.get("thumb")

        target_parent_name = f"PBRSET_{groupname}"
        print(f"\nProcessing Group: '{target_parent_name}'")

        parent_group = bpy.data.node_groups.get(target_parent_name)
        is_new_parent = False

        # --- Find or Create Parent Group ---
        if parent_group is None:
            # Check batch limit BEFORE creating
            if groups_created >= MAX_NEW_GROUPS_PER_RUN:
                print(f"\n--- Reached NEW parent group limit ({MAX_NEW_GROUPS_PER_RUN}). Stopping. ---")
                processing_stopped_early = True
                break  # Exit the main groupname loop

            print(f" Creating new parent group: '{target_parent_name}'")
            parent_group = template_parent.copy()
            if not parent_group:
                print(f" Error: Failed copy parent template. Skip group '{groupname}'.")
                continue  # Skip to next groupname
            parent_group.name = target_parent_name
            groups_created += 1
            is_new_parent = True

            # --- Auto-Save Trigger ---
            # Trigger AFTER creating the group and incrementing counter
            if AUTO_SAVE_ENABLED and groups_created > 0 and groups_created % SAVE_INTERVAL == 0:
                if perform_safe_autosave(manifest_path, manifest_data):
                    # If auto-save succeeded, manifest is up-to-date on disk
                    manifest_needs_saving = False
                else:
                    # Auto-save failed, continue but warn
                    print("!!! WARNING: Auto-save failed. Continuing processing... !!!")
            # --- End Auto-Save Trigger ---

        else:  # Update Existing Parent Group
            print(f" Updating existing parent group: '{target_parent_name}'")
            groups_updated += 1
        # --- End Find or Create Parent Group ---

        # --- Process Parent Group Internals ---
        # This block processes both newly created and existing parent groups
        try:
            # --- Calculate and Store Aspect Ratio Correction (Once per group) ---
            # Find the designated Value node in the parent template
            aspect_node_list = find_nodes_by_label(parent_group, ASPECT_RATIO_NODE_LABEL, 'VALUE')
            if aspect_node_list:
                aspect_node = aspect_node_list[0]  # Assume first found is correct
                if scaling_string_for_group:
                    # Find the path to the lowest resolution image available
                    lowest_res_path = None; found_res = False
                    # Check resolution labels in configured order (e.g., "1k", "2k"...)
                    for res_label in RESOLUTION_LABELS:
                        # Check all PBR types for this resolution
                        for res_data in pbr_types_data.values():
                            if res_label in res_data:
                                lowest_res_path = res_data[res_label]
                                found_res = True
                                break  # Found path for this resolution label
                        if found_res:
                            break  # Found lowest available resolution path

                    if lowest_res_path:
                        # Load the image (use cache if possible)
                        img = None; img_load_error = False
                        if lowest_res_path in loaded_images_this_run:
                            img = loaded_images_this_run[lowest_res_path]
                            img_load_error = (img is None)  # Check if cached result was failure
                        else:
                            # Attempt to load if not cached
                            try:
                                img_path_obj = Path(lowest_res_path)
                                if img_path_obj.is_file():
                                    img = bpy.data.images.load(lowest_res_path, check_existing=True)
                                else:
                                    img_load_error = True
                                    print(f" Error: Aspect source image not found: {lowest_res_path}")
                                if img is None and not img_load_error:  # Check if load function returned None
                                    img_load_error = True
                                    print(f" Error: Failed loading aspect source image: {lowest_res_path}")
                            except Exception as e_load_aspect:
                                print(f" Error loading aspect source image: {e_load_aspect}")
                                img_load_error = True
                            # Cache the result (image object or None)
                            loaded_images_this_run[lowest_res_path] = img if not img_load_error else None

                        if not img_load_error and img:
                            # Get dimensions and calculate factor
                            img_width, img_height = img.size[0], img.size[1]
                            factor = calculate_aspect_ratio_factor(img_width, img_height, scaling_string_for_group)
                            print(f" Calculated Aspect Ratio Factor: {factor:.4f} (from {img_width}x{img_height}, Scaling='{scaling_string_for_group}')")

                            # Store factor in node if value changed significantly
                            if abs(aspect_node.outputs[0].default_value - factor) > 0.0001:
                                aspect_node.outputs[0].default_value = factor
                                print(f" Set '{ASPECT_RATIO_NODE_LABEL}' node value to {factor:.4f}")
                        else:
                            print(f" Warn: Could not load image '{lowest_res_path}' for aspect ratio calc.")
                    else:
                        print(f" Warn: No suitable image found (e.g., 1k) to calculate aspect ratio for '{groupname}'.")
                else:
                    print(f" Warn: No Scaling string found for group '{groupname}'. Cannot calculate aspect ratio.")
            # else: # Optional Warning if node is missing from template
            #     print(f" Warn: Value node '{ASPECT_RATIO_NODE_LABEL}' not found in parent group '{parent_group.name}'. Cannot store aspect ratio.")
            # --- End Aspect Ratio Correction ---

            # Set SG Value (1.0 = Specular/Gloss workflow, 0.0 otherwise)
            sg_nodes = find_nodes_by_label(parent_group, SG_VALUE_NODE_LABEL, 'VALUE')
            if sg_nodes:
                sg_node = sg_nodes[0]
                target_val = 1.0 if sg_status_for_group else 0.0
                if abs(sg_node.outputs[0].default_value - target_val) > 0.001:
                    sg_node.outputs[0].default_value = target_val
                    print(f" Set '{SG_VALUE_NODE_LABEL}' to: {target_val}")

            # Set Asset Info (Thumbnail Path Prop, Initial Preview & Tagging)
            # This block runs for both new and existing groups
            try:
                # 1. Set/Update Thumbnail Path Property & Mark Asset
                if not parent_group.asset_data:
                    parent_group.asset_mark()
                    print(f" Marked '{parent_group.name}' as asset.")
                # Update thumbnail property logic: keep the custom property in
                # sync with the file on disk, removing it when stale.
                if thumbnail_path_for_group:
                    thumb_path_obj = Path(thumbnail_path_for_group)
                    if thumb_path_obj.is_file():
                        if parent_group.get("thumbnail_filepath") != thumbnail_path_for_group:
                            parent_group["thumbnail_filepath"] = thumbnail_path_for_group
                            if not is_new_parent: print(f" Updated thumbnail path property.")  # Log update only if not new
                    elif "thumbnail_filepath" in parent_group:
                        del parent_group["thumbnail_filepath"]
                        if not is_new_parent: print(f" Removed thumbnail path property (file not found).")
                elif "thumbnail_filepath" in parent_group:
                    del parent_group["thumbnail_filepath"]
                    if not is_new_parent: print(f" Removed old thumbnail path property.")

                # 2. Set Initial Preview (Only if NEW parent)
                if is_new_parent and thumbnail_path_for_group and Path(thumbnail_path_for_group).is_file():
                    print(f" Attempting initial preview from '{Path(thumbnail_path_for_group).name}'...")
                    try:
                        # temp_override points the operator at this datablock
                        with bpy.context.temp_override(id=parent_group):
                            bpy.ops.ed.lib_id_load_custom_preview(filepath=thumbnail_path_for_group)
                        print(f" Set initial custom preview.")
                    except Exception as e_prev:
                        print(f" Preview Error: {e_prev}")

                # 3. Apply Asset Tags (Supplier, etc.)
                apply_asset_tags(parent_group, groupname, group_info)

            except Exception as e_asset_info:
                print(f" Error setting asset info/tags: {e_asset_info}")
            # --- End Asset Info ---

            # --- Process Child Groups (PBR Types) ---
            for pbr_type, resolutions_data in pbr_types_data.items():
                # print(f" Processing PBR Type: {pbr_type}") # Can be verbose

                # Find placeholder node in parent
                holder_nodes = find_nodes_by_label(parent_group, pbr_type, 'GROUP')
                if not holder_nodes:
                    print(f" Warn: No placeholder node labeled '{pbr_type}' in parent group '{parent_group.name}'. Skipping PBR Type.")
                    continue
                holder_node = holder_nodes[0]  # Assume first is correct

                # Determine child group name (Base64 encoded)
                logical_child_name = f"{groupname}_{pbr_type}"
                target_child_name_b64 = encode_name_b64(logical_child_name)

                # Find or Create Child Group
                child_group = bpy.data.node_groups.get(target_child_name_b64)
                if child_group is None:
                    # print(f" Creating new child group for '{pbr_type}'") # Verbose
                    child_group = template_child.copy()
                    if not child_group:
                        print(f" Error: Failed copy child template. Skip PBR Type.")
                        continue
                    child_group.name = target_child_name_b64
                    child_groups_created += 1
                else:
                    # print(f" Updating existing child group for '{pbr_type}'") # Verbose
                    child_groups_updated += 1

                # Assign child group to placeholder if needed
                if holder_node.node_tree != child_group:
                    holder_node.node_tree = child_group
                    print(f" Assigned child group '{child_group.name}' to placeholder '{holder_node.label}'.")

                # Connect placeholder output to parent output socket if needed
                try:
                    source_socket = holder_node.outputs[0] if holder_node.outputs else None
                    group_output_node = next((n for n in parent_group.nodes if n.type == 'GROUP_OUTPUT'), None)
                    target_socket = None
                    if group_output_node:
                        target_socket = group_output_node.inputs.get(pbr_type)  # Get socket by name/label

                    if source_socket and target_socket:
                        # Check if link already exists
                        link_exists = any(link.from_socket == source_socket and link.to_socket == target_socket for link in parent_group.links)
                        if not link_exists:
                            parent_group.links.new(source_socket, target_socket)
                            links_created += 1
                            print(f" Connected '{holder_node.label}' output to parent output socket '{pbr_type}'.")
                    # else: # Optional warning if sockets aren't found
                    #     if not source_socket: print(f" Warn: No output socket found on placeholder '{holder_node.label}'.")
                    #     if not target_socket: print(f" Warn: No input socket '{pbr_type}' found on parent output node.")

                except Exception as e_link:
                    print(f" Error linking sockets for '{pbr_type}': {e_link}")

                # Ensure parent output socket type is Color
                try:
                    item = parent_group.interface.items_tree.get(pbr_type)
                    if item and item.in_out == 'OUTPUT' and item.socket_type != 'NodeSocketColor':
                        item.socket_type = 'NodeSocketColor'
                        # print(f" Set parent output socket '{pbr_type}' type to Color.") # Optional info
                except Exception as e_sock:
                    print(f" Error updating socket type for '{pbr_type}': {e_sock}")

                # --- Process Resolutions within Child Group ---
                for resolution_label, image_path_str in resolutions_data.items():

                    # Find image texture nodes within the CHILD group
                    image_nodes = find_nodes_by_label(child_group, resolution_label, 'TEX_IMAGE')
                    if not image_nodes:
                        # print(f" Warn: No node labeled '{resolution_label}' found in child group for '{pbr_type}'.") # Optional
                        continue

                    # --- >>> Manifest Check <<< ---
                    is_processed = False
                    if manifest_enabled:  # Only check if manifest is enabled
                        # Check if this specific group/pbr/res combo is done
                        processed_resolutions = manifest_data.get(groupname, {}).get(pbr_type, [])
                        if resolution_label in processed_resolutions:
                            is_processed = True
                            # print(f" Skipping {groupname}/{pbr_type}/{resolution_label} (Manifest)") # Verbose skip log

                    if is_processed:
                        continue  # Skip to the next resolution
                    # --- >>> End Manifest Check <<< ---

                    # --- Load Image & Assign (if not skipped) ---
                    # print(f" Processing Resolution: {resolution_label} for {pbr_type}") # Verbose
                    img = None
                    image_load_failed = False

                    # Check intra-run cache first
                    if image_path_str in loaded_images_this_run:
                        img = loaded_images_this_run[image_path_str]
                        image_load_failed = (img is None)  # Respect cached failure
                    else:
                        # Not cached in this run, attempt to load
                        try:
                            image_path = Path(image_path_str)
                            if not image_path.is_file():
                                print(f" Error: Image file not found: {image_path_str}")
                                image_load_failed = True
                            else:
                                # Use check_existing=True to potentially reuse existing datablocks
                                img = bpy.data.images.load(str(image_path), check_existing=True)
                                if not img:
                                    print(f" Error: Failed loading image via bpy.data.images.load: {image_path_str}")
                                    image_load_failed = True
                                # else: # Success block is handled below
                                #     pass
                        except RuntimeError as e_runtime_load:
                            print(f" Runtime Error loading image '{image_path_str}': {e_runtime_load}")
                            image_load_failed = True
                        except Exception as e_gen_load:
                            print(f" Unexpected error loading image '{image_path_str}': {e_gen_load}")
                            image_load_failed = True
                        # Cache result (image object or None for failure)
                        loaded_images_this_run[image_path_str] = img if not image_load_failed else None

                    # --- Process image if loaded/cached successfully ---
                    if not image_load_failed and img:
                        try:
                            # Set Color Space
                            correct_color_space = PBR_COLOR_SPACE_MAP.get(pbr_type, DEFAULT_COLOR_SPACE)
                            if img.colorspace_settings.name != correct_color_space:
                                print(f" Setting '{Path(img.filepath).name}' color space -> {correct_color_space}")
                                img.colorspace_settings.name = correct_color_space

                            # Histogram Stats Calculation (1k ROUGH/DISP only;
                            # results go into a Combine XYZ node on the parent)
                            if resolution_label == "1k" and pbr_type in ["ROUGH", "DISP"]:
                                target_node_label = f"{HISTOGRAM_NODE_PREFIX}{pbr_type}"
                                target_nodes = find_nodes_by_label(parent_group, target_node_label, 'COMBXYZ')
                                if target_nodes:
                                    target_node = target_nodes[0]
                                    try:
                                        socket_x = target_node.inputs.get("X")
                                        socket_y = target_node.inputs.get("Y")
                                        socket_z = target_node.inputs.get("Z")
                                        if socket_x and socket_y and socket_z:
                                            print(f" Calculating histogram stats for {pbr_type} 1K...")
                                            stats = calculate_image_stats(img)
                                            if stats:
                                                min_val, max_val, median_val = stats
                                                print(f" Stats: Min={min_val:.4f}, Max={max_val:.4f}, Median={median_val:.4f}")
                                                # Store stats in the Combine XYZ node
                                                socket_x.default_value = min_val
                                                socket_y.default_value = max_val
                                                socket_z.default_value = median_val
                                                print(f" Stored stats in '{target_node_label}'.")
                                            else:
                                                print(f" Warn: Failed calc stats for '{Path(img.filepath).name}'.")
                                        # else: print(f" Warn: Node '{target_node_label}' missing X/Y/Z sockets.")
                                    except Exception as e_combxyz_store:
                                        print(f" Error processing stats in '{target_node_label}': {e_combxyz_store}")
                                # else: print(f" Warn: No stats node '{target_node_label}' found.")

                            # Assign Image to nodes in child group
                            nodes_updated_this_res = 0
                            for image_node in image_nodes:
                                if image_node.image != img:
                                    image_node.image = img
                                    nodes_updated_this_res += 1
                            nodes_updated += nodes_updated_this_res
                            if nodes_updated_this_res > 0:
                                print(f" Assigned image '{Path(img.filepath).name}' to {nodes_updated_this_res} node(s).")

                            # --- >>> Update Manifest <<< ---
                            if manifest_enabled:
                                # Ensure nested structure exists
                                manifest_data.setdefault(groupname, {}).setdefault(pbr_type, [])
                                # Add resolution if not already present
                                if resolution_label not in manifest_data[groupname][pbr_type]:
                                    manifest_data[groupname][pbr_type].append(resolution_label)
                                    # Keep the list sorted for consistency in the JSON file
                                    manifest_data[groupname][pbr_type].sort()
                                    manifest_needs_saving = True  # Mark that we need to save later
                                    # print(f" Marked {groupname}/{pbr_type}/{resolution_label} processed in manifest.") # Verbose
                            # --- >>> End Update Manifest <<< ---

                        except Exception as e_proc_img:
                            print(f" Error during post-load processing for image '{image_path_str}': {e_proc_img}")
                            # Continue to next resolution even if post-load fails
                    # --- End Process image ---
                # --- End Resolution Loop ---
            # --- End PBR Type Loop ---
        except Exception as e_group:
            print(f" !!! ERROR processing group '{groupname}': {e_group} !!!")
            import traceback; traceback.print_exc()
            continue  # Continue to next groupname

    # --- End Main Processing Loop ---

    # --- Final Manifest Save ---
    # Save if manifest is enabled AND changes were made since the last save/start.
    # This happens even if the script stopped early due to MAX_NEW_GROUPS_PER_RUN.
    if manifest_enabled and manifest_needs_saving:
        print("\n--- Attempting Final Manifest Save (End of Run) ---")
        if save_manifest(manifest_path, manifest_data):
            print(" Manifest saved successfully.")
        # Error message handled within save_manifest
    # --- End Final Manifest Save ---

    # --- Final Summary ---
    end_time = time.time(); duration = end_time - start_time
    print("\n--- Script Run Finished ---")
    if processing_stopped_early:
        print(f"--- NOTE: Reached NEW parent group processing limit ({MAX_NEW_GROUPS_PER_RUN}). ---")
        print(f"--- You may need to SAVE manually, REVERT/RELOAD file, and RUN SCRIPT AGAIN. ---")
    print(f"Duration: {duration:.2f} seconds this run.")
    print(f"Summary: New Parents={groups_created}, Updated Parents={groups_updated}, New Children={child_groups_created}, Updated Children={child_groups_updated}.")
    print(f" Images assigned={nodes_updated} times. Links created={links_created}.")
    # Add other stats if needed, e.g., number of tags added
    # --- End Final Summary ---

    return True  # Indicate successful completion (or reaching limit)
|
||||
|
||||
|
||||
# --- How to Run ---
|
||||
# 1. Ensure 'numpy' is available in Blender's Python environment.
|
||||
# 2. Create Node Group "Template_PBRSET": Configure placeholders, Value nodes (SG, Aspect Ratio), Stats nodes, outputs.
|
||||
# 3. Create Node Group "Template_PBRTYPE": Configure Image Texture nodes labeled by resolution.
|
||||
# 4. !! SAVE YOUR BLEND FILE AT LEAST ONCE !! for manifest, auto-saving, and auto-reloading to work.
|
||||
# 5. Adjust variables in the '--- USER CONFIGURATION ---' section at the top as needed.
|
||||
# 6. Paste into Blender's Text Editor and run (Alt+P or Run Script button). Check Window -> Toggle System Console.
|
||||
# 7. If script stops due to limit: SAVE manually, REVERT/REOPEN file, RUN SCRIPT AGAIN. Manifest prevents reprocessing.
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: validate configuration, run the processor, then
    # optionally save & reload the blend file.
    print(f"Script execution started at: {time.strftime('%Y-%m-%d %H:%M:%S')}")

    # Pre-run Checks using variables from CONFIG section
    valid_run_setup = True
    try:
        tex_dir_path = Path(texture_root_directory)
        # Basic check if path looks like a placeholder or doesn't exist
        if texture_root_directory == r"C:\path\to\your\texture\library" or not tex_dir_path.is_dir() :
            print(f"\nERROR: 'texture_root_directory' is invalid or a placeholder.")
            print(f" Current value: '{texture_root_directory}'")
            valid_run_setup = False
    except Exception as e_path:
        print(f"\nERROR checking texture_root_directory: {e_path}")
        valid_run_setup = False

    # Check templates
    if not bpy.data.node_groups.get(PARENT_TEMPLATE_NAME):
        print(f"\nERROR: Parent template node group '{PARENT_TEMPLATE_NAME}' not found.")
        valid_run_setup = False
    if not bpy.data.node_groups.get(CHILD_TEMPLATE_NAME):
        print(f"\nERROR: Child template node group '{CHILD_TEMPLATE_NAME}' not found.")
        valid_run_setup = False

    # Check numpy (needed for stats)
    try:
        import numpy
    except ImportError:
        print("\nCRITICAL ERROR: Python library 'numpy' not found (required for image stats).")
        print(" Please install numpy into Blender's Python environment.")
        valid_run_setup = False

    # Execute main function if setup checks pass
    script_completed_successfully = False
    if valid_run_setup:
        # Check if file is saved before running features that depend on it
        # (warning only - the processor itself still runs)
        if not bpy.data.filepath:
            print("\nWARNING: Blend file not saved. Manifest, Auto-Save, and Auto-Reload features disabled.")
        script_completed_successfully = process_textures_to_groups(texture_root_directory)
    else:
        print("\nScript aborted due to configuration errors.")

    # --- Final Save & Reload ---
    # Use config variables directly as they are in module scope
    if script_completed_successfully and AUTO_RELOAD_ON_FINISH:
        if bpy.data.filepath:  # Only if file is saved
            print("\n--- Auto-saving and reloading blend file ---")
            try:
                bpy.ops.wm.save_mainfile()
                print(" Blend file saved.")
                print(" Reloading...")
                # Ensure script execution stops cleanly before reload starts
                bpy.ops.wm.open_mainfile(filepath=bpy.data.filepath)
                # Script execution effectively stops here upon reload
            except Exception as e:
                print(f"!!! ERROR during final save/reload: {e} !!!")
        else:
            print("\nSkipping final save & reload because the blend file is not saved.")
    # --- End Final Save & Reload ---

    # This print might not be reached if reload occurs
    print(f"Script execution finished processing at: {time.strftime('%Y-%m-%d %H:%M:%S')}")
|
||||
1639
Deprecated/POC/Standalonebatcher-Main.py
Normal file
1639
Deprecated/POC/Standalonebatcher-Main.py
Normal file
File diff suppressed because it is too large
Load Diff
90
Deprecated/detailed_documentation_plan.md
Normal file
@@ -0,0 +1,90 @@
|
||||
# Final Plan for Updating documentation.txt with Debugging Details
|
||||
|
||||
This document outlines the final plan for enhancing `documentation.txt` with detailed internal information crucial for debugging the Asset Processor Tool. This plan incorporates analysis of `asset_processor.py`, `configuration.py`, `main.py`, and `gui/processing_handler.py`.
|
||||
|
||||
## Task Objective
|
||||
|
||||
Analyze relevant source code (`asset_processor.py`, `configuration.py`, `main.py`, `gui/processing_handler.py`) to gather specific details about internal logic, state management, error handling, data structures, concurrency, resource management, and limitations. Integrate this information into the existing `documentation.txt` to aid developers in debugging.
|
||||
|
||||
## Analysis Steps Completed
|
||||
|
||||
1. Read and analyzed `readme.md`.
|
||||
2. Listed code definitions in the root directory (`.`).
|
||||
3. Listed code definitions in the `gui/` directory.
|
||||
4. Read and analyzed `asset_processor.py`.
|
||||
5. Read and analyzed `configuration.py`.
|
||||
6. Read and analyzed `main.py`.
|
||||
7. Read and analyzed `gui/processing_handler.py`.
|
||||
|
||||
## Final Integration Plan (Merged Structure)
|
||||
|
||||
1. **Add New Top-Level Section:**
|
||||
* Append the following section header to the end of `documentation.txt`:
|
||||
```
|
||||
================================
|
||||
Internal Details for Debugging
|
||||
================================
|
||||
```
|
||||
|
||||
2. **Create Subsections:**
|
||||
* Under the new "Internal Details" section, create the following subsections (renumbering from 7 onwards):
|
||||
* `7. Internal Logic & Algorithms`
|
||||
* `8. State Management`
|
||||
* `9. Error Handling & Propagation`
|
||||
* `10. Key Data Structures`
|
||||
* `11. Concurrency Models (CLI & GUI)`
|
||||
* `12. Resource Management`
|
||||
* `13. Known Limitations & Edge Cases`
|
||||
|
||||
3. **Populate Subsections with Specific Details:**
|
||||
|
||||
* **7. Internal Logic & Algorithms:**
|
||||
* **Configuration Preparation (`Configuration` class):** Detail the `__init__` process: loading `config.py` and preset JSON, validating structure (`_validate_configs`), compiling regex (`_compile_regex_patterns` using `_fnmatch_to_regex`). Mention compiled patterns storage.
|
||||
* **CLI Argument Parsing (`main.py:setup_arg_parser`):** Briefly describe `argparse` usage and key flags influencing execution.
|
||||
* **Output Directory Resolution (`main.py:main`):** Explain how the final output path is determined and resolved.
|
||||
* **Asset Processing (`AssetProcessor` class):**
|
||||
* *Classification (`_inventory_and_classify_files`):* Describe multi-pass approach using compiled regex from `Configuration`. Detail variant sorting criteria.
|
||||
* *Map Processing (`_process_maps`):* Detail image loading, Gloss->Rough inversion, resizing, format determination (using `Configuration` rules), bit depth conversion, stats calculation, aspect ratio logic, save fallback.
|
||||
* *Merging (`_merge_maps`):* Detail common resolution finding, input loading, channel merging (using `Configuration` rules), output determination, save fallback.
|
||||
* *Metadata (`_determine_base_metadata`, etc.):* Summarize base name extraction, category/archetype determination (using `Configuration` rules), `metadata.json` population.
|
||||
* **Blender Integration (`main.py:run_blender_script`, `gui/processing_handler.py:_run_blender_script_subprocess`):** Explain subprocess execution, command construction (`-b`, `--python`, `--`), argument passing (`asset_root_dir`).
|
||||
|
||||
* **8. State Management:**
|
||||
* `Configuration` object: Holds loaded config state and compiled regex. Instantiated per worker.
|
||||
* `AssetProcessor`: Primarily stateless between `process()` calls. Internal state within `process()` managed by local variables (e.g., `current_asset_metadata`). `self.classified_files` populated once and filtered per asset.
|
||||
* `main.py`: Tracks overall CLI run counts (processed, skipped, failed).
|
||||
* `gui/processing_handler.py`: Manages GUI processing run state via flags (`_is_running`, `_cancel_requested`) and stores `Future` objects (`_futures`).
|
||||
|
||||
* **9. Error Handling & Propagation:**
|
||||
* Custom Exceptions: `AssetProcessingError`, `ConfigurationError`.
|
||||
* `Configuration`: Raises `ConfigurationError` on load/validation failure. Logs regex compilation warnings.
|
||||
* `AssetProcessor`: Catches exceptions within per-asset loop, logs error, marks asset "failed", continues loop. Handles specific save fallbacks (EXR->PNG). Raises `AssetProcessingError` for critical setup failures.
|
||||
* Worker Wrapper (`main.py:process_single_asset_wrapper`): Catches exceptions from `Configuration`/`AssetProcessor`, logs, returns "failed" status tuple.
|
||||
* Process Pool (`main.py`, `gui/processing_handler.py`): `try...except` around executor block catches pool-level errors.
|
||||
* GUI Communication (`ProcessingHandler`): Catches errors during future result retrieval, emits failure status via signals.
|
||||
* Blender Scripts: Checks subprocess return code, logs stderr. Catches `FileNotFoundError`.
|
||||
|
||||
* **10. Key Data Structures:**
|
||||
* `Configuration` attributes: `compiled_map_keyword_regex`, `compiled_extra_regex`, etc. (compiled `re.Pattern` objects).
|
||||
* `AssetProcessor` structures: `self.classified_files` (dict[str, list[dict]]), `processed_maps_details_asset` (dict[str, dict[str, dict]]), `file_to_base_name_map` (dict[Path, Optional[str]]).
|
||||
* Return values: Status dictionary from `AssetProcessor.process()`, status tuple from `process_single_asset_wrapper`.
|
||||
* `ProcessingHandler._futures`: dict[Future, str].
|
||||
|
||||
* **11. Concurrency Models (CLI & GUI):**
|
||||
* **Common Core:** Both use `concurrent.futures.ProcessPoolExecutor` running `main.process_single_asset_wrapper`. `Configuration` and `AssetProcessor` instantiated within each worker process for isolation.
|
||||
* **CLI Orchestration (`main.py:run_processing`):** Direct executor usage, `as_completed` gathers results synchronously.
|
||||
* **GUI Orchestration (`gui/processing_handler.py`):** `ProcessingHandler` (QObject) runs executor logic in a `QThread`. Results processed in handler thread, communicated asynchronously to UI thread via Qt signals (`progress_updated`, `file_status_updated`, `processing_finished`).
|
||||
* **Cancellation (`gui/processing_handler.py:request_cancel`):** Sets flag, attempts `executor.shutdown(wait=False)`, tries cancelling pending futures. Limitation: Does not stop already running workers.
|
||||
|
||||
* **12. Resource Management:**
|
||||
* `Configuration`: Uses `with open` for preset files.
|
||||
* `AssetProcessor`: Manages temporary workspace (`tempfile.mkdtemp`, `shutil.rmtree` in `finally`). Uses `with open` for metadata JSON.
|
||||
* `ProcessPoolExecutor`: Lifecycle managed via `with` statement in `main.py` and `gui/processing_handler.py`.
|
||||
|
||||
* **13. Known Limitations & Edge Cases:**
|
||||
* Configuration: Basic structural validation only; regex compilation errors are warnings; `_fnmatch_to_regex` helper is basic.
|
||||
* AssetProcessor: Relies heavily on filename patterns; high memory potential for large images; limited intra-asset error recovery; simplified prediction logic.
|
||||
* CLI: Basic preset file existence check only before starting workers; Blender executable finding logic order (config > PATH).
|
||||
* GUI Concurrency: Cancellation doesn't stop currently executing worker processes.
|
||||
|
||||
4. **Switch Mode:** Request switching to Code mode to apply these changes by appending to `documentation.txt`.
|
||||
269
Deprecated/documentation.txt
Normal file
@@ -0,0 +1,269 @@
|
||||
================================
|
||||
Asset Processor Tool - Developer Documentation
|
||||
================================
|
||||
|
||||
This document provides a concise overview of the Asset Processor Tool's codebase for developers joining the project. It focuses on the architecture, key components, and development workflow.
|
||||
|
||||
**NOTE:** This documentation strictly excludes details on environment setup, dependency installation, building the project, or deployment procedures. It assumes familiarity with Python and the relevant libraries (OpenCV, NumPy, PySide6).
|
||||
|
||||
--------------------------------
|
||||
1. Project Overview
|
||||
--------------------------------
|
||||
|
||||
* **Purpose:** To process 3D asset source files (texture sets, models, etc., typically from ZIP archives or folders) into a standardized library format.
|
||||
* **Core Functionality:** Uses configurable JSON presets to interpret different asset sources, automating tasks like file classification, image resizing, channel merging, and metadata generation.
|
||||
* **High-Level Architecture:** Consists of a core processing engine (`AssetProcessor`), a configuration system handling presets (`Configuration`), multiple interfaces (GUI, CLI, Directory Monitor), and optional integration with Blender for automated material/nodegroup creation.
|
||||
|
||||
--------------------------------
|
||||
2. Codebase Structure
|
||||
--------------------------------
|
||||
|
||||
Key files and directories:
|
||||
|
||||
* `asset_processor.py`: Contains the `AssetProcessor` class, the core logic for processing a single asset through the pipeline. Includes methods for classification, map processing, merging, metadata generation, and output organization. Also provides methods for predicting output structure used by the GUI.
|
||||
* `configuration.py`: Defines the `Configuration` class. Responsible for loading core settings from `config.py` and merging them with a specified preset JSON file (`Presets/*.json`). Pre-compiles regex patterns from presets for efficiency.
|
||||
* `config.py`: Stores global default settings, constants, and core rules (e.g., standard map types, default resolutions, merge rules, output format rules, Blender paths).
|
||||
* `main.py`: Entry point for the Command-Line Interface (CLI). Handles argument parsing, logging setup, parallel processing orchestration (using `concurrent.futures.ProcessPoolExecutor`), calls `AssetProcessor` via a wrapper function, and optionally triggers Blender scripts.
|
||||
* `monitor.py`: Implements the automated directory monitoring feature using the `watchdog` library. Contains the `ZipHandler` class to detect new ZIP files and trigger processing via `main.run_processing`.
|
||||
* `gui/`: Directory containing all code related to the Graphical User Interface (GUI), built with PySide6.
|
||||
* `main_window.py`: Defines the `MainWindow` class, the main application window structure, UI layout (preset editor, processing panel, drag-and-drop, preview table, controls), event handling (button clicks, drag/drop), and menu setup. Manages GUI-specific logging (`QtLogHandler`).
|
||||
* `processing_handler.py`: Defines the `ProcessingHandler` class (runs on a `QThread`). Manages the execution of the main asset processing pipeline (using `ProcessPoolExecutor`) and Blender script execution in the background to keep the GUI responsive. Communicates progress and results back to the `MainWindow` via signals.
|
||||
* `prediction_handler.py`: Defines the `PredictionHandler` class (runs on a `QThread`). Manages background file analysis/preview generation by calling `AssetProcessor.get_detailed_file_predictions()`. Sends results back to the `MainWindow` via signals to update the preview table.
|
||||
* `preview_table_model.py`: Defines `PreviewTableModel` (inherits `QAbstractTableModel`) and `PreviewSortFilterProxyModel` for managing and displaying data in the GUI's preview table, including custom sorting logic.
|
||||
* `blenderscripts/`: Contains Python scripts (`create_nodegroups.py`, `create_materials.py`) designed to be executed *within* Blender, typically triggered by the main tool after processing to automate PBR nodegroup and material setup in `.blend` files.
|
||||
* `Presets/`: Contains supplier-specific configuration files in JSON format (e.g., `Poliigon.json`). These define rules for interpreting asset filenames, classifying maps, handling variants, etc. `_template.json` serves as a base for new presets.
|
||||
* `Testfiles/`: Contains example input assets for testing purposes.
|
||||
* `Tickets/`: Directory for issue and feature tracking using Markdown files.
|
||||
|
||||
--------------------------------
|
||||
3. Key Components/Modules
|
||||
--------------------------------
|
||||
|
||||
* **`AssetProcessor` (`asset_processor.py`):** The heart of the tool. Orchestrates the entire processing pipeline for a single input asset (ZIP or folder). Responsibilities include workspace management, file classification, metadata extraction, map processing (resizing, format conversion), channel merging, `metadata.json` generation, and organizing final output files.
|
||||
* **`Configuration` (`configuration.py`):** Manages the loading and merging of configuration settings. Takes a preset name, loads defaults from `config.py`, loads the specified `Presets/*.json`, merges them, validates settings, and pre-compiles regex patterns defined in the preset for efficient use by `AssetProcessor`.
|
||||
* **`MainWindow` (`gui/main_window.py`):** The main class for the GUI application. Sets up the UI layout, connects user actions (button clicks, drag/drop) to slots, manages the preset editor, interacts with background handlers (`ProcessingHandler`, `PredictionHandler`) via signals/slots, and displays feedback (logs, progress, status).
|
||||
* **`ProcessingHandler` (`gui/processing_handler.py`):** Handles the execution of the core asset processing logic and Blender scripts in a background thread for the GUI. Manages the `ProcessPoolExecutor` for parallel asset processing and communicates progress/results back to the `MainWindow`.
|
||||
* **`PredictionHandler` (`gui/prediction_handler.py`):** Handles the generation of file classification previews in a background thread for the GUI. Calls `AssetProcessor`'s prediction methods and sends results back to the `MainWindow` to populate the preview table without blocking the UI.
|
||||
* **`ZipHandler` (`monitor.py`):** A `watchdog` event handler used by `monitor.py`. Detects newly created ZIP files in the monitored input directory, validates the filename format (for preset extraction), and triggers the main processing logic via `main.run_processing`.
|
||||
|
||||
--------------------------------
|
||||
4. Core Concepts & Data Flow
|
||||
--------------------------------
|
||||
|
||||
* **Preset-Driven Configuration:**
|
||||
* Global defaults are set in `config.py`.
|
||||
* Supplier-specific rules (filename patterns, map keywords, variant handling, etc.) are defined using regex in `Presets/*.json` files.
|
||||
* The `Configuration` class loads `config.py` and merges it with the selected preset JSON, providing a unified configuration object to the `AssetProcessor`. Regex patterns are pre-compiled during `Configuration` initialization for performance.
|
||||
|
||||
* **Asset Processing Pipeline (Simplified Flow):**
|
||||
1. **Workspace Setup:** Create a temporary directory.
|
||||
2. **Extract/Copy:** Extract ZIP or copy folder contents to the workspace.
|
||||
3. **Classify Files:** Scan workspace, use compiled regex from `Configuration` to classify files (Map, Model, Extra, Ignored, Unrecognized). Handle 16-bit variants and assign suffixes based on rules.
|
||||
4. **Determine Metadata:** Extract asset name, category, archetype based on preset rules.
|
||||
5. **Skip Check:** If overwrite is false, check if output already exists; if so, skip this asset.
|
||||
6. **Process Maps:** Load images, resize (no upscale), convert format/bit depth based on complex rules (`config.py` and preset), handle Gloss->Roughness inversion, calculate stats, determine aspect ratio change. Save processed maps.
|
||||
7. **Merge Maps:** Combine channels from different processed maps based on `MAP_MERGE_RULES` in `config.py`. Save merged maps.
|
||||
8. **Generate `metadata.json`:** Collect all relevant information (map details, stats, aspect ratio, category, etc.) and write to `metadata.json` in the workspace.
|
||||
9. **Organize Output:** Create the final output directory structure (`<output_base>/<supplier>/<asset_name>/`) and move processed maps, merged maps, models, `metadata.json`, Extra files, and Ignored files into it.
|
||||
10. **Cleanup Workspace:** Delete the temporary directory.
|
||||
11. **(Optional) Blender Scripts:** If triggered via CLI/GUI, execute `blenderscripts/*.py` using the configured Blender executable via a subprocess.
|
||||
|
||||
* **Parallel Processing:**
|
||||
* Multiple input assets are processed concurrently using `concurrent.futures.ProcessPoolExecutor`.
|
||||
* This pool is managed by `main.py` (CLI) or `gui/processing_handler.py` (GUI).
|
||||
* Each asset runs in an isolated worker process, ensuring separate `Configuration` and `AssetProcessor` instances.
|
||||
|
||||
* **GUI Interaction & Threading:**
|
||||
* The GUI (`PySide6`) uses `QThread` to run `ProcessingHandler` (asset processing) and `PredictionHandler` (file preview generation) in the background, preventing the UI from freezing.
|
||||
* Communication between the main UI thread (`MainWindow`) and background threads relies on Qt's signals and slots mechanism for thread safety (e.g., updating progress, status messages, preview table data).
|
||||
* `PredictionHandler` calls `AssetProcessor` methods to get file classification details, which are then sent back to `MainWindow` to populate the `PreviewTableModel`.
|
||||
|
||||
* **Output (`metadata.json`):**
|
||||
* A key output file generated for each processed asset.
|
||||
* Contains structured data about the asset: map filenames, resolutions, formats, bit depths, merged map details, calculated image statistics, aspect ratio change info, asset category/archetype, source preset used, list of ignored source files, etc. This file is intended for use by downstream tools or scripts (like the Blender integration scripts).
|
||||
|
||||
--------------------------------
|
||||
5. Development Workflow
|
||||
--------------------------------
|
||||
|
||||
* **Modifying Core Processing Logic:** Changes to how assets are classified, maps are processed/resized/converted, channels are merged, or metadata is generated typically involve editing the `AssetProcessor` class in `asset_processor.py`.
|
||||
* **Changing Global Settings/Rules:** Adjustments to default output paths, standard resolutions, default format rules, map merge definitions, or Blender paths should be made in `config.py`.
|
||||
* **Adding/Modifying Supplier Rules:** To add support for a new asset source or change how an existing one is interpreted, create or edit the corresponding JSON file in the `Presets/` directory. Refer to `_template.json` and existing presets for structure. Focus on defining accurate regex patterns in `map_type_mapping`, `bit_depth_variants`, `model_patterns`, `source_naming_convention`, etc.
|
||||
* **Adjusting CLI Behavior:** Changes to command-line arguments, argument parsing, or the overall CLI workflow are handled in `main.py`.
|
||||
* **Modifying the GUI:** UI layout changes, adding new controls, altering event handling, or modifying background task management for the GUI involves working within the `gui/` directory, primarily `main_window.py`, `processing_handler.py`, and `prediction_handler.py`. UI elements are built using PySide6 widgets.
|
||||
* **Enhancing Blender Integration:** Improvements or changes to how nodegroups or materials are created in Blender require editing the Python scripts within the `blenderscripts/` directory. Consider how these scripts are invoked and what data they expect (primarily from `metadata.json` and command-line arguments passed via subprocess calls in `main.py` or `gui/processing_handler.py`).
|
||||
|
||||
--------------------------------
|
||||
6. Coding Conventions
|
||||
--------------------------------
|
||||
|
||||
* **Object-Oriented:** The codebase heavily utilizes classes (e.g., `AssetProcessor`, `Configuration`, `MainWindow`, various Handlers).
|
||||
* **Type Hinting:** Python type hints are used throughout the code for clarity and static analysis.
|
||||
* **Logging:** Standard Python `logging` module is used for logging messages at different levels (DEBUG, INFO, WARNING, ERROR). The GUI uses a custom `QtLogHandler` to display logs in the UI console.
|
||||
* **Error Handling:** Uses standard `try...except` blocks and defines some custom exceptions (e.g., `ConfigurationError`, `AssetProcessingError`).
|
||||
* **Parallelism:** Uses `concurrent.futures.ProcessPoolExecutor` for CPU-bound tasks (asset processing).
|
||||
* **GUI:** Uses `PySide6` (Qt for Python) with signals and slots for communication between UI elements and background threads (`QThread`).
|
||||
* **Configuration:** Relies on Python modules (`config.py`) for core settings and JSON files (`Presets/`) for specific rule sets.
|
||||
* **File Paths:** Uses `pathlib.Path` for handling file system paths.
|
||||
================================
|
||||
Internal Details for Debugging
|
||||
================================
|
||||
|
||||
This section provides deeper technical details about the internal workings, intended to aid in debugging unexpected behavior.
|
||||
|
||||
--------------------------------
|
||||
7. Internal Logic & Algorithms
|
||||
--------------------------------
|
||||
|
||||
* **Configuration Preparation (`Configuration` class in `configuration.py`):**
|
||||
* Instantiated per preset (`__init__`).
|
||||
* Loads core settings from `config.py` using `importlib.util`.
|
||||
* Loads specified preset from `presets/{preset_name}.json`.
|
||||
* Validates basic structure of loaded settings (`_validate_configs`), checking for required keys and basic types (e.g., `map_type_mapping` is a list of dicts).
|
||||
* Compiles regex patterns (`_compile_regex_patterns`) from preset rules (extra, model, bit depth, map keywords) using `re.compile` (mostly case-insensitive) and stores them on the instance (e.g., `self.compiled_map_keyword_regex`). Uses `_fnmatch_to_regex` helper for basic wildcard conversion.
|
||||
|
||||
* **CLI Argument Parsing (`main.py:setup_arg_parser`):**
|
||||
* Uses `argparse` to define and parse command-line arguments.
|
||||
* Key arguments influencing flow: `--preset` (required), `--output-dir` (optional override), `--workers` (concurrency), `--overwrite` (force reprocessing), `--verbose` (logging level), `--nodegroup-blend`, `--materials-blend`.
|
||||
* Calculates a default worker count based on `os.cpu_count()`.
|
||||
|
||||
* **Output Directory Resolution (`main.py:main`):**
|
||||
* Determines the base output directory by checking `--output-dir` argument first, then falling back to `OUTPUT_BASE_DIR` from `config.py`.
|
||||
* Resolves the path to an absolute path and ensures the directory exists (`Path.resolve()`, `Path.mkdir(parents=True, exist_ok=True)`).
|
||||
|
||||
* **Asset Processing (`AssetProcessor` class in `asset_processor.py`):**
|
||||
* **Classification (`_inventory_and_classify_files`):**
|
||||
* Multi-pass approach: Explicit Extra (regex) -> Models (regex) -> Potential Maps (keyword regex) -> Standalone 16-bit check (regex) -> Prioritize 16-bit variants -> Final Maps -> Remaining as Unrecognized (Extra).
|
||||
* Uses compiled regex patterns provided by the `Configuration` object passed during initialization.
|
||||
* Sorts potential map variants based on: 1. Preset rule index, 2. Keyword index within rule, 3. Alphabetical path. Suffixes (`-1`, `-2`) are assigned later per-asset based on this sort order and `RESPECT_VARIANT_MAP_TYPES`.
|
||||
* **Map Processing (`_process_maps`):**
|
||||
* Loads images using `cv2.imread` (flags: `IMREAD_UNCHANGED` or `IMREAD_GRAYSCALE`). Converts loaded 3-channel images from BGR to RGB for internal consistency (stats, merging).
|
||||
* **Saving Channel Order:** Before saving with `cv2.imwrite`, 3-channel images are conditionally converted back from RGB to BGR *only* if the target output format is *not* EXR (e.g., for PNG, JPG, TIF). This ensures correct channel order for standard formats while preserving RGB for EXR. (Fix for ISSUE-010).
|
||||
* Handles Gloss->Roughness inversion: Loads gloss, inverts using float math (`1.0 - img/norm`), stores as float32 with original dtype. Prioritizes gloss source if both gloss and native rough exist.
|
||||
* Resizes using `cv2.resize` (interpolation: `INTER_LANCZOS4` for downscale, `INTER_CUBIC` for potential same-size/upscale - though upscaling is generally avoided by checks).
|
||||
* Determines output format based on hierarchy: `FORCE_LOSSLESS_MAP_TYPES` > `RESOLUTION_THRESHOLD_FOR_JPG` > Input format priority (TIF/EXR often lead to lossless) > Configured defaults (`OUTPUT_FORMAT_16BIT_PRIMARY`, `OUTPUT_FORMAT_8BIT`).
|
||||
* Determines output bit depth based on `MAP_BIT_DEPTH_RULES` ('respect' vs 'force_8bit').
|
||||
* Converts dtype before saving (e.g., float to uint8/uint16 using scaling factors 255.0/65535.0).
|
||||
* Calculates stats (`_calculate_image_stats`) on normalized float64 data (in RGB space) for a specific resolution (`CALCULATE_STATS_RESOLUTION`).
|
||||
* Calculates aspect ratio string (`_normalize_aspect_ratio_change`) based on relative dimension changes.
|
||||
* Handles save fallback: If primary 16-bit format (e.g., EXR) fails, attempts fallback (e.g., PNG).
|
||||
* **Merging (`_merge_maps_from_source`):**
|
||||
* Identifies the required *source* files for merge inputs based on classified files.
|
||||
* Determines common resolutions based on available processed maps (as a proxy for size compatibility).
|
||||
* Loads required source maps for each common resolution using the `_load_and_transform_source` helper (utilizing the cache).
|
||||
* Converts loaded inputs to float32 (normalized 0-1).
|
||||
* Injects default values (from rule `defaults`) for missing channels.
|
||||
* Merges channels using `cv2.merge`.
|
||||
* Determines output bit depth based on rule (`force_16bit`, `respect_inputs`).
|
||||
* Determines output format based on complex rules (`config.py` and preset), considering the highest format among *source* inputs if not forced lossless or over JPG threshold. Handles JPG 16-bit conflict by forcing 8-bit.
|
||||
* Saves the merged image using the `_save_image` helper, including final data type/color space conversions and fallback logic (e.g., EXR->PNG).
|
||||
* **Metadata (`_determine_base_metadata`, `_determine_single_asset_metadata`, `_generate_metadata_file`):**
|
||||
* Base name determined using `source_naming` separator/index from `Configuration`, with fallback to common prefix or input name. Handles multiple assets within one input.
|
||||
* Category determined by model presence or `decal_keywords` from `Configuration`.
|
||||
* Archetype determined by matching keywords in `archetype_rules` (from `Configuration`) against file stems/base name.
|
||||
* Final `metadata.json` populated by accumulating results (map details, stats, features, etc.) during the per-asset processing loop.
|
||||
|
||||
* **Blender Integration (`main.py:run_blender_script`, `gui/processing_handler.py:_run_blender_script_subprocess`):**
|
||||
* Uses `subprocess.run` to execute Blender.
|
||||
* Command includes `-b` (background), the target `.blend` file, `--python` followed by the script path (`blenderscripts/*.py`), and `--` separator.
|
||||
* Arguments after `--` (currently just the `asset_root_dir`, and optionally the nodegroup blend path for the materials script) are passed to the Python script via `sys.argv`.
|
||||
* Uses `--factory-startup` in GUI handler. Checks return code and logs stdout/stderr.
|
||||
|
||||
--------------------------------
|
||||
8. State Management
|
||||
--------------------------------
|
||||
|
||||
* **`Configuration` Object:** Holds the loaded and merged configuration state (core + preset) and compiled regex patterns. Designed to be immutable after initialization. Instantiated once per worker process.
|
||||
* **`AssetProcessor` Instance:** Primarily stateless between calls to `process()`. State *within* a `process()` call is managed through local variables scoped to the overall call or the per-asset loop (e.g., `current_asset_metadata`, `processed_maps_details_asset`). `self.classified_files` is populated once by `_inventory_and_classify_files` early in `process()` and then used read-only (filtered copies) within the per-asset loop.
|
||||
* **`main.py` (CLI):** Tracks overall run progress (processed, skipped, failed counts) based on results returned from worker processes.
|
||||
* **`gui/processing_handler.py`:** Manages the state of a GUI processing run using internal flags (`_is_running`, `_cancel_requested`) and stores `Future` objects in `self._futures` dictionary while the pool is active.
|
||||
|
||||
--------------------------------
|
||||
9. Error Handling & Propagation
|
||||
--------------------------------
|
||||
|
||||
* **Custom Exceptions:** `ConfigurationError` (raised by `Configuration` on load/validation failure), `AssetProcessingError` (raised by `AssetProcessor` for various processing failures).
|
||||
* **Configuration:** `ConfigurationError` halts initialization. Regex compilation errors are logged as warnings but do not stop initialization.
|
||||
* **AssetProcessor:** Uses `try...except Exception` within key pipeline steps (`_process_maps`, `_merge_maps`, etc.) and within the per-asset loop in `process()`. Errors specific to one asset are logged (`log.error(exc_info=True)`), the asset is marked "failed" in the returned status dictionary, and the loop continues to the next asset. Critical setup errors (e.g., workspace creation) raise `AssetProcessingError`, halting the entire `process()` call. Includes specific save fallback logic (EXR->PNG) on `cv2.imwrite` failure for 16-bit formats.
|
||||
* **Worker Wrapper (`main.py:process_single_asset_wrapper`):** Catches `ConfigurationError`, `AssetProcessingError`, and general `Exception` during worker execution. Logs the error and returns a ("failed", error_message) status tuple to the main process.
|
||||
* **Process Pool (`main.py`, `gui/processing_handler.py`):** The `with ProcessPoolExecutor(...)` block handles pool setup/teardown. A `try...except` around `as_completed` or `future.result()` catches critical worker failures (e.g., process crash).
|
||||
* **GUI Communication (`ProcessingHandler`):** Catches exceptions during `future.result()` retrieval. Emits `file_status_updated` signal with "failed" status and error message. Emits `processing_finished` with final counts.
|
||||
* **Blender Scripts:** Checks `subprocess.run` return code. Logs stderr as ERROR if return code is non-zero, otherwise as WARNING. Catches `FileNotFoundError` if the Blender executable path is invalid.
|
||||
|
||||
--------------------------------
|
||||
10. Key Data Structures
|
||||
--------------------------------
|
||||
|
||||
* **`Configuration` Instance Attributes:**
|
||||
* `compiled_map_keyword_regex`: `dict[str, list[tuple[re.Pattern, str, int]]]` (Base type -> list of compiled regex tuples)
|
||||
* `compiled_extra_regex`, `compiled_model_regex`: `list[re.Pattern]`
|
||||
* `compiled_bit_depth_regex_map`: `dict[str, re.Pattern]` (Base type -> compiled regex)
|
||||
* **`AssetProcessor` Internal Structures (within `process()`):**
|
||||
* `self.classified_files`: `dict[str, list[dict]]` (Category -> list of file info dicts like `{'source_path': Path, 'map_type': str, ...}`)
|
||||
* `processed_maps_details_asset`, `merged_maps_details_asset`: `dict[str, dict[str, dict]]` (Map Type -> Resolution Key -> Details Dict `{'path': Path, 'width': int, ...}`)
|
||||
* `file_to_base_name_map`: `dict[Path, Optional[str]]` (Source relative path -> Determined asset base name or None)
|
||||
* `current_asset_metadata`: `dict` (Accumulates name, category, archetype, stats, map details per asset)
|
||||
* **Return Values:**
|
||||
* `AssetProcessor.process()`: `Dict[str, List[str]]` (e.g., `{"processed": [...], "skipped": [...], "failed": [...]}`)
|
||||
* `main.process_single_asset_wrapper()`: `Tuple[str, str, Optional[str]]` (input_path, status_string, error_message)
|
||||
* **`ProcessingHandler._futures`:** `dict[Future, str]` (Maps `concurrent.futures.Future` object to the input path string)
|
||||
* **Image Data:** `numpy.ndarray` (Handled by OpenCV).
|
||||
|
||||
--------------------------------
|
||||
11. Concurrency Models (CLI & GUI)
|
||||
--------------------------------
|
||||
|
||||
* **Common Core:** Both CLI and GUI utilize `concurrent.futures.ProcessPoolExecutor` for parallel processing. The target function executed by workers is `main.process_single_asset_wrapper`.
|
||||
* **Isolation:** Crucially, `Configuration` and `AssetProcessor` objects are instantiated *within* the `process_single_asset_wrapper` function, meaning each worker process gets its own independent configuration and processor instance based on the arguments passed. This prevents state conflicts between concurrent asset processing tasks. Data is passed between the main process and workers via pickling of arguments and return values.
|
||||
* **CLI Orchestration (`main.py:run_processing`):**
|
||||
* Creates the `ProcessPoolExecutor`.
|
||||
* Submits all `process_single_asset_wrapper` tasks.
|
||||
* Uses `concurrent.futures.as_completed` to iterate over finished futures as they complete, blocking until the next one is done.
|
||||
* Gathers results synchronously within the main script's execution flow.
|
||||
* **GUI Orchestration (`gui/processing_handler.py`):**
|
||||
* The `ProcessingHandler` object (a `QObject`) contains the `run_processing` method.
|
||||
* This method is intended to be run in a separate `QThread` (managed by `MainWindow`) to avoid blocking the main UI thread.
|
||||
* Inside `run_processing`, it creates and manages the `ProcessPoolExecutor`.
|
||||
* It uses `as_completed` similarly to the CLI to iterate over finished futures.
|
||||
* **Communication:** Instead of blocking the thread gathering results, it emits Qt signals (`progress_updated`, `file_status_updated`, `processing_finished`) from within the `as_completed` loop. These signals are connected to slots in `MainWindow` (running on the main UI thread), allowing for thread-safe updates to the GUI (progress bar, table status, status bar messages).
|
||||
* **Cancellation (GUI - `gui/processing_handler.py:request_cancel`):**
|
||||
* Sets an internal `_cancel_requested` flag.
|
||||
* Attempts `executor.shutdown(wait=False)` which prevents new tasks from starting and may cancel pending ones (depending on Python version).
|
||||
* Manually iterates through stored `_futures` and calls `future.cancel()` on those not yet running or done.
|
||||
* **Limitation:** This does *not* forcefully terminate worker processes that are already executing the `process_single_asset_wrapper` function. Cancellation primarily affects pending tasks and the processing of results from already running tasks (they will be marked as failed/cancelled when their future completes).
|
||||
|
||||
--------------------------------
|
||||
12. Resource Management
|
||||
--------------------------------
|
||||
|
||||
* **Configuration:** Preset JSON files are opened and closed using `with open(...)`.
|
||||
* **AssetProcessor:**
|
||||
* Temporary workspace directory created using `tempfile.mkdtemp()`.
|
||||
* Cleanup (`_cleanup_workspace`) uses `shutil.rmtree()` and is called within a `finally` block in the main `process()` method, ensuring cleanup attempt even if errors occur.
|
||||
* Metadata JSON file written using `with open(...)`.
|
||||
* Image data is loaded into memory using OpenCV/NumPy; memory usage depends on image size and number of concurrent workers.
|
||||
* **Process Pool:** The `ProcessPoolExecutor` manages the lifecycle of worker processes. Using it within a `with` statement (as done in `main.py` and `gui/processing_handler.py`) ensures proper shutdown and resource release for the pool itself.
|
||||
|
||||
--------------------------------
|
||||
13. Known Limitations & Edge Cases
|
||||
--------------------------------
|
||||
|
||||
* **Configuration:**
|
||||
* Validation (`_validate_configs`) is primarily structural (key presence, basic types), not deeply logical (e.g., doesn't check if regex patterns are *sensible*).
|
||||
* Regex compilation errors in `_compile_regex_patterns` are logged as warnings but don't prevent `Configuration` initialization, potentially leading to unexpected classification later.
|
||||
* `_fnmatch_to_regex` helper only handles basic `*` and `?` wildcards. Complex fnmatch patterns might not translate correctly.
|
||||
* **AssetProcessor:**
|
||||
* Heavily reliant on correct filename patterns and rules defined in presets. Ambiguous or incorrect patterns lead to misclassification.
|
||||
* Potential for high memory usage when processing very large images, especially with many workers.
|
||||
* Error handling within `process()` is per-asset; a failure during map processing for one asset marks the whole asset as failed, without attempting other maps for that asset. No partial recovery within an asset.
|
||||
* Gloss->Roughness inversion assumes gloss map is single channel or convertible to grayscale.
|
||||
* `predict_output_structure` and `get_detailed_file_predictions` use simplified logic (e.g., assuming PNG output, highest resolution only) and may not perfectly match final output names/formats in all cases.
|
||||
* Filename sanitization (`_sanitize_filename`) is basic and might not cover all edge cases for all filesystems.
|
||||
* **CLI (`main.py`):**
|
||||
* Preset existence check (`{preset}.json`) happens only in the main process before workers start.
|
||||
* Blender executable finding logic relies on `config.py` path being valid or `blender` being in the system PATH.
|
||||
* **GUI Concurrency (`gui/processing_handler.py`):**
|
||||
* Cancellation (`request_cancel`) is not immediate for tasks already running in worker processes. It prevents new tasks and stops processing results from completed futures once the flag is checked.
|
||||
* **General:**
|
||||
* Limited input format support (ZIP archives, folders). Internal file formats limited by OpenCV (`cv2.imread`, `cv2.imwrite`). Optional `OpenEXR` package recommended for full EXR support.
|
||||
* Error messages propagated from workers might lack full context in some edge cases.
|
||||
70
Deprecated/documentation_plan.md
Normal file
70
Deprecated/documentation_plan.md
Normal file
@@ -0,0 +1,70 @@
|
||||
# Asset Processor Tool Documentation Plan
|
||||
|
||||
This document outlines the proposed structure for the documentation of the Asset Processor Tool, based on the content from `readme.md` and `documentation.txt`. The goal is to create a clear, modular, and comprehensive documentation set within a new `Documentation` directory.
|
||||
|
||||
## Proposed Directory Structure
|
||||
|
||||
```
|
||||
Documentation/
|
||||
├── 00_Overview.md
|
||||
├── 01_User_Guide/
|
||||
│ ├── 01_Introduction.md
|
||||
│ ├── 02_Features.md
|
||||
│ ├── 03_Installation.md
|
||||
│ ├── 04_Configuration_and_Presets.md
|
||||
│ ├── 05_Usage_GUI.md
|
||||
│ ├── 06_Usage_CLI.md
|
||||
│ ├── 07_Usage_Monitor.md
|
||||
│ ├── 08_Usage_Blender.md
|
||||
│ ├── 09_Output_Structure.md
|
||||
│ └── 10_Docker.md
|
||||
└── 02_Developer_Guide/
|
||||
├── 01_Architecture.md
|
||||
├── 02_Codebase_Structure.md
|
||||
├── 03_Key_Components.md
|
||||
├── 04_Configuration_System_and_Presets.md
|
||||
├── 05_Processing_Pipeline.md
|
||||
├── 06_GUI_Internals.md
|
||||
├── 07_Monitor_Internals.md
|
||||
├── 08_Blender_Integration_Internals.md
|
||||
├── 09_Development_Workflow.md
|
||||
├── 10_Coding_Conventions.md
|
||||
└── 11_Debugging_Notes.md
|
||||
```
|
||||
|
||||
## File Content Breakdown
|
||||
|
||||
### `Documentation/00_Overview.md`
|
||||
|
||||
* Project purpose, scope, and intended audience.
|
||||
* High-level summary of the tool's functionality.
|
||||
* Table of Contents for the entire documentation set.
|
||||
|
||||
### `Documentation/01_User_Guide/`
|
||||
|
||||
* **`01_Introduction.md`**: Brief welcome and purpose for users.
|
||||
* **`02_Features.md`**: Detailed list of user-facing features.
|
||||
* **`03_Installation.md`**: Requirements and step-by-step installation instructions.
|
||||
* **`04_Configuration_and_Presets.md`**: Explains user-level configuration options (`config.py` settings relevant to users) and how to select and understand presets.
|
||||
* **`05_Usage_GUI.md`**: Guide on using the Graphical User Interface, including descriptions of panels, controls, and workflow.
|
||||
* **`06_Usage_CLI.md`**: Guide on using the Command-Line Interface, including arguments and examples.
|
||||
* **`07_Usage_Monitor.md`**: Guide on setting up and using the Directory Monitor for automated processing.
|
||||
* **`08_Usage_Blender.md`**: Explains the user-facing aspects of the Blender integration.
|
||||
* **`09_Output_Structure.md`**: Describes the structure and contents of the generated asset library.
|
||||
* **`10_Docker.md`**: Instructions for building and running the tool using Docker.
|
||||
|
||||
### `Documentation/02_Developer_Guide/`
|
||||
|
||||
* **`01_Architecture.md`**: High-level technical architecture, core components, and their relationships.
|
||||
* **`02_Codebase_Structure.md`**: Detailed breakdown of key files and directories within the project.
|
||||
* **`03_Key_Components.md`**: In-depth explanation of major classes and modules (`AssetProcessor`, `Configuration`, GUI Handlers, etc.).
|
||||
* **`04_Configuration_System_and_Presets.md`**: Technical details of the configuration loading and merging process, the structure of preset JSON files, and guidance on creating/modifying presets for developers.
|
||||
* **`05_Processing_Pipeline.md`**: Step-by-step technical breakdown of the asset processing logic within the `AssetProcessor` class.
|
||||
* **`06_GUI_Internals.md`**: Technical details of the GUI implementation, including threading, signals/slots, and background task management.
|
||||
* **`07_Monitor_Internals.md`**: Technical details of the Directory Monitor implementation using `watchdog`.
|
||||
* **`08_Blender_Integration_Internals.md`**: Technical details of how the Blender scripts are executed and interact with the processed assets.
|
||||
* **`09_Development_Workflow.md`**: Guidance for developers on contributing, setting up a development environment, and modifying specific parts of the codebase.
|
||||
* **`10_Coding_Conventions.md`**: Overview of the project's coding standards, object-oriented approach, type hinting, logging, and error handling.
|
||||
* **`11_Debugging_Notes.md`**: Advanced internal details, state management, error propagation, concurrency models, resource management, and known limitations/edge cases.
|
||||
|
||||
This plan provides a solid foundation for organizing the existing documentation and serves as a roadmap for creating the new markdown files.
|
||||
BIN
Deprecated/readme.md
Normal file
BIN
Deprecated/readme.md
Normal file
Binary file not shown.
356
Deprecated/readme.md.bak
Normal file
356
Deprecated/readme.md.bak
Normal file
@@ -0,0 +1,356 @@
|
||||
# Asset Processor Tool vX.Y
|
||||
|
||||
## Overview
|
||||
|
||||
This tool processes 3D asset source files (texture sets, models, etc., provided as ZIP archives or folders) into a standardized library format. It uses configurable presets to interpret different asset sources and automates tasks like file classification, image resizing, channel merging, and metadata generation.
|
||||
|
||||
The tool offers both a Graphical User Interface (GUI) for interactive use and a Command-Line Interface (CLI) for batch processing and scripting.
|
||||
|
||||
This tool is currently a work in progress, rewriting features from an original proof of concept; the original script can be found at `Deprecated/POC/` for reference.
|
||||
|
||||
## Features
|
||||
|
||||
* **Preset-Driven:** Uses JSON presets (`Presets/`) to define rules for different asset suppliers (e.g., `Poliigon.json`).
|
||||
* **Dual Interface:** Provides both a user-friendly GUI and a powerful CLI.
|
||||
* **Parallel Processing:** Utilizes multiple CPU cores for faster processing of multiple assets (configurable via `--workers` in CLI or GUI control).
|
||||
* **Multi-Asset Input Handling:** Correctly identifies and processes multiple distinct assets contained within a single input ZIP or folder, creating separate outputs for each.
|
||||
* **File Classification:** Automatically identifies map types (Color, Normal, Roughness, etc.), models, explicitly marked extra files, and unrecognised files based on preset rules.
|
||||
* **Variant Handling:** Map types listed in `RESPECT_VARIANT_MAP_TYPES` (in `config.py`, e.g., `"COL"`) will *always* receive a numeric suffix (`-1`, `-2`, etc.). The numbering priority is determined primarily by the order of keywords listed in the preset's `map_type_mapping`. Alphabetical sorting of filenames is used only as a tie-breaker for files matching the exact same keyword pattern. Other map types will *never* receive a suffix.
|
||||
* **16-bit Prioritization:** Correctly identifies 16-bit variants defined in preset `bit_depth_variants` (e.g., `*_NRM16.tif`), prioritizes them, and ignores the corresponding 8-bit version (marked as `Ignored` in GUI).
|
||||
* **Map Processing:**
|
||||
* Resizes texture maps to configured power of two resolutions (e.g., 4K, 2K, 1K), avoiding upscaling.
|
||||
* Handles Glossiness map inversion to Roughness.
|
||||
* Applies bit-depth rules (`respect` source or `force_8bit`).
|
||||
* Saves maps in appropriate formats. Map types listed in `FORCE_LOSSLESS_MAP_TYPES` (in `config.py`, e.g., `"NRM"`, `"DISP"`) are *always* saved in a lossless format (PNG for 8-bit, configured 16-bit format like EXR/PNG for 16-bit), overriding other rules. For other map types, if the output is 8-bit and the resolution meets or exceeds `RESOLUTION_THRESHOLD_FOR_JPG` (in `config.py`), the output is forced to JPG. Otherwise, the format is based on input type and target bit depth: JPG inputs yield JPG outputs (8-bit); TIF inputs yield PNG/EXR (based on target bit depth and config); other inputs use configured formats (PNG/EXR). Merged maps follow similar logic, checking `FORCE_LOSSLESS_MAP_TYPES` first, then the threshold for 8-bit targets, then using the highest format from inputs (EXR > TIF > PNG > JPG hierarchy, with TIF adjusted to PNG/EXR based on target bit depth).
|
||||
* Calculates basic image statistics (Min/Max/Mean) for a reference resolution.
|
||||
* Calculates and stores the relative aspect ratio change string in metadata.
|
||||
* **Channel Merging:** Combines channels from different maps into packed textures (e.g., NRMRGH) based on preset rules.
|
||||
* **Metadata Generation:** Creates a `metadata.json` file for each asset containing details about maps, category, archetype, aspect ratio change, processing settings, etc. **Aspect Ratio Metadata:** Calculates the relative aspect ratio change during resizing and stores it in the `metadata.json` file (`aspect_ratio_change_string`). The format indicates whether the aspect is unchanged (`EVEN`), scaled horizontally (`X150`, `X110`, etc.), or scaled vertically (`Y150`, `Y125`, etc.).
|
||||
* **Output Organization:** Creates a clean, structured output directory (`<output_base>/<supplier>/<asset_name>/`).
|
||||
* **Skip/Overwrite:** Can skip processing if the output already exists or force reprocessing with the `--overwrite` flag (CLI) or checkbox (GUI).
|
||||
* **Blender Integration:** Optionally runs Blender scripts (`create_nodegroups.py`, `create_materials.py`) after asset processing to automate node group and material creation in specified `.blend` files. Available via both CLI and GUI.
|
||||
* **GUI Features:**
|
||||
* Drag-and-drop input for assets (ZIPs/folders).
|
||||
* Integrated preset editor panel for managing `.json` presets.
|
||||
* Configurable output directory field with a browse button (defaults to path in `config.py`).
|
||||
* Enhanced live preview table showing predicted file status (Mapped, Model, Extra, Unrecognised, Ignored, Error) based on the selected processing preset.
|
||||
* Toggleable preview mode (via View menu) to switch between detailed file preview and a simple list of input assets.
|
||||
* Toggleable log console panel (via View menu) displaying application log messages within the GUI.
|
||||
* Progress bar, cancellation button, and clear queue button.
|
||||
* **Blender Post-Processing Controls:** Checkbox to enable/disable Blender script execution and input fields with browse buttons to specify the target `.blend` files for node group and material creation (defaults configurable in `config.py`).
|
||||
* **Responsive GUI:** Utilizes background threads (`QThread`) for processing (`ProcessPoolExecutor`) and file preview generation (`ThreadPoolExecutor`), ensuring the user interface remains responsive during intensive operations.
|
||||
* **Optimized Classification:** Pre-compiles regular expressions from presets for faster file identification during classification.
|
||||
* **Docker Support:** Includes a `Dockerfile` for containerized execution.
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
Asset_processor_tool/
|
||||
│
|
||||
├── main.py # CLI Entry Point & processing orchestrator
|
||||
├── monitor.py # Directory monitoring script for automated processing
|
||||
├── asset_processor.py # Core class handling single asset processing pipeline
|
||||
├── configuration.py # Class for loading and accessing configuration
|
||||
├── config.py # Core settings definition (output paths, resolutions, merge rules etc.)
|
||||
│
|
||||
├── blenderscripts/ # Scripts for integration with Blender
|
||||
│ └── create_nodegroups.py # Script to create node groups from processed assets
|
||||
│ └── create_materials.py # Script to create materials linking to node groups
|
||||
│
|
||||
├── gui/ # Contains files related to the Graphical User Interface
|
||||
│ ├── main_window.py # Main GUI application window and layout
|
||||
│ ├── processing_handler.py # Handles background processing logic for the GUI
|
||||
│ ├── prediction_handler.py # Handles background file prediction/preview for the GUI
|
||||
│
|
||||
├── Presets/ # Preset definition files
|
||||
│ ├── _template.json # Template for creating new presets
|
||||
│ └── Poliigon.json # Example preset for Poliigon assets
|
||||
│
|
||||
├── Testfiles/ # Directory containing example input assets for testing
|
||||
│
|
||||
├── Tickets/ # Directory for issue and feature tracking (Markdown files)
|
||||
│ ├── _template.md # Template for creating new tickets
|
||||
│ └── Ticket-README.md # Explanation of the ticketing system
|
||||
│
|
||||
├── requirements.txt # Python package dependencies for standard execution
|
||||
├── requirements-docker.txt # Dependencies specifically for the Docker environment
|
||||
├── Dockerfile # Instructions for building the Docker container image
|
||||
└── readme.md # This documentation file
|
||||
```
|
||||
|
||||
* **Core Logic:** `main.py`, `monitor.py`, `asset_processor.py`, `configuration.py`, `config.py`
|
||||
* **Blender Integration:** `blenderscripts/` directory
|
||||
* **GUI:** `gui/` directory
|
||||
* **Configuration:** `config.py`, `Presets/` directory
|
||||
* **Dependencies:** `requirements.txt`, `requirements-docker.txt`
|
||||
* **Containerization:** `Dockerfile`
|
||||
* **Documentation/Planning:** `readme.md`, `Project Notes/` directory
|
||||
* **Issue/Feature Tracking:** `Tickets/` directory (see `Tickets/Ticket-README.md`)
|
||||
* **Testing:** `Testfiles/` directory
|
||||
|
||||
## Architecture
|
||||
|
||||
This section provides a higher-level overview of the tool's internal structure and design, intended for developers or users interested in the technical implementation.
|
||||
|
||||
### Core Components
|
||||
|
||||
The tool is primarily built around several key Python modules:
|
||||
|
||||
* `config.py`: Defines core, global settings (output paths, resolutions, default behaviors, format rules, Blender executable path, default Blender file paths, etc.) that are generally not supplier-specific.
|
||||
* `Presets/*.json`: Supplier-specific JSON files defining rules for interpreting source assets (filename patterns, map type keywords, model identification, etc.).
|
||||
* `configuration.py` **(**`Configuration` **class)**: Responsible for loading the core `config.py` settings and merging them with a selected preset JSON file. Crucially, it also **pre-compiles** regular expression patterns defined in the preset (e.g., for map keywords, extra files, 16-bit variants) upon initialization. This pre-compilation significantly speeds up the file classification process.
|
||||
* `asset_processor.py` **(**`AssetProcessor` **class)**: Contains the core logic for processing a *single* asset. It orchestrates the pipeline steps: workspace setup, extraction, file classification, metadata determination, map processing, channel merging, metadata file generation, and output organization.
|
||||
* `main.py`: Serves as the entry point for the Command-Line Interface (CLI). It handles argument parsing, sets up logging, manages the parallel processing pool, calls `AssetProcessor` for each input asset via a wrapper function, and optionally triggers Blender script execution after processing.
|
||||
* `gui/`: Contains modules related to the Graphical User Interface (GUI), built using PySide6.
|
||||
* `monitor.py`: Implements the directory monitoring functionality for automated processing.
|
||||
|
||||
### Parallel Processing (CLI & GUI)
|
||||
|
||||
To accelerate the processing of multiple assets, the tool utilizes Python's `concurrent.futures.ProcessPoolExecutor`.
|
||||
|
||||
* Both `main.py` (for CLI) and `gui/processing_handler.py` (for GUI background tasks) create a process pool.
|
||||
* The actual processing for each asset is delegated to the `main.process_single_asset_wrapper` function. This wrapper is executed in a separate worker process within the pool.
|
||||
* The wrapper function is responsible for instantiating the `Configuration` and `AssetProcessor` classes for the specific asset being processed in that worker. This isolates each asset's processing environment.
|
||||
* Results (success, skip, failure, error messages) are communicated back from the worker processes to the main coordinating script (either `main.py` or `gui/processing_handler.py`).
|
||||
|
||||
### Asset Processing Pipeline (`AssetProcessor` class)
|
||||
|
||||
The `AssetProcessor` class executes a sequence of steps for each asset:
|
||||
|
||||
1. `_setup_workspace()`: Creates a temporary directory for processing.
|
||||
2. `_extract_input()`: Extracts the input ZIP archive or copies the input folder contents into the temporary workspace.
|
||||
3. `_inventory_and_classify_files()`: This is a critical step that scans the workspace and classifies each file based on rules defined in the loaded `Configuration` (which includes the preset). It uses the pre-compiled regex patterns for efficiency. Key logic includes:
|
||||
* Identifying files explicitly marked for the `Extra/` folder.
|
||||
* Identifying model files.
|
||||
* Matching potential texture maps against keyword patterns.
|
||||
* Identifying and prioritizing 16-bit variants (e.g., `_NRM16.tif`) over their 8-bit counterparts based on `source_naming.bit_depth_variants` patterns. Ignored 8-bit files are tracked.
|
||||
* Handling map variants (e.g., multiple Color maps) by assigning suffixes (`-1`, `-2`) based on the `RESPECT_VARIANT_MAP_TYPES` setting in `config.py` and the order of keywords defined in the preset's `map_type_mapping`.
|
||||
* Classifying any remaining files as 'Unrecognised' (which are also moved to the `Extra/` folder).
|
||||
4. `_determine_base_metadata()`: Determines the asset's base name, category (Texture, Asset, Decal), and archetype (e.g., Wood, Metal) based on classified files and preset rules (`source_naming`, `asset_category_rules`, `archetype_rules`).
|
||||
5. **Skip Check**: If `overwrite` is false, checks if the final output directory and metadata file already exist. If so, processing for this asset stops early.
|
||||
6. `_process_maps()`: Iterates through classified texture maps. For each map:
|
||||
* Loads the image data (handling potential Gloss->Roughness inversion).
|
||||
* Resizes the map to each target resolution specified in `config.py`, avoiding upscaling.
|
||||
* Determines the output bit depth based on `MAP_BIT_DEPTH_RULES` (`respect` source or `force_8bit`).
|
||||
* Determines the output file format (`.jpg`, `.png`, `.exr`) based on a combination of factors:
|
||||
* The `RESOLUTION_THRESHOLD_FOR_JPG` (forces JPG for 8-bit maps above the threshold).
|
||||
* The original input file format (e.g., `.jpg` inputs tend to produce `.jpg` outputs if 8-bit and below threshold).
|
||||
* The target bit depth (16-bit outputs use configured `OUTPUT_FORMAT_16BIT_PRIMARY` or `_FALLBACK`).
|
||||
* Configured 8-bit format (`OUTPUT_FORMAT_8BIT`).
|
||||
* The `FORCE_LOSSLESS_MAP_TYPES` list in `config.py` (overrides all other logic for specified map types, ensuring PNG/EXR output).
|
||||
* Saves the processed map for each resolution, applying appropriate compression/quality settings. Includes fallback logic if saving in the primary format fails (e.g., EXR -> PNG).
|
||||
* Calculates basic image statistics (Min/Max/Mean) for a reference resolution (`CALCULATE_STATS_RESOLUTION`) and determines the aspect ratio change string (e.g., "EVEN", "X150", "Y075") stored in the metadata.
|
||||
7. `_merge_maps()`: Combines channels from different processed maps into new textures (e.g., NRMRGH) based on `MAP_MERGE_RULES` defined in `config.py`. It determines the output format for merged maps similarly to `_process_maps` (checking `FORCE_LOSSLESS_MAP_TYPES` first, then threshold, then input hierarchy), considering the formats of the input maps involved.
|
||||
8. `_generate_metadata_file()`: Collects all gathered information (asset name, maps present, resolutions, stats, aspect ratio change, etc.) and writes it to the `metadata.json` file.
|
||||
9. `_organize_output_files()`: Moves the processed maps, merged maps, models, metadata file, and any 'Extra'/'Unrecognised'/'Ignored' files from the temporary workspace to the final structured output directory (`<output_base>/<supplier>/<asset_name>/`).
|
||||
10. `_cleanup_workspace()`: Removes the temporary workspace directory.
|
||||
|
||||
### GUI Architecture (`gui/`)
|
||||
|
||||
The GUI provides an interactive way to use the tool and manage presets.
|
||||
|
||||
* **Framework**: Built using `PySide6`, the official Python bindings for the Qt framework.
|
||||
* **Main Window (**`main_window.py`**)**: Defines the main application window, which includes:
|
||||
* An integrated preset editor panel (using `QSplitter`).
|
||||
* A processing panel with drag-and-drop support, output directory selection, a file preview table, and processing controls.
|
||||
* **Blender Post-Processing Controls:** A group box containing a checkbox to enable/disable Blender script execution and input fields with browse buttons for specifying the target `.blend` files for node group and material creation.
|
||||
* **Threading Model**: To prevent the UI from freezing during potentially long operations, background tasks are run in separate `QThread`s:
|
||||
* `ProcessingHandler` **(**`processing_handler.py`**)**: Manages the execution of the main processing pipeline (using `ProcessPoolExecutor` and `main.process_single_asset_wrapper`, similar to the CLI) and the optional Blender script execution in a background thread. Receives the target output directory and Blender integration settings from the main window.
|
||||
* `PredictionHandler` **(**`prediction_handler.py`**)**: Manages the generation of file previews in a background thread using a `ThreadPoolExecutor` to parallelize prediction across multiple assets. It calls `AssetProcessor.get_detailed_file_predictions()`, which performs extraction and classification.
|
||||
* **Communication**: Qt's **signal and slot mechanism** is used for communication between the background threads (`ProcessingHandler`, `PredictionHandler`) and the main GUI thread (`MainWindow`). For example, signals are emitted to update the progress bar, populate the preview table, and report completion status or errors. A custom `QtLogHandler` redirects Python log messages to the UI console via signals.
|
||||
* **Preset Editor**: The editor allows creating, modifying, and saving preset JSON files directly within the GUI. Changes are tracked, and users are prompted to save before closing or loading another preset if changes are pending. Includes an optional, toggleable log console panel at the top.
|
||||
|
||||
### Monitor Architecture (`monitor.py`)
|
||||
|
||||
The `monitor.py` script enables automated processing of assets dropped into a designated input directory.
|
||||
|
||||
* **File System Watching**: Uses the `watchdog` library (specifically `PollingObserver` for cross-platform compatibility) to monitor the specified `INPUT_DIR`.
|
||||
* **Event Handling**: A custom `ZipHandler` detects `on_created` events for `.zip` files.
|
||||
* **Filename Parsing**: It expects filenames in the format `[preset]_filename.zip` and uses a regular expression (`PRESET_FILENAME_REGEX`) to extract the `preset` name.
|
||||
* **Preset Validation**: Checks if the extracted preset name corresponds to a valid `.json` file in the `Presets/` directory.
|
||||
* **Processing Trigger**: If the filename format and preset are valid, it calls the `main.run_processing` function (the same core logic used by the CLI) to process the detected ZIP file using the extracted preset.
|
||||
* **File Management**: Moves the source ZIP file to either a `PROCESSED_DIR` (on success/skip) or an `ERROR_DIR` (on failure or invalid preset) after the processing attempt.
|
||||
|
||||
### Error Handling
|
||||
|
||||
* Custom exception classes (`ConfigurationError`, `AssetProcessingError`) are defined and used to signal specific types of errors during configuration loading or asset processing.
|
||||
* Standard Python logging is used throughout the application (CLI, GUI, Monitor, Core Logic) to record information, warnings, and errors. Log levels can be configured.
|
||||
* Worker processes in the processing pool capture exceptions and report them back to the main process for logging and status updates.
|
||||
|
||||
## Requirements
|
||||
|
||||
* Python 3.8+
|
||||
* Required Python Packages (see `requirements.txt`):
|
||||
* `opencv-python` (for image processing)
|
||||
* `numpy` (for numerical operations)
|
||||
* `PySide6` (only needed for the GUI)
|
||||
* Optional Python Packages:
|
||||
* `OpenEXR` (provides more robust EXR file handling, recommended if processing EXR sources)
|
||||
* **Blender:** A working installation of Blender is required for the optional Blender integration features. The path to the executable should be configured in `config.py` or available in the system's PATH.
|
||||
|
||||
Install dependencies using pip:
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
(For GUI, ensure PySide6 is included or install separately: `pip install PySide6`)
|
||||
|
||||
## Configuration
|
||||
|
||||
The tool's behavior is controlled by two main configuration components:
|
||||
|
||||
1. `config.py`**:** Defines core, global settings:
|
||||
* `OUTPUT_BASE_DIR`: Default root directory for processed assets.
|
||||
* `DEFAULT_ASSET_CATEGORY`: Fallback category ("Texture", "Asset", "Decal").
|
||||
* `IMAGE_RESOLUTIONS`: Dictionary mapping resolution keys (e.g., "4K") to pixel dimensions.
|
||||
* `RESPECT_VARIANT_MAP_TYPES`: List of map type strings (e.g., `["COL"]`) that should always receive a numeric suffix (`-1`, `-2`, etc.) based on preset order, even if only one variant exists.
|
||||
* `TARGET_FILENAME_PATTERN`: Format string for output filenames.
|
||||
* `MAP_MERGE_RULES`: List defining how to merge channels (e.g., creating NRMRGH).
|
||||
* `ARCHETYPE_RULES`: Rules for determining asset usage archetype (e.g., Wood, Metal).
|
||||
* `RESOLUTION_THRESHOLD_FOR_JPG`: Dimension threshold (pixels) above which 8-bit maps are forced to JPG format, overriding other format logic.
|
||||
* `FORCE_LOSSLESS_MAP_TYPES`: List of map type strings (e.g., `["NRM", "DISP"]`) that should *always* be saved losslessly (PNG/EXR), overriding the JPG threshold and other format logic.
|
||||
* `BLENDER_EXECUTABLE_PATH`: Path to the Blender executable (required for Blender integration).
|
||||
* `DEFAULT_NODEGROUP_BLEND_PATH`: Default path to the .blend file for node group creation (used by GUI if not specified).
|
||||
* `DEFAULT_MATERIALS_BLEND_PATH`: Default path to the .blend file for material creation (used by GUI if not specified).
|
||||
* ... and other processing parameters (JPEG quality, PNG compression, 16-bit/8-bit output formats, etc.).
|
||||
2. **`presets/*.json`:** Defines supplier-specific rules. Each JSON file represents a preset (e.g., `Poliigon.json`). Key sections include:
|
||||
* `supplier_name`: Name of the asset source.
|
||||
* `map_type_mapping`: A list of dictionaries defining rules to map source filename keywords/patterns to standard map types. Each dictionary should have `"target_type"` (e.g., `"COL"`, `"NRM"`) and `"keywords"` (a list of source filename patterns like `["_col*", "_color"]`). For map types listed in `config.py`'s `RESPECT_VARIANT_MAP_TYPES`, the numbering priority (`-1`, `-2`, etc.) is determined primarily by the order of the keywords within the `"keywords"` list for the matching rule. Alphabetical sorting of filenames is used only as a secondary tie-breaker for files matching the exact same keyword pattern. Other map types do not receive suffixes.
|
||||
* `bit_depth_variants`: Dictionary mapping standard map types (e.g., `"NRM"`) to fnmatch patterns used to identify their high bit-depth source files (e.g., `"*_NRM16*.tif"`). These take priority over standard keyword matches, and the corresponding 8-bit version will be ignored.
|
||||
* `bit_depth_rules`: Specifies whether to `respect` source bit depth or `force_8bit` for specific map types (defined in `config.py`).
|
||||
* `model_patterns`: Filename patterns (fnmatch-style wildcards, e.g., `*.fbx`, `*.obj`) used to identify model files.
|
||||
* `move_to_extra_patterns`: Regex patterns for files to move directly to the `Extra/` output folder.
|
||||
* `source_naming_convention`: Defines separator and indices for extracting base name/archetype from source filenames.
|
||||
* `asset_category_rules`: Keywords/patterns to identify specific asset categories (e.g., "Decal").
|
||||
|
||||
Use `presets/_template.json` as a starting point for creating new presets.
|
||||
|
||||
## Usage
|
||||
|
||||
### 1. Graphical User Interface (GUI)
|
||||
|
||||
* **Run:**
|
||||
|
||||
```bash
|
||||
python -m gui.main_window
|
||||
```
|
||||
*(Note: Run this command from the project root directory)*
|
||||
* **Interface:**
|
||||
* **Menu Bar:** Contains a "View" menu to toggle visibility of the Log Console and enable/disable the detailed file preview.
|
||||
* **Preset Editor Panel (Left):**
|
||||
* Optional **Log Console:** A text area at the top displaying application log messages (toggle via View menu).
|
||||
* **Preset List:** Allows creating, deleting, loading, editing, and saving presets. Select a preset here to load it into the editor tabs below.
|
||||
* **Preset Editor Tabs:** Edit preset details ("General & Naming", "Mapping & Rules").
|
||||
* **Processing Panel (Right):**
|
||||
* **Preset Selector:** Select the preset to use for *processing* the current queue.
|
||||
* **Output Directory:** Displays the target output directory. Defaults to the path in `config.py`. Use the "Browse..." button to select a different directory.
|
||||
* **Drag and Drop Area:** Drag asset ZIP files or folders here to add them to the queue.
|
||||
* **Preview Table:** Displays information about the assets in the queue. Behavior depends on the "Disable Detailed Preview" option in the View menu:
|
||||
* **Detailed Preview (Default):** Shows all files found within the dropped assets, their predicted classification status (Mapped, Model, Extra, Unrecognised, Ignored, Error), predicted output name (if applicable), and other details based on the selected *processing* preset. Rows are color-coded by status.
|
||||
* **Simple View (Preview Disabled):** Shows only the list of top-level input asset paths (ZIPs/folders) added to the queue.
|
||||
* **Progress Bar:** Shows the overall processing progress.
|
||||
* **Blender Post-Processing:** A group box containing a checkbox to enable/disable the optional Blender script execution. When enabled, input fields and browse buttons appear to specify the `.blend` files for node group and material creation. These fields default to the paths configured in `config.py`.
|
||||
* **Options & Controls (Bottom):**
|
||||
* `Overwrite Existing`: Checkbox to force reprocessing if output already exists.
|
||||
* `Workers`: Spinbox to set the number of assets to process concurrently.
|
||||
* `Clear Queue`: Button to remove all assets from the queue and clear the preview.
|
||||
* `Start Processing`: Button to begin processing all assets in the queue.
|
||||
* `Cancel`: Button to attempt stopping ongoing processing.
|
||||
* **Status Bar:** Displays messages about the current state, errors, or completion.
|
||||
|
||||
### 2. Command-Line Interface (CLI)
|
||||
|
||||
* **Run:**
|
||||
|
||||
```bash
|
||||
python main.py [OPTIONS] INPUT_PATH [INPUT_PATH ...]
|
||||
```
|
||||
* **Arguments:**
|
||||
* `INPUT_PATH`: One or more paths to input ZIP files or folders.
|
||||
* `-p PRESET`, `--preset PRESET`: (Required) Name of the preset to use (e.g., `Poliigon`).
|
||||
* `-o OUTPUT_DIR`, `--output-dir OUTPUT_DIR`: Override the `OUTPUT_BASE_DIR` set in `config.py`.
|
||||
* `-w WORKERS`, `--workers WORKERS`: Number of parallel processes (default: auto-detected based on CPU cores).
|
||||
* `--overwrite`: Force reprocessing and overwrite existing output.
|
||||
* `-v`, `--verbose`: Enable detailed DEBUG level logging.
|
||||
* `--nodegroup-blend NODEGROUP_BLEND`: Path to the .blend file for creating/updating node groups. Overrides `config.py` default. If provided, triggers node group script execution after processing.
|
||||
* `--materials-blend MATERIALS_BLEND`: Path to the .blend file for creating/updating materials. Overrides `config.py` default. If provided, triggers material script execution after processing.
|
||||
* **Example:**
|
||||
|
||||
```bash
|
||||
python main.py "C:/Downloads/WoodFine001.zip" -p Poliigon -o "G:/Assets/Processed" --workers 4 --overwrite --nodegroup-blend "G:/Blender/Libraries/NodeGroups.blend" --materials-blend "G:/Blender/Libraries/Materials.blend"
|
||||
```
|
||||
|
||||
### 3. Directory Monitor (Automated Processing)
|
||||
|
||||
* **Run:**
|
||||
|
||||
```bash
|
||||
python monitor.py
|
||||
```
|
||||
* **Functionality:** This script continuously monitors a specified input directory for new `.zip` files. When a file matching the expected format `[preset]_filename.zip` appears, it automatically triggers the processing pipeline using the extracted preset name. **Note:** The directory monitor currently does *not* support the optional Blender script execution. This feature is only available via the CLI and GUI.
|
||||
* **Configuration (Environment Variables):**
|
||||
* `INPUT_DIR`: Directory to monitor for new ZIP files (default: `/data/input`).
|
||||
* `OUTPUT_DIR`: Base directory for processed asset output (default: `/data/output`).
|
||||
* `PROCESSED_DIR`: Directory where successfully processed/skipped source ZIPs are moved (default: `/data/processed`).
|
||||
* `ERROR_DIR`: Directory where source ZIPs that failed processing are moved (default: `/data/error`).
|
||||
* `LOG_LEVEL`: Logging verbosity (e.g., `INFO`, `DEBUG`) (default: `INFO`).
|
||||
* `POLL_INTERVAL`: How often to check the input directory (seconds) (default: `5`).
|
||||
* `PROCESS_DELAY`: Delay after detecting a file before processing starts (seconds) (default: `2`).
|
||||
* `NUM_WORKERS`: Number of parallel workers for processing (default: auto-detected).
|
||||
* **Output:**
|
||||
* Logs processing activity to the console.
|
||||
* Processed assets are created in the `OUTPUT_DIR` following the standard structure.
|
||||
* The original input `.zip` file is moved to `PROCESSED_DIR` on success/skip or `ERROR_DIR` on failure.
|
||||
|
||||
### 4. Blender Node Group Creation Script (`blenderscripts/create_nodegroups.py`)
|
||||
* **Purpose:** This script, designed to be run *within* Blender (either manually or triggered by `main.py`/GUI), scans processed assets and creates/updates PBR node groups in the active `.blend` file.
|
||||
* **Execution:** Typically run via the Asset Processor tool's CLI or GUI after asset processing. Can also be run manually in Blender's Text Editor.
|
||||
* **Prerequisites (for manual run):**
|
||||
* A library of assets processed by this tool, located at a known path.
|
||||
* A Blender file containing two template node groups named exactly `Template_PBRSET` and `Template_PBRTYPE`.
|
||||
* **Configuration (Inside the script for manual run):**
|
||||
* `PROCESSED_ASSET_LIBRARY_ROOT`: **Must be updated** within the script to point to the base output directory where the processed supplier folders (e.g., `Poliigon/`) are located. This is overridden by the tool when run via CLI/GUI.
|
||||
* **Functionality:** Reads metadata, creates/updates node groups, loads textures, sets up nodes, applies metadata-driven settings (aspect ratio, stats, highest resolution), and sets asset previews. Includes an explicit save command at the end.
|
||||
|
||||
### 5. Blender Material Creation Script (`blenderscripts/create_materials.py`)
|
||||
* **Purpose:** This script, designed to be run *within* Blender (either manually or triggered by `main.py`/GUI), scans processed assets and creates/updates materials in the active `.blend` file that link to the PBRSET node groups created by `create_nodegroups.py`.
|
||||
* **Execution:** Typically run via the Asset Processor tool's CLI or GUI after asset processing. Can also be run manually in Blender's Text Editor.
|
||||
* **Prerequisites (for manual run):**
|
||||
* A library of assets processed by this tool, located at a known path.
|
||||
* A `.blend` file containing the PBRSET node groups created by `create_nodegroups.py`.
|
||||
* A template material in the *current* Blender file named `Template_PBRMaterial` that uses nodes and contains a Group node labeled `PLACEHOLDER_NODE_LABEL`.
|
||||
* **Configuration (Inside the script for manual run):**
|
||||
* `PROCESSED_ASSET_LIBRARY_ROOT`: **Must be updated** within the script to point to the base output directory where the processed supplier folders (e.g., `Poliigon/`) are located. This is overridden by the tool when run via CLI/GUI.
|
||||
* `NODEGROUP_BLEND_FILE_PATH`: **Must be updated** within the script to point to the `.blend` file containing the PBRSET node groups. This is overridden by the tool when run via CLI/GUI.
|
||||
* `TEMPLATE_MATERIAL_NAME`, `PLACEHOLDER_NODE_LABEL`, `MATERIAL_NAME_PREFIX`, `PBRSET_GROUP_PREFIX`, etc., can be adjusted if needed.
|
||||
* **Functionality:** Reads metadata, creates/updates materials by copying the template, links the corresponding PBRSET node group from the specified `.blend` file, marks materials as assets, copies tags, sets custom previews, and sets viewport properties based on metadata. Includes an explicit save command at the end.
|
||||
|
||||
## Processing Pipeline (Simplified)
|
||||
|
||||
1. **Extraction:** Input ZIP/folder contents are extracted/copied to a temporary workspace.
|
||||
2. **Classification:** Files are scanned and classified (map, model, extra, ignored) using preset rules.
|
||||
3. **Metadata Determination:** Asset name, category, and archetype are determined.
|
||||
4. **Skip Check:** If output exists and overwrite is off, processing stops here.
|
||||
5. **Map Processing:** Identified maps are loaded, resized, converted (bit depth, format), and saved. Gloss maps are inverted if needed. Stats are calculated.
|
||||
6. **Merging:** Channels are merged according to preset rules and saved.
|
||||
7. **Metadata Generation:** `metadata.json` is created with all collected information.
|
||||
8. **Output Organization:** Processed files are moved to the final structured output directory.
|
||||
9. **Cleanup:** The temporary workspace is removed.
|
||||
10. **Optional Blender Script Execution:** If configured via CLI or GUI, Blender is launched in the background to run `create_nodegroups.py` and `create_materials.py` on specified `.blend` files, using the processed asset output directory as input.
|
||||
|
||||
## Output Structure
|
||||
|
||||
Processed assets are saved to: `<output_base_directory>/<supplier_name>/<asset_name>/`
|
||||
|
||||
Each asset directory typically contains:
|
||||
|
||||
* Processed texture maps (e.g., `AssetName_Color_4K.png`, `AssetName_NRM_2K.exr`).
|
||||
* Merged texture maps (e.g., `AssetName_NRMRGH_4K.png`).
|
||||
* Model files (if present in source).
|
||||
* `metadata.json`: Detailed information about the asset and processing.
|
||||
* `Extra/` (subdirectory): Contains source files that were not classified as standard maps or models. This includes files explicitly matched by `move_to_extra_patterns` in the preset (e.g., previews, documentation) as well as any other unrecognised files.
|
||||
|
||||
## Docker
|
||||
|
||||
A `Dockerfile` and `requirements-docker.txt` are provided for building a container image to run the processor in an isolated environment. Build and run using standard Docker commands.
|
||||
Reference in New Issue
Block a user