Further changes to bring refactor up to feature parity + updated docs

This commit is contained in:
2025-05-09 20:47:44 +02:00
parent deeb1595fd
commit beb8640085
13 changed files with 898 additions and 270 deletions

View File

@@ -1,5 +1,6 @@
import uuid
import dataclasses
import re
import os
import logging
from pathlib import Path
@@ -63,17 +64,136 @@ class IndividualMapProcessingStage(ProcessingStage):
self._update_file_rule_status(context, temp_id_for_fail, 'Failed', map_type=map_type_for_fail, details="Workspace path invalid")
return context
# Fetch config settings once before the loop
respect_variant_map_types = getattr(context.config_obj, "respect_variant_map_types", [])
image_resolutions = getattr(context.config_obj, "image_resolutions", {})
output_filename_pattern = getattr(context.config_obj, "output_filename_pattern", "[assetname]_[maptype]_[resolution].[ext]")
for file_rule_idx, file_rule in enumerate(context.files_to_process):
# Generate a unique ID for this file_rule processing instance for processed_maps_details
current_map_id_hex = f"map_{file_rule_idx}_{uuid.uuid4().hex[:8]}"
current_map_type = file_rule.item_type_override or file_rule.item_type or "UnknownMapType"
initial_current_map_type = file_rule.item_type_override or file_rule.item_type or "UnknownMapType"
# --- START NEW SUFFIXING LOGIC ---
final_current_map_type = initial_current_map_type # Default to initial
# 1. Determine Base Map Type from initial_current_map_type
base_map_type_match = re.match(r"(MAP_[A-Z]{3})", initial_current_map_type)
if base_map_type_match and context.asset_rule:
true_base_map_type = base_map_type_match.group(1) # This is "MAP_XXX"
# 2. Count Occurrences and Find Index of current_file_rule in context.asset_rule.files
peers_of_same_base_type_in_asset_rule = []
for fr_asset in context.asset_rule.files:
fr_asset_item_type = fr_asset.item_type_override or fr_asset.item_type or "UnknownMapType"
fr_asset_base_map_type_match = re.match(r"(MAP_[A-Z]{3})", fr_asset_item_type)
if fr_asset_base_map_type_match:
fr_asset_base_map_type = fr_asset_base_map_type_match.group(1)
if fr_asset_base_map_type == true_base_map_type:
peers_of_same_base_type_in_asset_rule.append(fr_asset)
num_occurrences_of_base_type = len(peers_of_same_base_type_in_asset_rule)
current_instance_index = 0 # 1-based
try:
current_instance_index = peers_of_same_base_type_in_asset_rule.index(file_rule) + 1
except ValueError:
logger.warning(
f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (Initial Type: '{initial_current_map_type}', Base: '{true_base_map_type}'): "
f"Could not find its own instance in the list of peers from asset_rule.files. "
f"Number of peers found: {num_occurrences_of_base_type}. Suffixing may be affected."
)
# 3. Determine Suffix
map_type_for_respect_check = true_base_map_type.replace("MAP_", "") # e.g., "COL"
is_in_respect_list = map_type_for_respect_check in respect_variant_map_types
suffix_to_append = ""
if num_occurrences_of_base_type > 1:
if current_instance_index > 0:
suffix_to_append = f"-{current_instance_index}"
else:
logger.warning(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}': Index for multi-occurrence map type '{true_base_map_type}' (count: {num_occurrences_of_base_type}) not determined. Omitting numeric suffix.")
elif num_occurrences_of_base_type == 1 and is_in_respect_list:
suffix_to_append = "-1"
# 4. Form the final_current_map_type
if suffix_to_append:
final_current_map_type = true_base_map_type + suffix_to_append
else:
final_current_map_type = initial_current_map_type
current_map_type = final_current_map_type
# --- END NEW SUFFIXING LOGIC ---
# --- START: Filename-friendly map type derivation ---
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: --- Starting Filename-Friendly Map Type Logic for: {current_map_type} ---")
filename_friendly_map_type = current_map_type # Fallback
# 1. Access FILE_TYPE_DEFINITIONS
file_type_definitions = None
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Attempting to access context.config_obj.FILE_TYPE_DEFINITIONS.")
try:
file_type_definitions = context.config_obj.FILE_TYPE_DEFINITIONS
if not file_type_definitions: # Check if it's None or empty
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: FILE_TYPE_DEFINITIONS is present but empty or None.")
else:
sample_defs_log = {k: file_type_definitions[k] for k in list(file_type_definitions.keys())[:2]} # Log first 2 for brevity
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Accessed FILE_TYPE_DEFINITIONS. Sample: {sample_defs_log}, Total keys: {len(file_type_definitions)}.")
except AttributeError:
logger.error(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Could not access context.config_obj.FILE_TYPE_DEFINITIONS via direct attribute.")
base_map_key = None
suffix_part = ""
if file_type_definitions and isinstance(file_type_definitions, dict) and len(file_type_definitions) > 0:
base_map_key = None
suffix_part = ""
sorted_known_base_keys = sorted(list(file_type_definitions.keys()), key=len, reverse=True)
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Sorted known base keys for parsing: {sorted_known_base_keys}")
for known_key in sorted_known_base_keys:
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Checking if '{current_map_type}' starts with '{known_key}'")
if current_map_type.startswith(known_key):
base_map_key = known_key
suffix_part = current_map_type[len(known_key):]
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Match found! current_map_type: '{current_map_type}', base_map_key: '{base_map_key}', suffix_part: '{suffix_part}'")
break
if base_map_key is None:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Could not parse base_map_key from '{current_map_type}' using known keys. Fallback: filename_friendly_map_type = '{filename_friendly_map_type}'.")
else:
definition = file_type_definitions.get(base_map_key)
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Definition for '{base_map_key}': {definition}")
if definition and isinstance(definition, dict):
standard_type_alias = definition.get("standard_type")
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Standard type alias for '{base_map_key}': '{standard_type_alias}'")
if standard_type_alias and isinstance(standard_type_alias, str) and standard_type_alias.strip():
filename_friendly_map_type = standard_type_alias.strip() + suffix_part
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Successfully transformed map type: '{current_map_type}' -> '{filename_friendly_map_type}' (standard_type_alias: '{standard_type_alias}', suffix_part: '{suffix_part}').")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Standard type alias for '{base_map_key}' is missing, empty, or not a string (value: '{standard_type_alias}'). Using fallback. filename_friendly_map_type = '{filename_friendly_map_type}'.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: No definition or invalid definition for '{base_map_key}' (value: {definition}). Using fallback. filename_friendly_map_type = '{filename_friendly_map_type}'.")
elif file_type_definitions is None:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: FILE_TYPE_DEFINITIONS not available for lookup (was None). Using fallback. filename_friendly_map_type = '{filename_friendly_map_type}'.")
elif not isinstance(file_type_definitions, dict):
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: FILE_TYPE_DEFINITIONS is not a dictionary (type: {type(file_type_definitions)}). Using fallback. filename_friendly_map_type = '{filename_friendly_map_type}'.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: FILE_TYPE_DEFINITIONS is an empty dictionary. Using fallback. filename_friendly_map_type = '{filename_friendly_map_type}'.")
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Final filename_friendly_map_type: '{filename_friendly_map_type}'")
# --- END: Filename-friendly map type derivation ---
if not current_map_type or not current_map_type.startswith("MAP_") or current_map_type == "MAP_GEN_COMPOSITE":
logger.debug(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}': Skipping, item_type '{current_map_type}' not targeted for individual processing.")
logger.debug(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}': Skipping, item_type '{current_map_type}' (initial: '{initial_current_map_type}') not targeted for individual processing.")
continue
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (Type: {current_map_type}, ID: {current_map_id_hex}): Starting individual processing.")
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (Type: {current_map_type}, Initial Type: {initial_current_map_type}, ID: {current_map_id_hex}): Starting individual processing.")
# A. Find Source File (using file_rule.file_path as the pattern relative to source_base_path)
# The _find_source_file might need adjustment if file_rule.file_path is absolute or needs complex globbing.
@@ -81,117 +201,343 @@ class IndividualMapProcessingStage(ProcessingStage):
source_file_path = self._find_source_file(source_base_path, file_rule.file_path, asset_name_for_log, current_map_id_hex)
if not source_file_path:
logger.error(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Source file not found with path/pattern '{file_rule.file_path}' in '{source_base_path}'.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=current_map_type, details="Source file not found")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=filename_friendly_map_type, details="Source file not found")
continue
# B. Load and Transform Image
image_data: Optional[np.ndarray] = ipu.load_image(str(source_file_path))
if image_data is None:
logger.error(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Failed to load image from '{source_file_path}'.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=current_map_type, source_file=str(source_file_path), details="Image load failed")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=filename_friendly_map_type, source_file=str(source_file_path), details="Image load failed")
continue
original_height, original_width = image_data.shape[:2]
logger.debug(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Loaded image '{source_file_path}' with dimensions {original_width}x{original_height}.")
# Initialize transform settings with defaults
transform_settings = {
"target_width": 2048,
"target_height": None,
"resize_mode": "fit",
"ensure_pot": False,
"allow_upscale": False,
"resize_filter": "AREA",
"color_profile_management": False,
"target_color_profile": "sRGB",
"output_format_settings": None
}
# 1. Initial Power-of-Two (POT) Downscaling
pot_width = ipu.get_nearest_power_of_two_downscale(original_width)
pot_height = ipu.get_nearest_power_of_two_downscale(original_height)
# Attempt to load transform settings from file_rule.channel_merge_instructions
# Maintain aspect ratio for initial POT scaling, using the smaller of the scaled dimensions
# This ensures we only downscale.
if original_width > 0 and original_height > 0 : # Avoid division by zero
aspect_ratio = original_width / original_height
# Calculate new dimensions based on POT width, then POT height, and pick the one that results in downscale or same size
pot_h_from_w = int(pot_width / aspect_ratio)
pot_w_from_h = int(pot_height * aspect_ratio)
# Option 1: Scale by width, adjust height
candidate1_w, candidate1_h = pot_width, ipu.get_nearest_power_of_two_downscale(pot_h_from_w)
# Option 2: Scale by height, adjust width
candidate2_w, candidate2_h = ipu.get_nearest_power_of_two_downscale(pot_w_from_h), pot_height
# Ensure candidates are not upscaling
if candidate1_w > original_width or candidate1_h > original_height:
candidate1_w, candidate1_h = original_width, original_height # Fallback to original if upscaling
if candidate2_w > original_width or candidate2_h > original_height:
candidate2_w, candidate2_h = original_width, original_height # Fallback to original if upscaling
# Choose the candidate that results in a larger area (preferring less downscaling if multiple POT options)
# but still respects the POT downscale logic for each dimension individually.
# The actual POT dimensions are already calculated by get_nearest_power_of_two_downscale.
# We need to decide if we base the aspect ratio calc on pot_width or pot_height.
# The goal is to make one dimension POT and the other POT while maintaining aspect as much as possible, only downscaling.
final_pot_width = ipu.get_nearest_power_of_two_downscale(original_width)
final_pot_height = ipu.get_nearest_power_of_two_downscale(original_height)
# If original aspect is not 1:1, one of the POT dimensions might need further adjustment to maintain aspect
# after the other dimension is set to its POT.
# We prioritize fitting within the *downscaled* POT dimensions.
# Scale to fit within final_pot_width, adjust height, then make height POT (downscale)
scaled_h_for_pot_w = max(1, round(final_pot_width / aspect_ratio))
h1 = ipu.get_nearest_power_of_two_downscale(scaled_h_for_pot_w)
w1 = final_pot_width
if h1 > final_pot_height: # If this adjustment made height too big, re-evaluate
h1 = final_pot_height
w1 = ipu.get_nearest_power_of_two_downscale(max(1, round(h1 * aspect_ratio)))
# Scale to fit within final_pot_height, adjust width, then make width POT (downscale)
scaled_w_for_pot_h = max(1, round(final_pot_height * aspect_ratio))
w2 = ipu.get_nearest_power_of_two_downscale(scaled_w_for_pot_h)
h2 = final_pot_height
if w2 > final_pot_width: # If this adjustment made width too big, re-evaluate
w2 = final_pot_width
h2 = ipu.get_nearest_power_of_two_downscale(max(1, round(w2 / aspect_ratio)))
# Choose the option that results in larger area (less aggressive downscaling)
# while ensuring both dimensions are POT and not upscaled from original.
if w1 * h1 >= w2 * h2:
base_pot_width, base_pot_height = w1, h1
else:
base_pot_width, base_pot_height = w2, h2
# Final check to ensure no upscaling from original dimensions
base_pot_width = min(base_pot_width, original_width)
base_pot_height = min(base_pot_height, original_height)
# And ensure they are POT
base_pot_width = ipu.get_nearest_power_of_two_downscale(base_pot_width)
base_pot_height = ipu.get_nearest_power_of_two_downscale(base_pot_height)
else: # Handle cases like 0-dim images, though load_image should prevent this
base_pot_width, base_pot_height = 1, 1
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Original dims: ({original_width},{original_height}), Initial POT Scaled Dims: ({base_pot_width},{base_pot_height}).")
# Calculate and store aspect ratio change string
if original_width > 0 and original_height > 0 and base_pot_width > 0 and base_pot_height > 0:
aspect_change_str = ipu.normalize_aspect_ratio_change(
original_width, original_height,
base_pot_width, base_pot_height
)
if aspect_change_str:
# This will overwrite if multiple maps are processed; specified by requirements.
context.asset_metadata['aspect_ratio_change_string'] = aspect_change_str
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type {current_map_type}: Calculated aspect ratio change string: '{aspect_change_str}' (Original: {original_width}x{original_height}, Base POT: {base_pot_width}x{base_pot_height}). Stored in asset_metadata.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type {current_map_type}: Failed to calculate aspect ratio change string.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type {current_map_type}: Skipping aspect ratio change string calculation due to invalid dimensions (Original: {original_width}x{original_height}, Base POT: {base_pot_width}x{base_pot_height}).")
base_pot_image_data = image_data.copy()
if (base_pot_width, base_pot_height) != (original_width, original_height):
interpolation = cv2.INTER_AREA # Good for downscaling
base_pot_image_data = ipu.resize_image(base_pot_image_data, base_pot_width, base_pot_height, interpolation=interpolation)
if base_pot_image_data is None:
logger.error(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Failed to resize image to base POT dimensions.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=filename_friendly_map_type, source_file=str(source_file_path), original_dimensions=(original_width, original_height), details="Base POT resize failed")
continue
# Color Profile Management (after initial POT resize, before multi-res saving)
# Initialize transform settings with defaults for color management
transform_settings = {
"color_profile_management": False, # Default, can be overridden by FileRule
"target_color_profile": "sRGB", # Default
"output_format_settings": None # For JPG quality, PNG compression
}
if file_rule.channel_merge_instructions and 'transform' in file_rule.channel_merge_instructions:
custom_transform_settings = file_rule.channel_merge_instructions['transform']
if isinstance(custom_transform_settings, dict):
transform_settings.update(custom_transform_settings)
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Loaded transform settings from file_rule.channel_merge_instructions.")
else:
logger.warning(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): 'transform' in channel_merge_instructions is not a dictionary. Using defaults.")
# TODO: Implement fallback to context.config_obj for global/item_type specific transform settings
# else:
# # Example: config_transforms = context.config_obj.get_transform_settings(file_rule.item_type or file_rule.item_type_override)
# # if config_transforms:
# # transform_settings.update(config_transforms)
target_width, target_height = ipu.calculate_target_dimensions(
original_width, original_height,
transform_settings['target_width'], transform_settings['target_height'],
transform_settings['resize_mode'],
transform_settings['ensure_pot'],
transform_settings['allow_upscale']
)
logger.debug(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Original dims: ({original_width},{original_height}), Calculated target dims: ({target_width},{target_height}) using sourced transforms.")
processed_image_data = image_data.copy()
if (target_width, target_height) != (original_width, original_height):
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Resizing from ({original_width},{original_height}) to ({target_width},{target_height}).")
interpolation_map = {"NEAREST": cv2.INTER_NEAREST, "LINEAR": cv2.INTER_LINEAR, "CUBIC": cv2.INTER_CUBIC, "AREA": cv2.INTER_AREA, "LANCZOS4": cv2.INTER_LANCZOS4}
interpolation = interpolation_map.get(transform_settings['resize_filter'].upper(), cv2.INTER_AREA)
processed_image_data = ipu.resize_image(processed_image_data, target_width, target_height, interpolation=interpolation)
if processed_image_data is None:
logger.error(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Failed to resize image.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=current_map_type, source_file=str(source_file_path), original_dimensions=(original_width, original_height), details="Image resize failed")
continue
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Loaded transform settings for color/output from file_rule.")
if transform_settings['color_profile_management'] and transform_settings['target_color_profile'] == "RGB":
if len(processed_image_data.shape) == 3 and processed_image_data.shape[2] == 3:
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Converting BGR to RGB.")
processed_image_data = ipu.convert_bgr_to_rgb(processed_image_data)
elif len(processed_image_data.shape) == 3 and processed_image_data.shape[2] == 4:
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Converting BGRA to RGBA.")
processed_image_data = ipu.convert_bgra_to_rgba(processed_image_data)
if len(base_pot_image_data.shape) == 3 and base_pot_image_data.shape[2] == 3: # BGR to RGB
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Converting BGR to RGB for base POT image.")
base_pot_image_data = ipu.convert_bgr_to_rgb(base_pot_image_data)
elif len(base_pot_image_data.shape) == 3 and base_pot_image_data.shape[2] == 4: # BGRA to RGBA
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Converting BGRA to RGBA for base POT image.")
base_pot_image_data = ipu.convert_bgra_to_rgba(base_pot_image_data)
# Ensure engine_temp_dir exists before saving base POT
if not context.engine_temp_dir.exists():
try:
context.engine_temp_dir.mkdir(parents=True, exist_ok=True)
logger.info(f"Asset '{asset_name_for_log}': Created engine_temp_dir at '{context.engine_temp_dir}'")
except OSError as e:
logger.error(f"Asset '{asset_name_for_log}': Failed to create engine_temp_dir '{context.engine_temp_dir}': {e}")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=current_map_type, source_file=str(source_file_path), details="Failed to create temp directory")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=filename_friendly_map_type, source_file=str(source_file_path), details="Failed to create temp directory for base POT")
continue
temp_filename_suffix = Path(source_file_path).suffix
safe_map_type_filename = sanitize_filename(current_map_type)
temp_output_filename = f"processed_{safe_map_type_filename}_{current_map_id_hex}{temp_filename_suffix}"
temp_output_path = context.engine_temp_dir / temp_output_filename
save_params = []
if transform_settings['output_format_settings']:
if temp_filename_suffix.lower() in ['.jpg', '.jpeg']:
quality = transform_settings['output_format_settings'].get('quality', 95)
save_params = [cv2.IMWRITE_JPEG_QUALITY, quality]
elif temp_filename_suffix.lower() == '.png':
compression = transform_settings['output_format_settings'].get('compression_level', 3)
save_params = [cv2.IMWRITE_PNG_COMPRESSION, compression]
base_pot_temp_filename = f"{current_map_id_hex}_basePOT{temp_filename_suffix}"
base_pot_temp_path = context.engine_temp_dir / base_pot_temp_filename
save_success = ipu.save_image(str(temp_output_path), processed_image_data, params=save_params)
# Determine save parameters for base POT image (can be different from variants if needed)
base_save_params = []
base_output_ext = temp_filename_suffix.lstrip('.') # Default to original, can be overridden by format rules
# TODO: Add logic here to determine base_output_ext and base_save_params based on bit depth and config, similar to variants.
# For now, using simple save.
if not save_success:
logger.error(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Failed to save temporary image to '{temp_output_path}'.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=current_map_type, source_file=str(source_file_path), original_dimensions=(original_width, original_height), processed_dimensions=(processed_image_data.shape[1], processed_image_data.shape[0]) if processed_image_data is not None else None, details="Temporary image save failed")
if not ipu.save_image(str(base_pot_temp_path), base_pot_image_data, params=base_save_params):
logger.error(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Failed to save base POT image to '{base_pot_temp_path}'.")
self._update_file_rule_status(context, current_map_id_hex, 'Failed', map_type=filename_friendly_map_type, source_file=str(source_file_path), original_dimensions=(original_width, original_height), base_pot_dimensions=(base_pot_width, base_pot_height), details="Base POT image save failed")
continue
logger.info(f"Asset '{asset_name_for_log}', FileRule path '{file_rule.file_path}' (ID: {current_map_id_hex}): Successfully processed and saved temporary map to '{temp_output_path}'.")
self._update_file_rule_status(context, current_map_id_hex, 'Processed', map_type=current_map_type, source_file=str(source_file_path), temp_processed_file=str(temp_output_path), original_dimensions=(original_width, original_height), processed_dimensions=(processed_image_data.shape[1], processed_image_data.shape[0]), details="Successfully processed")
if 'processed_files' not in context.asset_metadata:
context.asset_metadata['processed_files'] = []
context.asset_metadata['processed_files'].append({
'processed_map_key': current_map_id_hex, # Changed from file_rule_id
'path': str(temp_output_path),
'type': 'temporary_map',
'map_type': current_map_type
})
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Successfully saved base POT image to '{base_pot_temp_path}' with dims ({base_pot_width}x{base_pot_height}).")
# Initialize/update the status for this map in processed_maps_details
self._update_file_rule_status(
context,
current_map_id_hex,
'BasePOTSaved', # Intermediate status, will be updated after variant check
map_type=filename_friendly_map_type,
source_file=str(source_file_path),
original_dimensions=(original_width, original_height),
base_pot_dimensions=(base_pot_width, base_pot_height),
temp_processed_file=str(base_pot_temp_path) # Store path to the saved base POT
)
# 2. Multiple Resolution Output (Variants)
processed_at_least_one_resolution_variant = False
# Resolution variants are attempted for all map types individually processed.
# The filter at the beginning of the loop (around line 72) ensures only relevant maps reach this stage.
generate_variants_for_this_map_type = True
if generate_variants_for_this_map_type: # This will now always be true if code execution reaches here
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Map type '{current_map_type}' is eligible for individual processing. Attempting to generate resolution variants.")
# Sort resolutions from largest to smallest
sorted_resolutions = sorted(image_resolutions.items(), key=lambda item: item[1], reverse=True)
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Sorted resolutions for variant processing: {sorted_resolutions}")
for res_key, res_max_dim in sorted_resolutions:
current_w, current_h = base_pot_image_data.shape[1], base_pot_image_data.shape[0]
if current_w <= 0 or current_h <=0:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Base POT image has zero dimension ({current_w}x{current_h}). Skipping this resolution variant.")
continue
if max(current_w, current_h) >= res_max_dim:
target_w_res, target_h_res = current_w, current_h
if max(current_w, current_h) > res_max_dim:
if current_w >= current_h:
target_w_res = res_max_dim
target_h_res = max(1, round(target_w_res / (current_w / current_h)))
else:
target_h_res = res_max_dim
target_w_res = max(1, round(target_h_res * (current_w / current_h)))
else:
logger.debug(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Base POT image ({current_w}x{current_h}) is smaller than target max dim {res_max_dim}. Skipping this resolution variant.")
continue
target_w_res = min(target_w_res, current_w)
target_h_res = min(target_h_res, current_h)
if target_w_res <=0 or target_h_res <=0:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Calculated target variant dims are zero or negative ({target_w_res}x{target_h_res}). Skipping.")
continue
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Processing variant for {res_max_dim}. Base POT Dims: ({current_w}x{current_h}), Target Dims for {res_key}: ({target_w_res}x{target_h_res}).")
output_image_data_for_res = base_pot_image_data
if (target_w_res, target_h_res) != (current_w, current_h):
interpolation_res = cv2.INTER_AREA
output_image_data_for_res = ipu.resize_image(base_pot_image_data, target_w_res, target_h_res, interpolation=interpolation_res)
if output_image_data_for_res is None:
logger.error(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Failed to resize image for resolution variant {res_key}.")
continue
assetname_placeholder = context.asset_rule.asset_name if context.asset_rule else "UnknownAsset"
resolution_placeholder = res_key
# TODO: Implement proper output format/extension determination for variants
output_ext_variant = temp_filename_suffix.lstrip('.')
temp_output_filename_variant = output_filename_pattern.replace("[assetname]", sanitize_filename(assetname_placeholder)) \
.replace("[maptype]", sanitize_filename(filename_friendly_map_type)) \
.replace("[resolution]", sanitize_filename(resolution_placeholder)) \
.replace("[ext]", output_ext_variant)
temp_output_filename_variant = f"{current_map_id_hex}_variant_{temp_output_filename_variant}" # Distinguish variant temp files
temp_output_path_variant = context.engine_temp_dir / temp_output_filename_variant
save_params_variant = []
if transform_settings.get('output_format_settings'):
if output_ext_variant.lower() in ['jpg', 'jpeg']:
quality = transform_settings['output_format_settings'].get('quality', context.config_obj.get("JPG_QUALITY", 95))
save_params_variant = [cv2.IMWRITE_JPEG_QUALITY, quality]
elif output_ext_variant.lower() == 'png':
compression = transform_settings['output_format_settings'].get('compression_level', context.config_obj.get("PNG_COMPRESSION_LEVEL", 6))
save_params_variant = [cv2.IMWRITE_PNG_COMPRESSION, compression]
save_success_variant = ipu.save_image(str(temp_output_path_variant), output_image_data_for_res, params=save_params_variant)
if not save_success_variant:
logger.error(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Failed to save temporary variant image to '{temp_output_path_variant}'.")
continue
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Res {res_key}: Successfully saved temporary variant map to '{temp_output_path_variant}' with dims ({target_w_res}x{target_h_res}).")
processed_at_least_one_resolution_variant = True
if 'variants' not in context.processed_maps_details[current_map_id_hex]:
context.processed_maps_details[current_map_id_hex]['variants'] = []
context.processed_maps_details[current_map_id_hex]['variants'].append({
'resolution_key': res_key,
'temp_path': str(temp_output_path_variant), # Changed 'path' to 'temp_path'
'dimensions': (target_w_res, target_h_res),
'resolution_name': f"{target_w_res}x{target_h_res}" # Retain for potential use
})
if 'processed_files' not in context.asset_metadata:
context.asset_metadata['processed_files'] = []
context.asset_metadata['processed_files'].append({
'processed_map_key': current_map_id_hex,
'resolution_key': res_key,
'path': str(temp_output_path_variant),
'type': 'temporary_map_variant',
'map_type': current_map_type,
'dimensions_w': target_w_res,
'dimensions_h': target_h_res
})
# Calculate and store image statistics for the lowest resolution output
lowest_res_image_data_for_stats = None
image_to_stat_path_for_log = "N/A"
source_of_stats_image = "unknown"
if processed_at_least_one_resolution_variant and \
current_map_id_hex in context.processed_maps_details and \
'variants' in context.processed_maps_details[current_map_id_hex] and \
context.processed_maps_details[current_map_id_hex]['variants']:
variants_list = context.processed_maps_details[current_map_id_hex]['variants']
valid_variants_for_stats = [
v for v in variants_list
if isinstance(v.get('dimensions'), tuple) and len(v['dimensions']) == 2 and v['dimensions'][0] > 0 and v['dimensions'][1] > 0
]
if valid_variants_for_stats:
smallest_variant = min(valid_variants_for_stats, key=lambda v: v['dimensions'][0] * v['dimensions'][1])
if smallest_variant and 'temp_path' in smallest_variant and smallest_variant.get('dimensions'):
smallest_res_w, smallest_res_h = smallest_variant['dimensions']
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Identified smallest variant for stats: {smallest_variant.get('resolution_key', 'N/A')} ({smallest_res_w}x{smallest_res_h}) at {smallest_variant['temp_path']}")
lowest_res_image_data_for_stats = ipu.load_image(smallest_variant['temp_path'])
image_to_stat_path_for_log = smallest_variant['temp_path']
source_of_stats_image = f"variant {smallest_variant.get('resolution_key', 'N/A')}"
if lowest_res_image_data_for_stats is None:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Failed to load smallest variant image '{smallest_variant['temp_path']}' for stats.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Could not determine smallest variant for stats from valid variants list (details missing).")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: No valid variants found to determine the smallest one for stats.")
if lowest_res_image_data_for_stats is None:
if base_pot_image_data is not None:
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Using base POT image for stats (dimensions: {base_pot_width}x{base_pot_height}). Smallest variant not available/loaded or no variants generated.")
lowest_res_image_data_for_stats = base_pot_image_data
image_to_stat_path_for_log = f"In-memory base POT image (dims: {base_pot_width}x{base_pot_height})"
source_of_stats_image = "base POT"
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Base POT image data is also None. Cannot calculate stats.")
if lowest_res_image_data_for_stats is not None:
stats_dict = ipu.calculate_image_stats(lowest_res_image_data_for_stats)
if stats_dict and "error" not in stats_dict:
if 'image_stats_lowest_res' not in context.asset_metadata:
context.asset_metadata['image_stats_lowest_res'] = {}
context.asset_metadata['image_stats_lowest_res'][current_map_type] = stats_dict
logger.info(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type '{current_map_type}': Calculated and stored image stats from '{source_of_stats_image}' (source ref: '{image_to_stat_path_for_log}').")
elif stats_dict and "error" in stats_dict:
logger.error(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type '{current_map_type}': Error calculating image stats from '{source_of_stats_image}': {stats_dict['error']}.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type '{current_map_type}': Failed to calculate image stats from '{source_of_stats_image}' (result was None or empty).")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}, Map Type '{current_map_type}': No image data available (from variant or base POT) to calculate stats.")
# Final status update based on whether variants were generated (and expected)
if generate_variants_for_this_map_type:
if processed_at_least_one_resolution_variant:
self._update_file_rule_status(context, current_map_id_hex, 'Processed_With_Variants', map_type=filename_friendly_map_type, details="Successfully processed with multiple resolution variants.")
else:
logger.warning(f"Asset '{asset_name_for_log}', Map ID {current_map_id_hex}: Variants were expected for map type '{current_map_type}', but none were generated (e.g., base POT too small for any variant tier).")
self._update_file_rule_status(context, current_map_id_hex, 'Processed_No_Variants', map_type=filename_friendly_map_type, details="Variants expected but none generated (e.g., base POT too small).")
else: # No variants were expected for this map type
self._update_file_rule_status(context, current_map_id_hex, 'Processed_No_Variants', map_type=filename_friendly_map_type, details="Processed to base POT; variants not applicable for this map type.")
logger.info(f"Asset '{asset_name_for_log}': Finished individual map processing stage.")
return context
@@ -260,13 +606,34 @@ class IndividualMapProcessingStage(ProcessingStage):
orig_w, orig_h = kwargs['original_dimensions']
context.processed_maps_details[map_id_hex]['original_resolution_name'] = f"{orig_w}x{orig_h}"
if status == 'Processed' and 'processed_dimensions' in kwargs and isinstance(kwargs['processed_dimensions'], tuple) and len(kwargs['processed_dimensions']) == 2:
proc_w, proc_h = kwargs['processed_dimensions']
context.processed_maps_details[map_id_hex]['processed_resolution_name'] = f"{proc_w}x{proc_h}"
elif 'processed_dimensions' in kwargs: # If present but not as expected, log or handle
logger.warning(f"Asset '{asset_name_for_log}', Map ID {map_id_hex}: 'processed_dimensions' present but not a valid tuple: {kwargs['processed_dimensions']}")
# Determine the correct dimensions to use for 'processed_resolution_name'
# This name refers to the base POT scaled image dimensions before variant generation.
dims_to_log_as_base_processed = None
if 'base_pot_dimensions' in kwargs and isinstance(kwargs['base_pot_dimensions'], tuple) and len(kwargs['base_pot_dimensions']) == 2:
# This key is used when status is 'Processed_With_Variants'
dims_to_log_as_base_processed = kwargs['base_pot_dimensions']
elif 'processed_dimensions' in kwargs and isinstance(kwargs['processed_dimensions'], tuple) and len(kwargs['processed_dimensions']) == 2:
# This key is used when status is 'Processed_No_Variants' (and potentially others)
dims_to_log_as_base_processed = kwargs['processed_dimensions']
if dims_to_log_as_base_processed:
proc_w, proc_h = dims_to_log_as_base_processed
resolution_name_str = f"{proc_w}x{proc_h}"
context.processed_maps_details[map_id_hex]['base_pot_resolution_name'] = resolution_name_str
# Ensure 'processed_resolution_name' is also set for OutputOrganizationStage compatibility
context.processed_maps_details[map_id_hex]['processed_resolution_name'] = resolution_name_str
elif 'processed_dimensions' in kwargs or 'base_pot_dimensions' in kwargs:
details_for_warning = kwargs.get('processed_dimensions', kwargs.get('base_pot_dimensions'))
logger.warning(f"Asset '{asset_name_for_log}', Map ID {map_id_hex}: 'processed_dimensions' or 'base_pot_dimensions' key present but its value is not a valid 2-element tuple: {details_for_warning}")
# If temp_processed_file was passed, ensure it's in the details
if 'temp_processed_file' in kwargs:
context.processed_maps_details[map_id_hex]['temp_processed_file'] = kwargs['temp_processed_file']
# Log all details being stored for clarity, including the newly added resolution names
log_details = context.processed_maps_details[map_id_hex].copy()
# Avoid logging full image data if it accidentally gets into kwargs
if 'image_data' in log_details: del log_details['image_data']
if 'base_pot_image_data' in log_details: del log_details['base_pot_image_data']
logger.debug(f"Asset '{asset_name_for_log}', Map ID {map_id_hex}: Status updated to '{status}'. Details: {log_details}")

View File

@@ -217,9 +217,28 @@ class MapMergingStage(ProcessingStage):
if source_image is not None:
if source_image.ndim == 2: # Grayscale source
source_data_this_channel = source_image
elif source_image.ndim == 3: # Color source, take the first channel (assuming it's grayscale or R of RGB)
source_data_this_channel = source_image[:,:,0]
logger.debug(f"Asset {asset_name_for_log}, Merge Op ID {merge_op_id}: Taking channel 0 from {input_map_type_for_this_channel} for output {out_channel_char}.")
elif source_image.ndim == 3 or source_image.ndim == 4: # Color source (3-channel BGR or 4-channel BGRA), assumed loaded by ipu.load_image
# Standard BGR(A) channel indexing: B=0, G=1, R=2, A=3 (if present)
# This map helps get NRM's Red data for 'R' output, NRM's Green for 'G' output etc.
# based on the semantic meaning of out_channel_char.
semantic_to_bgr_idx = {'R': 2, 'G': 1, 'B': 0, 'A': 3}
if input_map_type_for_this_channel == "NRM":
idx_to_extract = semantic_to_bgr_idx.get(out_channel_char)
if idx_to_extract is not None and idx_to_extract < source_image.shape[2]:
source_data_this_channel = source_image[:, :, idx_to_extract]
logger.debug(f"Asset {asset_name_for_log}, Merge Op ID {merge_op_id}: For output '{out_channel_char}', using NRM's semantic '{out_channel_char}' channel (BGR(A) index {idx_to_extract}).")
else:
# Fallback if out_channel_char isn't R,G,B,A or NRM doesn't have the channel (e.g. 3-channel NRM and 'A' requested)
logger.warning(f"Asset {asset_name_for_log}, Merge Op ID {merge_op_id}: Could not map output '{out_channel_char}' to a specific BGR(A) channel of NRM (shape {source_image.shape}). Defaulting to NRM's channel 0 (Blue).")
source_data_this_channel = source_image[:, :, 0]
else:
# For other multi-channel sources (e.g., ROUGH as RGB, or other color maps not "NRM")
# Default to taking the first channel (Blue in BGR).
# This covers "Roughness map's greyscale data" if ROUGH is RGB (by taking one of its channels as a proxy).
source_data_this_channel = source_image[:, :, 0]
logger.debug(f"Asset {asset_name_for_log}, Merge Op ID {merge_op_id}: For output '{out_channel_char}', source {input_map_type_for_this_channel} (shape {source_image.shape}) is multi-channel but not NRM. Using its channel 0 (Blue).")
else: # Source map was not found, use default
default_val_for_channel = default_values.get(out_channel_char)
if default_val_for_channel is not None:

View File

@@ -125,7 +125,27 @@ class MetadataFinalizationAndSaveStage(ProcessingStage):
def make_serializable(data: Any) -> Any:
if isinstance(data, Path):
return str(data)
# metadata_save_path is available from the outer scope
metadata_dir = metadata_save_path.parent
try:
# Attempt to make the path relative if it's absolute and under the same root
if data.is_absolute():
# Check if the path can be made relative (e.g., same drive on Windows)
# This check might need to be more robust depending on os.path.relpath behavior
# For pathlib, relative_to will raise ValueError if not possible.
return str(data.relative_to(metadata_dir))
else:
# If it's already relative, assume it's correct or handle as needed
return str(data)
except ValueError:
# If paths are on different drives or cannot be made relative,
# log a warning and return the absolute path as a string.
# This can happen if an output path was explicitly set to an unrelated directory.
logger.warning(
f"Asset '{asset_name_for_log}': Could not make path {data} "
f"relative to {metadata_dir}. Storing as absolute."
)
return str(data)
if isinstance(data, datetime.datetime): # Ensure datetime is serializable
return data.isoformat()
if isinstance(data, dict):

View File

@@ -50,57 +50,202 @@ class OutputOrganizationStage(ProcessingStage):
# A. Organize Processed Individual Maps
if context.processed_maps_details:
logger.debug(f"Asset '{asset_name_for_log}': Organizing {len(context.processed_maps_details)} processed individual map(s).")
for processed_map_key, details in context.processed_maps_details.items(): # Use processed_map_key
if details.get('status') != 'Processed' or not details.get('temp_processed_file'):
logger.debug(f"Asset '{asset_name_for_log}': Skipping processed map key '{processed_map_key}' due to status '{details.get('status')}' or missing temp file.")
continue
logger.debug(f"Asset '{asset_name_for_log}': Organizing {len(context.processed_maps_details)} processed individual map entries.")
for processed_map_key, details in context.processed_maps_details.items():
map_status = details.get('status')
base_map_type = details.get('map_type', 'unknown_map_type') # Original map type
temp_file_path = Path(details['temp_processed_file'])
map_type = details.get('map_type', 'unknown_map_type')
resolution_str = details.get('processed_resolution_name', details.get('original_resolution_name', 'resX'))
# Construct token_data for path generation
token_data = {
"assetname": asset_name_for_log,
"supplier": context.effective_supplier or "DefaultSupplier",
"maptype": map_type,
"resolution": resolution_str,
"ext": temp_file_path.suffix.lstrip('.'), # Get extension without dot
"incrementingvalue": getattr(context, 'incrementing_value', None),
"sha5": getattr(context, 'sha5_value', None)
}
token_data_cleaned = {k: v for k, v in token_data.items() if v is not None}
# Generate filename first using its pattern
# output_filename = f"{asset_name_for_log}_{sanitize_filename(map_type)}{temp_file_path.suffix}" # Old way
output_filename = generate_path_from_pattern(output_filename_pattern_config, token_data_cleaned)
try:
relative_dir_path_str = generate_path_from_pattern(
pattern_string=output_dir_pattern,
token_data=token_data_cleaned
)
final_path = Path(context.output_base_path) / Path(relative_dir_path_str) / Path(output_filename)
final_path.parent.mkdir(parents=True, exist_ok=True)
if final_path.exists() and not overwrite_existing:
logger.info(f"Asset '{asset_name_for_log}': Output file {final_path} exists and overwrite is disabled. Skipping copy.")
else:
shutil.copy2(temp_file_path, final_path)
logger.info(f"Asset '{asset_name_for_log}': Copied {temp_file_path} to {final_path}")
final_output_files.append(str(final_path))
if map_status in ['Processed', 'Processed_No_Variants']:
if not details.get('temp_processed_file'):
logger.debug(f"Asset '{asset_name_for_log}': Skipping map key '{processed_map_key}' (status '{map_status}') due to missing 'temp_processed_file'.")
details['status'] = 'Organization Skipped (Missing Temp File)'
continue
context.processed_maps_details[processed_map_key]['final_output_path'] = str(final_path)
context.processed_maps_details[processed_map_key]['status'] = 'Organized'
temp_file_path = Path(details['temp_processed_file'])
resolution_str = details.get('processed_resolution_name', details.get('original_resolution_name', 'resX'))
except Exception as e:
logger.error(f"Asset '{asset_name_for_log}': Failed to copy {temp_file_path} to destination for processed map key '{processed_map_key}'. Error: {e}", exc_info=True)
context.status_flags['output_organization_error'] = True
context.asset_metadata['status'] = "Failed (Output Organization Error)"
context.processed_maps_details[processed_map_key]['status'] = 'Organization Failed'
token_data = {
"assetname": asset_name_for_log,
"supplier": context.effective_supplier or "DefaultSupplier",
"maptype": base_map_type,
"resolution": resolution_str,
"ext": temp_file_path.suffix.lstrip('.'),
"incrementingvalue": getattr(context, 'incrementing_value', None),
"sha5": getattr(context, 'sha5_value', None)
}
token_data_cleaned = {k: v for k, v in token_data.items() if v is not None}
output_filename = generate_path_from_pattern(output_filename_pattern_config, token_data_cleaned)
try:
relative_dir_path_str = generate_path_from_pattern(
pattern_string=output_dir_pattern,
token_data=token_data_cleaned
)
final_path = Path(context.output_base_path) / Path(relative_dir_path_str) / Path(output_filename)
final_path.parent.mkdir(parents=True, exist_ok=True)
if final_path.exists() and not overwrite_existing:
logger.info(f"Asset '{asset_name_for_log}': Output file {final_path} for map '{processed_map_key}' exists and overwrite is disabled. Skipping copy.")
else:
shutil.copy2(temp_file_path, final_path)
logger.info(f"Asset '{asset_name_for_log}': Copied {temp_file_path} to {final_path} for map '{processed_map_key}'.")
final_output_files.append(str(final_path))
details['final_output_path'] = str(final_path)
details['status'] = 'Organized'
# Update asset_metadata for metadata.json
map_metadata_entry = context.asset_metadata.setdefault('maps', {}).setdefault(processed_map_key, {})
map_metadata_entry['map_type'] = base_map_type
map_metadata_entry['path'] = str(Path(relative_dir_path_str) / Path(output_filename)) # Store relative path
except Exception as e:
logger.error(f"Asset '{asset_name_for_log}': Failed to copy {temp_file_path} for map key '{processed_map_key}'. Error: {e}", exc_info=True)
context.status_flags['output_organization_error'] = True
context.asset_metadata['status'] = "Failed (Output Organization Error)"
details['status'] = 'Organization Failed'
elif map_status == 'Processed_With_Variants':
variants = details.get('variants')
if not variants: # No variants list, or it's empty
logger.warning(f"Asset '{asset_name_for_log}': Map key '{processed_map_key}' (status '{map_status}') has no 'variants' list or it is empty. Attempting fallback to base file.")
if not details.get('temp_processed_file'):
logger.error(f"Asset '{asset_name_for_log}': Skipping map key '{processed_map_key}' (fallback) as 'temp_processed_file' is also missing.")
details['status'] = 'Organization Failed (No Variants, No Temp File)'
continue # Skip to next map key
# Fallback: Process the base temp_processed_file
temp_file_path = Path(details['temp_processed_file'])
resolution_str = details.get('processed_resolution_name', details.get('original_resolution_name', 'baseRes'))
token_data = {
"assetname": asset_name_for_log,
"supplier": context.effective_supplier or "DefaultSupplier",
"maptype": base_map_type,
"resolution": resolution_str,
"ext": temp_file_path.suffix.lstrip('.'),
"incrementingvalue": getattr(context, 'incrementing_value', None),
"sha5": getattr(context, 'sha5_value', None)
}
token_data_cleaned = {k: v for k, v in token_data.items() if v is not None}
output_filename = generate_path_from_pattern(output_filename_pattern_config, token_data_cleaned)
try:
relative_dir_path_str = generate_path_from_pattern(
pattern_string=output_dir_pattern,
token_data=token_data_cleaned
)
final_path = Path(context.output_base_path) / Path(relative_dir_path_str) / Path(output_filename)
final_path.parent.mkdir(parents=True, exist_ok=True)
if final_path.exists() and not overwrite_existing:
logger.info(f"Asset '{asset_name_for_log}': Output file {final_path} for map '{processed_map_key}' (fallback) exists and overwrite is disabled. Skipping copy.")
else:
shutil.copy2(temp_file_path, final_path)
logger.info(f"Asset '{asset_name_for_log}': Copied {temp_file_path} to {final_path} for map '{processed_map_key}' (fallback).")
final_output_files.append(str(final_path))
details['final_output_path'] = str(final_path)
details['status'] = 'Organized (Base File Fallback)'
map_metadata_entry = context.asset_metadata.setdefault('maps', {}).setdefault(processed_map_key, {})
map_metadata_entry['map_type'] = base_map_type
map_metadata_entry['path'] = str(Path(relative_dir_path_str) / Path(output_filename))
if 'variant_paths' in map_metadata_entry: # Clean up if it was somehow set
del map_metadata_entry['variant_paths']
except Exception as e:
logger.error(f"Asset '{asset_name_for_log}': Failed to copy {temp_file_path} (fallback) for map key '{processed_map_key}'. Error: {e}", exc_info=True)
context.status_flags['output_organization_error'] = True
context.asset_metadata['status'] = "Failed (Output Organization Error - Fallback)"
details['status'] = 'Organization Failed (Fallback)'
continue # Finished with this map key due to fallback
# If we are here, 'variants' list exists and is not empty. Proceed with variant processing.
logger.debug(f"Asset '{asset_name_for_log}': Organizing {len(variants)} variants for map key '{processed_map_key}' (map type: {base_map_type}).")
map_metadata_entry = context.asset_metadata.setdefault('maps', {}).setdefault(processed_map_key, {})
map_metadata_entry['map_type'] = base_map_type
map_metadata_entry.setdefault('variant_paths', {}) # Initialize if not present
processed_any_variant_successfully = False
failed_any_variant = False
for variant_index, variant_detail in enumerate(variants):
temp_variant_path_str = variant_detail.get('temp_path')
if not temp_variant_path_str:
logger.warning(f"Asset '{asset_name_for_log}': Variant {variant_index} for map '{processed_map_key}' is missing 'temp_path'. Skipping.")
variant_detail['status'] = 'Organization Skipped (Missing Temp Path)'
continue
temp_variant_path = Path(temp_variant_path_str)
variant_resolution_key = variant_detail.get('resolution_key', f"varRes{variant_index}")
variant_ext = temp_variant_path.suffix.lstrip('.')
token_data_variant = {
"assetname": asset_name_for_log,
"supplier": context.effective_supplier or "DefaultSupplier",
"maptype": base_map_type,
"resolution": variant_resolution_key,
"ext": variant_ext,
"incrementingvalue": getattr(context, 'incrementing_value', None),
"sha5": getattr(context, 'sha5_value', None)
}
token_data_variant_cleaned = {k: v for k, v in token_data_variant.items() if v is not None}
output_filename_variant = generate_path_from_pattern(output_filename_pattern_config, token_data_variant_cleaned)
try:
relative_dir_path_str_variant = generate_path_from_pattern(
pattern_string=output_dir_pattern,
token_data=token_data_variant_cleaned
)
final_variant_path = Path(context.output_base_path) / Path(relative_dir_path_str_variant) / Path(output_filename_variant)
final_variant_path.parent.mkdir(parents=True, exist_ok=True)
if final_variant_path.exists() and not overwrite_existing:
logger.info(f"Asset '{asset_name_for_log}': Output variant file {final_variant_path} for map '{processed_map_key}' (res: {variant_resolution_key}) exists and overwrite is disabled. Skipping copy.")
variant_detail['status'] = 'Organized (Exists, Skipped Copy)'
else:
shutil.copy2(temp_variant_path, final_variant_path)
logger.info(f"Asset '{asset_name_for_log}': Copied variant {temp_variant_path} to {final_variant_path} for map '{processed_map_key}'.")
final_output_files.append(str(final_variant_path))
variant_detail['status'] = 'Organized'
variant_detail['final_output_path'] = str(final_variant_path)
relative_final_variant_path_str = str(Path(relative_dir_path_str_variant) / Path(output_filename_variant))
map_metadata_entry['variant_paths'][variant_resolution_key] = relative_final_variant_path_str
processed_any_variant_successfully = True
except Exception as e:
logger.error(f"Asset '{asset_name_for_log}': Failed to copy variant {temp_variant_path} for map key '{processed_map_key}' (res: {variant_resolution_key}). Error: {e}", exc_info=True)
context.status_flags['output_organization_error'] = True
context.asset_metadata['status'] = "Failed (Output Organization Error - Variant)"
variant_detail['status'] = 'Organization Failed'
failed_any_variant = True
# Update parent map detail status based on variant outcomes
if failed_any_variant:
details['status'] = 'Organization Failed (Variants)'
elif processed_any_variant_successfully:
# Check if all processable variants were organized
all_attempted_organized = True
for v_detail in variants:
if v_detail.get('temp_path') and not v_detail.get('status', '').startswith('Organized'):
all_attempted_organized = False
break
if all_attempted_organized:
details['status'] = 'Organized (All Attempted Variants)'
else:
details['status'] = 'Partially Organized (Variants)'
elif not any(v.get('temp_path') for v in variants): # No variants had temp_paths to begin with
details['status'] = 'Processed_With_Variants (No Valid Variants to Organize)'
else: # Variants list existed, items had temp_paths, but none were successfully organized (e.g., all skipped due to existing file and no overwrite)
details['status'] = 'Organization Skipped (No Variants Copied/Needed)'
else: # Other statuses like 'Skipped', 'Failed', 'Organization Failed' etc.
logger.debug(f"Asset '{asset_name_for_log}': Skipping map key '{processed_map_key}' (status: '{map_status}') for organization as it's not 'Processed', 'Processed_No_Variants', or 'Processed_With_Variants'.")
continue
else:
logger.debug(f"Asset '{asset_name_for_log}': No processed individual maps to organize.")

View File

def get_nearest_power_of_two_downscale(value: int) -> int:
    """Return the largest power of two that is less than or equal to ``value``.

    If ``value`` is already a power of two, it is returned unchanged.
    Values below 1 clamp to 1.
    """
    if value < 1:
        return 1
    if is_power_of_two(value):
        return value
    # Largest power of two strictly less than value:
    # (1 << (value.bit_length() - 1)) achieves this.
    # Example: value=7 (0b0111, bit_length=3) -> 1 << 2 = 4.
    # Example: value=9 (0b1001, bit_length=4) -> 1 << 3 = 8.
    return 1 << (value.bit_length() - 1)
# --- Dimension Calculation ---
def calculate_target_dimensions(
@@ -184,10 +184,12 @@ def calculate_image_stats(image_data: np.ndarray) -> Optional[Dict]:
stats["min"] = float(np.min(data_float))
stats["max"] = float(np.max(data_float))
stats["mean"] = float(np.mean(data_float))
stats["median"] = float(np.median(data_float))
elif len(data_float.shape) == 3: # Color (H, W, C)
stats["min"] = [float(v) for v in np.min(data_float, axis=(0, 1))]
stats["max"] = [float(v) for v in np.max(data_float, axis=(0, 1))]
stats["mean"] = [float(v) for v in np.mean(data_float, axis=(0, 1))]
stats["median"] = [float(v) for v in np.median(data_float, axis=(0, 1))]
else:
return None # Unsupported shape
return stats
@@ -235,46 +237,67 @@ def normalize_aspect_ratio_change(original_width: int, original_height: int, res
if abs(output_width - 1.0) < epsilon: output_width = 1
if abs(output_height - 1.0) < epsilon: output_height = 1
# Helper to format the number part
def format_value(val, dec):
    """Format a ratio as an integer string with ``dec`` implied decimal places.

    Scaling by 10**dec keeps trailing zeros in effect, e.g.
    val=1.1, dec=2 -> "110"; val=1.0, dec=2 -> "100".
    """
    scaled = round(val * 10 ** dec)
    return str(int(scaled))
if abs(output_width - output_height) < epsilon: # Handles original square or aspect maintained
output = "EVEN"
elif output_width != 1 and abs(output_height - 1.0) < epsilon : # Width changed, height maintained relative to width
output = f"X{str(output_width).replace('.', '')}"
output = f"X{format_value(output_width, decimals)}"
elif output_height != 1 and abs(output_width - 1.0) < epsilon: # Height changed, width maintained relative to height
output = f"Y{str(output_height).replace('.', '')}"
output = f"Y{format_value(output_height, decimals)}"
else: # Both changed relative to each other
output = f"X{str(output_width).replace('.', '')}Y{str(output_height).replace('.', '')}"
output = f"X{format_value(output_width, decimals)}Y{format_value(output_height, decimals)}"
return output
# --- Image Loading, Conversion, Resizing ---
def load_image(image_path: Union[str, Path], read_flag: int = cv2.IMREAD_UNCHANGED) -> Optional[np.ndarray]:
    """Load an image from ``image_path``, converting BGR/BGRA to RGB/RGBA.

    OpenCV reads color images in BGR(A) channel order; the rest of the
    pipeline assumes RGB(A), so 3- and 4-channel images are converted here.
    Grayscale (2-D) images are returned unchanged.

    Returns None if the file cannot be read or any error occurs.
    """
    try:
        img = cv2.imread(str(image_path), read_flag)
        if img is None:
            return None
        # Normalize channel order for color images.
        if len(img.shape) == 3:
            if img.shape[2] == 4:  # BGRA from OpenCV
                img = cv2.cvtColor(img, cv2.COLOR_BGRA2RGBA)
            elif img.shape[2] == 3:  # BGR from OpenCV
                img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        return img
    except Exception:
        # Deliberate best-effort: callers treat a None result as "load failed".
        return None
def convert_bgr_to_rgb(image: np.ndarray) -> np.ndarray:
    """Convert an image from BGR/BGRA to RGB/RGBA color space.

    Non-color inputs (None, or fewer than 3 dimensions) and images that are
    not 3- or 4-channel are returned unchanged.
    """
    if image is None or len(image.shape) < 3:
        return image  # Return as is if not a color image or None
    if image.shape[2] == 4:  # BGRA -> RGBA (alpha channel preserved)
        return cv2.cvtColor(image, cv2.COLOR_BGRA2RGBA)
    elif image.shape[2] == 3:  # BGR -> RGB
        return cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    return image  # Return as is if not 3 or 4 channels
def convert_rgb_to_bgr(image: np.ndarray) -> np.ndarray:
    """Convert an image from RGB/RGBA to BGR/BGRA color space.

    Non-color inputs (None, or fewer than 3 dimensions) and images that are
    not 3- or 4-channel are returned unchanged.
    """
    if image is None or len(image.shape) < 3:
        return image  # Return as is if not a color image or None
    if image.shape[2] == 4:  # RGBA -> BGRA (alpha channel preserved)
        return cv2.cvtColor(image, cv2.COLOR_RGBA2BGRA)
    elif image.shape[2] == 3:  # RGB -> BGR
        return cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
    return image  # Return as is if not 3 or 4 channels
def resize_image(image: np.ndarray, target_width: int, target_height: int, interpolation: Optional[int] = None) -> np.ndarray:
@@ -349,18 +372,19 @@ def save_image(
elif img_to_save.dtype == np.float16: img_to_save = img_to_save.astype(np.float32)
# 2. Color Space Conversion (RGB -> BGR)
# Typically, OpenCV expects BGR for formats like PNG, JPG. EXR usually expects RGB.
# The `convert_to_bgr_before_save` flag controls this.
# If output_format is exr, this should generally be False.
# 2. Color Space Conversion (Internal RGB/RGBA -> BGR/BGRA for OpenCV)
# Input `image_data` is assumed to be in RGB/RGBA format (due to `load_image` changes).
# OpenCV's `imwrite` typically expects BGR/BGRA for formats like PNG, JPG.
# EXR format usually expects RGB/RGBA.
# The `convert_to_bgr_before_save` flag controls this behavior.
current_format = output_format if output_format else path_obj.suffix.lower().lstrip('.')
if convert_to_bgr_before_save and current_format != 'exr':
if len(img_to_save.shape) == 3 and img_to_save.shape[2] == 3:
img_to_save = convert_rgb_to_bgr(img_to_save)
# BGRA is handled by OpenCV imwrite for PNGs, no explicit conversion needed if saving as RGBA.
# If it's 4-channel and not PNG/TIFF with alpha, it might need stripping or specific handling.
# For simplicity, this function assumes 3-channel RGB input if BGR conversion is active.
# If image is 3-channel (RGB) or 4-channel (RGBA), convert to BGR/BGRA.
if len(img_to_save.shape) == 3 and (img_to_save.shape[2] == 3 or img_to_save.shape[2] == 4):
img_to_save = convert_rgb_to_bgr(img_to_save) # Handles RGB->BGR and RGBA->BGRA
# If `convert_to_bgr_before_save` is False or format is 'exr',
# the image (assumed RGB/RGBA) is saved as is.
# 3. Save Image
try: