diff --git a/.roomodes b/.roomodes
new file mode 100644
index 0000000..1f2c591
--- /dev/null
+++ b/.roomodes
@@ -0,0 +1,3 @@
+{
+  "customModes": []
+}
\ No newline at end of file
diff --git a/gui/delegates.py b/gui/delegates.py
index d40765d..a992280 100644
--- a/gui/delegates.py
+++ b/gui/delegates.py
@@ -126,12 +126,15 @@ class SupplierSearchDelegate(QStyledItemDelegate):
         """Loads the list of known suppliers from the JSON config file."""
         try:
             with open(SUPPLIERS_CONFIG_PATH, 'r') as f:
-                suppliers = json.load(f)
-                if isinstance(suppliers, list):
+                suppliers_data = json.load(f) # Renamed variable for clarity
+                if isinstance(suppliers_data, list):
                     # Ensure all items are strings
-                    return sorted([str(s) for s in suppliers if isinstance(s, str)])
-                else:
-                    log.warning(f"'{SUPPLIERS_CONFIG_PATH}' does not contain a valid list. Starting fresh.")
+                    return sorted([str(s) for s in suppliers_data if isinstance(s, str)])
+                elif isinstance(suppliers_data, dict): # ADDED: Handle dictionary case
+                    # If it's a dictionary, extract keys as supplier names
+                    return sorted([str(key) for key in suppliers_data.keys() if isinstance(key, str)])
+                else: # MODIFIED: Updated warning message
+                    log.warning(f"'{SUPPLIERS_CONFIG_PATH}' does not contain a valid list or dictionary of suppliers. Starting fresh.")
                     return []
         except FileNotFoundError:
             log.info(f"'{SUPPLIERS_CONFIG_PATH}' not found. Starting with an empty supplier list.")
diff --git a/gui/preset_editor_widget.py b/gui/preset_editor_widget.py
index 25b9629..01ce841 100644
--- a/gui/preset_editor_widget.py
+++ b/gui/preset_editor_widget.py
@@ -20,7 +20,8 @@ script_dir = Path(__file__).parent
 project_root = script_dir.parent
 PRESETS_DIR = project_root / "Presets"
 TEMPLATE_PATH = PRESETS_DIR / "_template.json"
-APP_SETTINGS_PATH_LOCAL = project_root / "config" / "app_settings.json"
+APP_SETTINGS_PATH_LOCAL = project_root / "config" / "app_settings.json" # Retain for other settings if used elsewhere
+FILE_TYPE_DEFINITIONS_PATH = project_root / "config" / "file_type_definitions.json"
 
 log = logging.getLogger(__name__)
 
@@ -63,18 +64,19 @@ class PresetEditorWidget(QWidget):
         """Loads FILE_TYPE_DEFINITIONS keys from app_settings.json."""
         keys = []
         try:
-            if APP_SETTINGS_PATH_LOCAL.is_file():
-                with open(APP_SETTINGS_PATH_LOCAL, 'r', encoding='utf-8') as f:
+            if FILE_TYPE_DEFINITIONS_PATH.is_file():
+                with open(FILE_TYPE_DEFINITIONS_PATH, 'r', encoding='utf-8') as f:
                     settings = json.load(f)
+                    # The FILE_TYPE_DEFINITIONS key is at the root of file_type_definitions.json
                     ftd = settings.get("FILE_TYPE_DEFINITIONS", {})
                     keys = list(ftd.keys())
-                    log.debug(f"Successfully loaded {len(keys)} FILE_TYPE_DEFINITIONS keys.")
+                    log.debug(f"Successfully loaded {len(keys)} FILE_TYPE_DEFINITIONS keys from {FILE_TYPE_DEFINITIONS_PATH}.")
             else:
-                log.error(f"app_settings.json not found at {APP_SETTINGS_PATH_LOCAL} for PresetEditorWidget.")
+                log.error(f"file_type_definitions.json not found at {FILE_TYPE_DEFINITIONS_PATH} for PresetEditorWidget.")
         except json.JSONDecodeError as e:
-            log.error(f"Failed to parse app_settings.json in PresetEditorWidget: {e}")
+            log.error(f"Failed to parse file_type_definitions.json in PresetEditorWidget: {e}")
         except Exception as e:
-            log.error(f"Error loading FILE_TYPE_DEFINITIONS keys in PresetEditorWidget: {e}")
+            log.error(f"Error loading FILE_TYPE_DEFINITIONS keys from {FILE_TYPE_DEFINITIONS_PATH} in PresetEditorWidget: {e}")
         return keys
 
     def _init_ui(self):
diff --git a/gui/unified_view_model.py b/gui/unified_view_model.py
index 1ebd2bb..64e58ef 100644
--- a/gui/unified_view_model.py
+++ b/gui/unified_view_model.py
@@ -552,6 +552,13 @@ class UnifiedViewModel(QAbstractItemModel):
                 supplier_col_index = self.createIndex(existing_source_row, self.COL_SUPPLIER, existing_source_rule)
                 self.dataChanged.emit(supplier_col_index, supplier_col_index, [Qt.DisplayRole, Qt.EditRole])
 
+            # Update the preset_name from the new_source_rule when it differs, as this reflects the latest prediction context
+            if existing_source_rule.preset_name != new_source_rule.preset_name:
+                log.debug(f"  Updating preset_name for SourceRule '{source_path}' from '{existing_source_rule.preset_name}' to '{new_source_rule.preset_name}'")
+                existing_source_rule.preset_name = new_source_rule.preset_name
+            # Note: preset_name is not directly displayed in the view, so no dataChanged needed for a specific column,
+            # but if it influenced other display elements, dataChanged would be emitted for those.
+
             # --- Merge AssetRules ---
             existing_assets_dict = {asset.asset_name: asset for asset in existing_source_rule.assets}
 
diff --git a/main.py b/main.py
index 2e04852..7b859d8 100644
--- a/main.py
+++ b/main.py
@@ -4,6 +4,7 @@ import time
 import os
 import logging
 from pathlib import Path
+import re # Added for checking incrementing token
 from concurrent.futures import ProcessPoolExecutor, as_completed
 import subprocess
 import shutil
@@ -238,9 +239,14 @@ class ProcessingTask(QRunnable):
                 # output_dir should already be a Path object
                 pattern = getattr(config, 'output_directory_pattern', None)
                 if pattern:
-                    log.debug(f"Calculating next incrementing value for dir: {output_dir} using pattern: {pattern}")
-                    next_increment_str = get_next_incrementing_value(output_dir, pattern)
-                    log.info(f"Calculated next incrementing value for {output_dir}: {next_increment_str}")
+                    # Only call get_next_incrementing_value if the pattern contains an incrementing token
+                    if re.search(r"\[IncrementingValue\]|#+", pattern):
+                        log.debug(f"Incrementing token found in pattern '{pattern}'. Calculating next value for dir: {output_dir}")
+                        next_increment_str = get_next_incrementing_value(output_dir, pattern)
+                        log.info(f"Calculated next incrementing value for {output_dir}: {next_increment_str}")
+                    else:
+                        log.debug(f"No incrementing token found in pattern '{pattern}'. Skipping increment calculation.")
+                        next_increment_str = None # Or a default like "00" if downstream expects a string, but None is cleaner if handled.
                 else:
                     log.warning(f"Cannot calculate incrementing value: 'output_directory_pattern' not found in configuration for preset {config.preset_name}")
             except Exception as e:
diff --git a/monitor.py b/monitor.py
index b550cd0..8c0beb3 100644
--- a/monitor.py
+++ b/monitor.py
@@ -195,17 +195,25 @@ def _process_archive_task(archive_path: Path, output_dir: Path, processed_dir: P
     # Assuming config object has 'output_directory_pattern' attribute/key
     pattern = getattr(config, 'output_directory_pattern', None) # Use getattr for safety
     if pattern:
-        log.debug(f"[Task:{archive_path.name}] Calculating next incrementing value for dir: {output_dir} using pattern: {pattern}")
-        next_increment_str = get_next_incrementing_value(output_dir, pattern)
-        log.info(f"[Task:{archive_path.name}] Calculated next incrementing value: {next_increment_str}")
+        if re.search(r"\[IncrementingValue\]|#+", pattern):
+            log.debug(f"[Task:{archive_path.name}] Incrementing token found in pattern '{pattern}'. Calculating next value for dir: {output_dir}")
+            next_increment_str = get_next_incrementing_value(output_dir, pattern)
+            log.info(f"[Task:{archive_path.name}] Calculated next incrementing value: {next_increment_str}")
+        else:
+            log.debug(f"[Task:{archive_path.name}] No incrementing token found in pattern '{pattern}'. Skipping increment calculation.")
+            next_increment_str = None
     else:
         # Check if config is a dict as fallback (depends on load_config implementation)
         if isinstance(config, dict):
             pattern = config.get('output_directory_pattern')
             if pattern:
-                log.debug(f"[Task:{archive_path.name}] Calculating next incrementing value for dir: {output_dir} using pattern (from dict): {pattern}")
-                next_increment_str = get_next_incrementing_value(output_dir, pattern)
-                log.info(f"[Task:{archive_path.name}] Calculated next incrementing value (from dict): {next_increment_str}")
+                if re.search(r"\[IncrementingValue\]|#+", pattern):
+                    log.debug(f"[Task:{archive_path.name}] Incrementing token found in pattern '{pattern}' (from dict). Calculating next value for dir: {output_dir}")
+                    next_increment_str = get_next_incrementing_value(output_dir, pattern)
+                    log.info(f"[Task:{archive_path.name}] Calculated next incrementing value (from dict): {next_increment_str}")
+                else:
+                    log.debug(f"[Task:{archive_path.name}] No incrementing token found in pattern '{pattern}' (from dict). Skipping increment calculation.")
+                    next_increment_str = None
             else:
                 log.warning(f"[Task:{archive_path.name}] Cannot calculate incrementing value: 'output_directory_pattern' not found in configuration dictionary.")
         else: