diff --git a/ORPHANED_MODELS_FEATURE.md b/ORPHANED_MODELS_FEATURE.md new file mode 100644 index 00000000000..18c64bae2df --- /dev/null +++ b/ORPHANED_MODELS_FEATURE.md @@ -0,0 +1,152 @@ +# Orphaned Models Synchronization Feature + +## Overview +This feature adds a UI for synchronizing the models directory by finding and removing orphaned model files. Orphaned models are directories that contain model files but are not referenced in the InvokeAI database. + +## Implementation Summary + +### Backend (Python) + +#### New Service: `OrphanedModelsService` +- Location: `invokeai/app/services/orphaned_models/` +- Implements the core logic from the CLI script +- Methods: + - `find_orphaned_models()`: Scans the models directory and database to find orphaned models + - `delete_orphaned_models(paths)`: Safely deletes specified orphaned model directories + +#### API Routes +Added to `invokeai/app/api/routers/model_manager.py`: +- `GET /api/v2/models/sync/orphaned`: Returns list of orphaned models with metadata +- `DELETE /api/v2/models/sync/orphaned`: Deletes selected orphaned models + +#### Data Models +- `OrphanedModelInfo`: Contains path, absolute_path, files list, and size_bytes +- `DeleteOrphanedModelsRequest`: Contains list of paths to delete +- `DeleteOrphanedModelsResponse`: Contains deleted paths and errors + +### Frontend (TypeScript/React) + +#### New Components + +1. **SyncModelsButton.tsx** + - Red button styled with `colorScheme="error"` for visual prominence + - Labeled "Sync Models" + - Opens the SyncModelsDialog when clicked + - Located next to the "+ Add Models" button + +2. **SyncModelsDialog.tsx** + - Modal dialog that displays orphaned models + - Features: + - List of orphaned models with checkboxes (default: all checked) + - "Select All" / "Deselect All" toggle + - Shows file count and total size for each model + - "Delete" and "Cancel" buttons + - Loading spinner while fetching data + - Error handling with user-friendly messages + - Automatically shows toast if no orphaned models found + - Shows success/error toasts after deletion + +#### API Integration +- Added `useGetOrphanedModelsQuery` and `useDeleteOrphanedModelsMutation` hooks to `services/api/endpoints/models.ts` +- Integrated with RTK Query for efficient data fetching and caching + +#### Translation Strings +Added to `public/locales/en.json`: +- syncModels, noOrphanedModels, orphanedModelsFound +- orphanedModelsDescription, foundOrphanedModels (with pluralization) +- filesCount, deleteSelected, deselectAll +- Success/error messages for deletion operations + +## User Experience Flow + +1. User clicks the red "Sync Models" button in the Model Manager +2. System queries the backend for orphaned models +3. If no orphaned models: + - Toast message: "The models directory is synchronized. No orphaned files found." + - Dialog closes automatically +4. If orphaned models found: + - Dialog shows list with checkboxes (all selected by default) + - User can toggle individual models or use "Select All" / "Deselect All" + - Each model shows: + - Directory path + - File count + - Total size (formatted: B, KB, MB, GB) +5. User clicks "Delete {{count}} selected" +6. System deletes selected models +7. Success/error toasts appear +8. Dialog closes + +## Safety Features + +1. **Database Backup**: The service creates a backup before any deletion +2. **Selective Deletion**: Users choose which models to delete +3. **Path Validation**: Ensures paths are within the models directory +4. 
**Error Handling**: Reports which models failed to delete and why +5. **Default Selected**: All models are selected by default for convenience +6. **Confirmation Required**: User must explicitly click Delete + +## Technical Details + +### Directory-Based Detection +The system treats model paths as directories: +- If database has `model-id/file.safetensors`, the entire `model-id/` directory belongs to that model +- All files and subdirectories within a registered model directory are protected +- Only directories with NO registered models are flagged as orphaned + +### Supported File Extensions +- .safetensors +- .ckpt +- .pt +- .pth +- .bin +- .onnx + +### Skipped Directories +- .download_cache +- .convert_cache +- __pycache__ +- .git + +## Testing Recommendations + +1. **Test with orphaned models**: + - Manually copy a model directory to models folder + - Verify it appears in the dialog + - Delete it and verify removal + +2. **Test with no orphaned models**: + - Clean install + - Verify toast message appears + +3. **Test partial selection**: + - Select only some models + - Verify only selected ones are deleted + +4. **Test error scenarios**: + - Invalid paths + - Permission issues + - Verify error messages are clear + +## Files Changed + +### Backend +- `invokeai/app/services/orphaned_models/__init__.py` (new) +- `invokeai/app/services/orphaned_models/orphaned_models_service.py` (new) +- `invokeai/app/api/routers/model_manager.py` (modified) + +### Frontend +- `invokeai/frontend/web/src/services/api/endpoints/models.ts` (modified) +- `invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManager.tsx` (modified) +- `invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsButton.tsx` (new) +- `invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsDialog.tsx` (new) +- `invokeai/frontend/web/public/locales/en.json` (modified) + +## Future Enhancements + +Potential improvements for future versions: +1. Show preview of what will be deleted before deletion +2. Add option to move orphaned models to archive instead of deleting +3. Show disk space that will be freed +4. Add filter/search in orphaned models list +5. Support for undo operation +6. Scheduled automatic cleanup diff --git a/invokeai/app/api/routers/model_manager.py b/invokeai/app/api/routers/model_manager.py index ceca9f8f53b..f67393f2ea2 100644 --- a/invokeai/app/api/routers/model_manager.py +++ b/invokeai/app/api/routers/model_manager.py @@ -27,6 +27,7 @@ ModelRecordChanges, UnknownModelException, ) +from invokeai.app.services.orphaned_models import OrphanedModelInfo from invokeai.app.util.suppress_output import SuppressOutput from invokeai.backend.model_manager.configs.factory import AnyModelConfig, ModelConfigFactory from invokeai.backend.model_manager.configs.main import ( @@ -148,6 +149,28 @@ async def list_model_records( return ModelsList(models=found_models) +@model_manager_router.get( + "/missing", + operation_id="list_missing_models", + responses={200: {"description": "List of models with missing files"}}, +) +async def list_missing_models() -> ModelsList: + """Get models whose files are missing from disk. + + These are models that have database entries but their corresponding + weight files have been deleted externally (not via Model Manager). 
+ """ + record_store = ApiDependencies.invoker.services.model_manager.store + models_path = ApiDependencies.invoker.services.configuration.models_path + + missing_models: list[AnyModelConfig] = [] + for model_config in record_store.all_models(): + if not (models_path / model_config.path).resolve().exists(): + missing_models.append(model_config) + + return ModelsList(models=missing_models) + + @model_manager_router.get( "/get_by_attrs", operation_id="get_model_records_by_attrs", @@ -1068,3 +1091,79 @@ async def do_hf_login( @model_manager_router.delete("/hf_login", operation_id="reset_hf_token", response_model=HFTokenStatus) async def reset_hf_token() -> HFTokenStatus: return HFTokenHelper.reset_token() + + +# Orphaned Models Management Routes + + +class DeleteOrphanedModelsRequest(BaseModel): + """Request to delete specific orphaned model directories.""" + + paths: list[str] = Field(description="List of relative paths to delete") + + +class DeleteOrphanedModelsResponse(BaseModel): + """Response from deleting orphaned models.""" + + deleted: list[str] = Field(description="Paths that were successfully deleted") + errors: dict[str, str] = Field(description="Paths that had errors, with error messages") + + +@model_manager_router.get( + "/sync/orphaned", + operation_id="get_orphaned_models", + response_model=list[OrphanedModelInfo], +) +async def get_orphaned_models() -> list[OrphanedModelInfo]: + """Find orphaned model directories. + + Orphaned models are directories in the models folder that contain model files + but are not referenced in the database. This can happen when models are deleted + from the database but the files remain on disk. + + Returns: + List of orphaned model directory information + """ + from invokeai.app.services.orphaned_models import OrphanedModelsService + + # Access the database through the model records service + model_records_service = ApiDependencies.invoker.services.model_manager.store + + service = OrphanedModelsService( + config=ApiDependencies.invoker.services.configuration, + db=model_records_service._db, # Access the database from model records service + ) + return service.find_orphaned_models() + + +@model_manager_router.delete( + "/sync/orphaned", + operation_id="delete_orphaned_models", + response_model=DeleteOrphanedModelsResponse, +) +async def delete_orphaned_models(request: DeleteOrphanedModelsRequest) -> DeleteOrphanedModelsResponse: + """Delete specified orphaned model directories. 
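+
+    Example request body (illustrative; the directory names are hypothetical):
+
+        {"paths": ["old-model-dir", "another-orphan"]}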
+ + Args: + request: Request containing list of relative paths to delete + + Returns: + Response indicating which paths were deleted and which had errors + """ + from invokeai.app.services.orphaned_models import OrphanedModelsService + + # Access the database through the model records service + model_records_service = ApiDependencies.invoker.services.model_manager.store + + service = OrphanedModelsService( + config=ApiDependencies.invoker.services.configuration, + db=model_records_service._db, # Access the database from model records service + ) + + results = service.delete_orphaned_models(request.paths) + + # Separate successful deletions from errors + deleted = [path for path, status in results.items() if status == "deleted"] + errors = {path: status for path, status in results.items() if status != "deleted"} + + return DeleteOrphanedModelsResponse(deleted=deleted, errors=errors) diff --git a/invokeai/app/services/orphaned_models/__init__.py b/invokeai/app/services/orphaned_models/__init__.py new file mode 100644 index 00000000000..db9eaae7bb4 --- /dev/null +++ b/invokeai/app/services/orphaned_models/__init__.py @@ -0,0 +1,5 @@ +"""Service for finding and removing orphaned model files.""" + +from invokeai.app.services.orphaned_models.orphaned_models_service import OrphanedModelInfo, OrphanedModelsService + +__all__ = ["OrphanedModelsService", "OrphanedModelInfo"] diff --git a/invokeai/app/services/orphaned_models/orphaned_models_service.py b/invokeai/app/services/orphaned_models/orphaned_models_service.py new file mode 100644 index 00000000000..8d2894c8671 --- /dev/null +++ b/invokeai/app/services/orphaned_models/orphaned_models_service.py @@ -0,0 +1,209 @@ +"""Service for finding and removing orphaned model files. + +Orphaned models are files in the models directory that are not referenced +in the database models table. +""" + +import json +import shutil +from pathlib import Path +from typing import Set + +from pydantic import BaseModel, Field + +from invokeai.app.services.config.config_default import InvokeAIAppConfig +from invokeai.app.services.shared.sqlite.sqlite_database import SqliteDatabase + + +class OrphanedModelInfo(BaseModel): + """Information about an orphaned model directory.""" + + path: str = Field(description="Relative path to the orphaned directory from models root") + absolute_path: str = Field(description="Absolute path to the orphaned directory") + files: list[str] = Field(description="List of model files in this directory") + size_bytes: int = Field(description="Total size of all files in bytes") + + +class OrphanedModelsService: + """Service for finding and removing orphaned model files.""" + + # Common model file extensions + MODEL_EXTENSIONS = { + ".safetensors", + ".ckpt", + ".pt", + ".pth", + ".bin", + ".onnx", + ".gguf", + } + + # Directories to skip during scan + SKIP_DIRS = { + ".download_cache", + ".convert_cache", + "__pycache__", + ".git", + } + + def __init__(self, config: InvokeAIAppConfig, db: SqliteDatabase): + """Initialize the service. + + Args: + config: Application configuration containing models path + db: Database connection for querying registered models + """ + self._config = config + self._db = db + + def find_orphaned_models(self) -> list[OrphanedModelInfo]: + """Find all orphaned model directories. 
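+
+        The scan is three-phase: collect the set of registered model
+        directories from the database, gather all model files on disk, then
+        group every file that falls outside all registered directories by its
+        top-level directory under the models root.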
+ + Returns: + List of OrphanedModelInfo objects describing orphaned directories + """ + models_path = self._config.models_path + + # Get all model directories registered in the database + db_model_directories = self._get_registered_model_directories(models_path) + + # Find all model files on disk + disk_model_files = self._get_all_model_files(models_path) + + # Find orphaned files (files not under any registered model directory) + orphaned_files = set() + for disk_file in disk_model_files: + is_under_model_dir = False + for model_dir in db_model_directories: + try: + # Check if disk_file is under model_dir + disk_file.relative_to(model_dir) + is_under_model_dir = True + break + except ValueError: + # Not under this model directory, continue checking + continue + + if not is_under_model_dir: + orphaned_files.add(disk_file) + + # Group orphaned files by their top-level directory + orphaned_dirs_map: dict[Path, list[Path]] = {} + for orphaned_file in orphaned_files: + # Get the top-level directory relative to models_path + try: + rel_path = orphaned_file.relative_to(models_path) + if rel_path.parts: + top_level_dir = models_path / rel_path.parts[0] + if top_level_dir not in orphaned_dirs_map: + orphaned_dirs_map[top_level_dir] = [] + orphaned_dirs_map[top_level_dir].append(orphaned_file) + except ValueError: + # File is outside models_path, skip it + continue + + # Convert to OrphanedModelInfo objects + result = [] + for dir_path, files in orphaned_dirs_map.items(): + # Calculate total size + total_size = sum(f.stat().st_size for f in files if f.exists()) + + # Get relative file paths + file_names = [str(f.relative_to(dir_path)) for f in files] + + result.append( + OrphanedModelInfo( + path=str(dir_path.relative_to(models_path)), + absolute_path=str(dir_path), + files=file_names, + size_bytes=total_size, + ) + ) + + return result + + def delete_orphaned_models(self, orphaned_paths: list[str]) -> dict[str, str]: + """Delete the specified orphaned model directories. 
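+
+        Each path is re-validated to be inside the models directory before
+        ``shutil.rmtree`` runs, and failures are recorded per path rather than
+        aborting the whole batch.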
+ + Args: + orphaned_paths: List of relative paths to delete (relative to models root) + + Returns: + Dictionary mapping paths to status messages ("deleted" or error message) + """ + models_path = self._config.models_path + results = {} + + for rel_path in orphaned_paths: + try: + full_path = models_path / rel_path + if not full_path.exists(): + results[rel_path] = "error: path does not exist" + continue + + # Safety check: ensure path is under models directory + try: + full_path.relative_to(models_path) + except ValueError: + results[rel_path] = "error: path is not under models directory" + continue + + # Delete the directory + shutil.rmtree(full_path) + results[rel_path] = "deleted" + + except Exception as e: + results[rel_path] = f"error: {str(e)}" + + return results + + def _get_registered_model_directories(self, models_dir: Path) -> Set[Path]: + """Get the set of all model directories from the database.""" + model_directories = set() + + with self._db.transaction() as cursor: + cursor.execute("SELECT config FROM models") + rows = cursor.fetchall() + + for row in rows: + try: + config = json.loads(row[0]) + if "path" in config and config["path"]: + path_str = config["path"] + path = Path(path_str) + + # If the path is relative, resolve it relative to models_dir + if not path.is_absolute(): + full_path = (models_dir / path).resolve() + else: + full_path = path.resolve() + + # Extract the top-level directory under models_dir + try: + rel_path = full_path.relative_to(models_dir) + if rel_path.parts: + top_level_dir = models_dir / rel_path.parts[0] + model_directories.add(top_level_dir.resolve()) + except ValueError: + # Path is not relative to models_dir + model_directories.add(full_path) + + except (json.JSONDecodeError, KeyError, TypeError): + # Skip invalid model configs + continue + + return model_directories + + def _get_all_model_files(self, models_path: Path) -> Set[Path]: + """Get all model files in the models directory.""" + model_files = set() + + for item in models_path.rglob("*"): + # Skip directories we don't want to scan + if any(skip_dir in item.parts for skip_dir in self.SKIP_DIRS): + continue + + if item.is_file() and item.suffix.lower() in self.MODEL_EXTENSIONS: + model_files.add(item.resolve()) + + return model_files diff --git a/invokeai/app/services/shared/graph.py b/invokeai/app/services/shared/graph.py index 2501169edb5..fd31448ea4f 100644 --- a/invokeai/app/services/shared/graph.py +++ b/invokeai/app/services/shared/graph.py @@ -124,38 +124,36 @@ def is_any(t: Any) -> bool: def are_connection_types_compatible(from_type: Any, to_type: Any) -> bool: - if not from_type: - return False - if not to_type: + if not from_type or not to_type: return False - # TODO: this is pretty forgiving on generic types. 
Clean that up (need to handle optionals and such) - if from_type and to_type: - # Ports are compatible - if from_type == to_type or is_any(from_type) or is_any(to_type): - return True + # Ports are compatible + if from_type == to_type or is_any(from_type) or is_any(to_type): + return True - if from_type in get_args(to_type): - return True + if from_type in get_args(to_type): + return True - if to_type in get_args(from_type): - return True + if to_type in get_args(from_type): + return True - # allow int -> float, pydantic will cast for us - if from_type is int and to_type is float: - return True + # allow int -> float, pydantic will cast for us + if from_type is int and to_type is float: + return True - # allow int|float -> str, pydantic will cast for us - if (from_type is int or from_type is float) and to_type is str: - return True + # allow int|float -> str, pydantic will cast for us + if (from_type is int or from_type is float) and to_type is str: + return True - # if not issubclass(from_type, to_type): - if not is_union_subtype(from_type, to_type): - return False - else: - return False + # Prefer issubclass when both are real classes + try: + if isinstance(from_type, type) and isinstance(to_type, type): + return issubclass(from_type, to_type) + except TypeError: + pass - return True + # Union-to-Union (or Union-to-non-Union) handling + return is_union_subtype(from_type, to_type) def are_connections_compatible( @@ -654,6 +652,9 @@ def _is_iterator_connection_valid( if new_output is not None: outputs.append(new_output) + if len(inputs) == 0: + return "Iterator must have a collection input edge" + # Only one input is allowed for iterators if len(inputs) > 1: return "Iterator may only have one input edge" @@ -675,9 +676,13 @@ def _is_iterator_connection_valid( # Collector input type must match all iterator output types if isinstance(input_node, CollectInvocation): + collector_inputs = self._get_input_edges(input_node.id, ITEM_FIELD) + if len(collector_inputs) == 0: + return "Iterator input collector must have at least one item input edge" + # Traverse the graph to find the first collector input edge. 
Collectors validate that their collection # inputs are all of the same type, so we can use the first input edge to determine the collector's type - first_collector_input_edge = self._get_input_edges(input_node.id, ITEM_FIELD)[0] + first_collector_input_edge = collector_inputs[0] first_collector_input_type = get_output_field_type( self.get_node(first_collector_input_edge.source.node_id), first_collector_input_edge.source.field ) @@ -751,21 +756,12 @@ def nx_graph(self) -> nx.DiGraph: g.add_edges_from({(e.source.node_id, e.destination.node_id) for e in self.edges}) return g - def nx_graph_with_data(self) -> nx.DiGraph: - """Returns a NetworkX DiGraph representing the data and layout of this graph""" - g = nx.DiGraph() - g.add_nodes_from(list(self.nodes.items())) - g.add_edges_from({(e.source.node_id, e.destination.node_id) for e in self.edges}) - return g - def nx_graph_flat(self, nx_graph: Optional[nx.DiGraph] = None) -> nx.DiGraph: """Returns a flattened NetworkX DiGraph, including all subgraphs (but not with iterations expanded)""" g = nx_graph or nx.DiGraph() # Add all nodes from this graph except graph/iteration nodes - g.add_nodes_from([n.id for n in self.nodes.values() if not isinstance(n, IterateInvocation)]) - - # TODO: figure out if iteration nodes need to be expanded + g.add_nodes_from([n.id for n in self.nodes.values()]) unique_edges = {(e.source.node_id, e.destination.node_id) for e in self.edges} g.add_edges_from(unique_edges) @@ -816,10 +812,57 @@ class GraphExecutionState(BaseModel): # Optional priority; others follow in name order ready_order: list[str] = Field(default_factory=list) indegree: dict[str, int] = Field(default_factory=dict, description="Remaining unmet input count for exec nodes") + _iteration_path_cache: dict[str, tuple[int, ...]] = PrivateAttr(default_factory=dict) def _type_key(self, node_obj: BaseInvocation) -> str: return node_obj.__class__.__name__ + def _get_iteration_path(self, exec_node_id: str) -> tuple[int, ...]: + """Best-effort outer->inner iteration indices for an execution node, stopping at collectors.""" + cached = self._iteration_path_cache.get(exec_node_id) + if cached is not None: + return cached + + # Only prepared execution nodes participate; otherwise treat as non-iterated. + source_node_id = self.prepared_source_mapping.get(exec_node_id) + if source_node_id is None: + self._iteration_path_cache[exec_node_id] = () + return () + + # Source-graph iterator ancestry, with edges into collectors removed so iteration context doesn't leak. + it_g = self._iterator_graph(self.graph.nx_graph()) + iterator_sources = [ + n for n in nx.ancestors(it_g, source_node_id) if isinstance(self.graph.get_node(n), IterateInvocation) + ] + + # Order iterators outer->inner via topo order of the iterator graph. + topo = list(nx.topological_sort(it_g)) + topo_index = {n: i for i, n in enumerate(topo)} + iterator_sources.sort(key=lambda n: topo_index.get(n, 0)) + + # Map iterator source nodes to the prepared iterator exec nodes that are ancestors of exec_node_id. + eg = self.execution_graph.nx_graph() + path: list[int] = [] + for it_src in iterator_sources: + prepared = self.source_prepared_mapping.get(it_src) + if not prepared: + continue + it_exec = next((p for p in prepared if nx.has_path(eg, p, exec_node_id)), None) + if it_exec is None: + continue + it_node = self.execution_graph.nodes.get(it_exec) + if isinstance(it_node, IterateInvocation): + path.append(it_node.index) + + # If this exec node is itself an iterator, include its own index as the innermost element. 
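+        # E.g. a node nested under two iterators currently at indices 1 and 3
+        # resolves to the path (1, 3) (illustrative values).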
+ node_obj = self.execution_graph.nodes.get(exec_node_id) + if isinstance(node_obj, IterateInvocation): + path.append(node_obj.index) + + result = tuple(path) + self._iteration_path_cache[exec_node_id] = result + return result + def _queue_for(self, cls_name: str) -> Deque[str]: q = self._ready_queues.get(cls_name) if q is None: @@ -843,7 +886,15 @@ def _enqueue_if_ready(self, nid: str) -> None: if self.indegree[nid] != 0 or nid in self.executed: return node_obj = self.execution_graph.nodes[nid] - self._queue_for(self._type_key(node_obj)).append(nid) + q = self._queue_for(self._type_key(node_obj)) + nid_path = self._get_iteration_path(nid) + # Insert in lexicographic outer->inner order; preserve FIFO for equal paths. + for i, existing in enumerate(q): + if self._get_iteration_path(existing) > nid_path: + q.insert(i, nid) + break + else: + q.append(nid) model_config = ConfigDict( json_schema_extra={ @@ -1083,12 +1134,12 @@ def no_unexecuted_iter_ancestors(n: str) -> bool: # Select the correct prepared parents for each iteration # For every iterator, the parent must either not be a child of that iterator, or must match the prepared iteration for that iterator - # TODO: Handle a node mapping to none eg = self.execution_graph.nx_graph_flat() prepared_parent_mappings = [ [(n, self._get_iteration_node(n, g, eg, it)) for n in next_node_parents] for it in iterator_node_prepared_combinations ] # type: ignore + prepared_parent_mappings = [m for m in prepared_parent_mappings if all(p[1] is not None for p in m)] # Create execution node for each iteration for iteration_mappings in prepared_parent_mappings: @@ -1110,15 +1161,17 @@ def _get_iteration_node( if len(prepared_nodes) == 1: return next(iter(prepared_nodes)) - # Check if the requested node is an iterator - prepared_iterator = next((n for n in prepared_nodes if n in prepared_iterator_nodes), None) - if prepared_iterator is not None: - return prepared_iterator - # Filter to only iterator nodes that are a parent of the specified node, in tuple format (prepared, source) iterator_source_node_mapping = [(n, self.prepared_source_mapping[n]) for n in prepared_iterator_nodes] parent_iterators = [itn for itn in iterator_source_node_mapping if nx.has_path(graph, itn[1], source_node_id)] + # If the requested node is an iterator, only accept it if it is compatible with all parent iterators + prepared_iterator = next((n for n in prepared_nodes if n in prepared_iterator_nodes), None) + if prepared_iterator is not None: + if all(nx.has_path(execution_graph, pit[0], prepared_iterator) for pit in parent_iterators): + return prepared_iterator + return None + return next( (n for n in prepared_nodes if all(nx.has_path(execution_graph, pit[0], n) for pit in parent_iterators)), None, @@ -1156,11 +1209,10 @@ def _prepare_inputs(self, node: BaseInvocation): # Inputs must be deep-copied, else if a node mutates the object, other nodes that get the same input # will see the mutation. 
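+        # Collector items are ordered by each producer's iteration path so that
+        # nested iterations assemble the collection deterministically (the outer
+        # iterator's index varies slowest).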
if isinstance(node, CollectInvocation): - output_collection = [ - copydeep(getattr(self.results[edge.source.node_id], edge.source.field)) - for edge in input_edges - if edge.destination.field == ITEM_FIELD - ] + item_edges = [e for e in input_edges if e.destination.field == ITEM_FIELD] + item_edges.sort(key=lambda e: (self._get_iteration_path(e.source.node_id), e.source.node_id)) + + output_collection = [copydeep(getattr(self.results[e.source.node_id], e.source.field)) for e in item_edges] node.collection = output_collection else: for edge in input_edges: diff --git a/invokeai/frontend/web/public/locales/en.json b/invokeai/frontend/web/public/locales/en.json index 5327b6d8251..3c94bbfd6e1 100644 --- a/invokeai/frontend/web/public/locales/en.json +++ b/invokeai/frontend/web/public/locales/en.json @@ -974,6 +974,8 @@ "loraModels": "LoRAs", "main": "Main", "metadata": "Metadata", + "missingFiles": "Missing Files", + "missingFilesTooltip": "Model files are missing from disk", "model": "Model", "modelConversionFailed": "Model Conversion Failed", "modelConverted": "Model Converted", @@ -1046,6 +1048,23 @@ "controlLora": "Control LoRA", "llavaOnevision": "LLaVA OneVision", "syncModels": "Sync Models", + "syncModelsTooltip": "Identify and remove unused model files in the InvokeAI root directory.", + "syncModelsDirectory": "Synchronize Models Directory", + "noOrphanedModels": "The models directory is synchronized. No orphaned files found.", + "orphanedModelsFound": "Orphaned Models Found", + "orphanedModelsDescription": "The following model directories are not referenced in the database and can be safely deleted:", + "foundOrphanedModels": "Found {{count}} orphaned model directory", + "foundOrphanedModels_other": "Found {{count}} orphaned model directories", + "filesCount": "{{count}} file", + "filesCount_other": "{{count}} files", + "deleteSelected": "Delete {{count}} selected", + "deleteSelected_other": "Delete {{count}} selected", + "deselectAll": "Deselect All", + "orphanedModelsDeleted": "Successfully deleted {{count}} orphaned model", + "orphanedModelsDeleted_other": "Successfully deleted {{count}} orphaned models", + "orphanedModelsDeleteErrors": "Some models could not be deleted", + "orphanedModelsDeleteFailed": "Failed to delete orphaned models", + "errorLoadingOrphanedModels": "Error loading orphaned models. 
Please try again.", "textualInversions": "Textual Inversions", "triggerPhrases": "Trigger Phrases", "loraTriggerPhrases": "LoRA Trigger Phrases", diff --git a/invokeai/frontend/web/src/features/modelManagerV2/models.ts b/invokeai/frontend/web/src/features/modelManagerV2/models.ts index cd83315d48c..c4dd56f8113 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/models.ts +++ b/invokeai/frontend/web/src/features/modelManagerV2/models.ts @@ -22,15 +22,15 @@ import { } from 'services/api/types'; import { objectEntries } from 'tsafe'; -import type { FilterableModelType } from './store/modelManagerV2Slice'; +import type { ModelCategoryType } from './store/modelManagerV2Slice'; export type ModelCategoryData = { - category: FilterableModelType; + category: ModelCategoryType; i18nKey: string; filter: (config: AnyModelConfig) => boolean; }; -export const MODEL_CATEGORIES: Record = { +export const MODEL_CATEGORIES: Record = { unknown: { category: 'unknown', i18nKey: 'common.unknown', diff --git a/invokeai/frontend/web/src/features/modelManagerV2/store/modelManagerV2Slice.ts b/invokeai/frontend/web/src/features/modelManagerV2/store/modelManagerV2Slice.ts index 65c9cbc1302..092998d0c31 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/store/modelManagerV2Slice.ts +++ b/invokeai/frontend/web/src/features/modelManagerV2/store/modelManagerV2Slice.ts @@ -7,7 +7,10 @@ import { zModelType } from 'features/nodes/types/common'; import { assert } from 'tsafe'; import z from 'zod'; -const zFilterableModelType = zModelType.exclude(['onnx']).or(z.literal('refiner')); +const zModelCategoryType = zModelType.exclude(['onnx']).or(z.literal('refiner')); +export type ModelCategoryType = z.infer; + +const zFilterableModelType = zModelCategoryType.or(z.literal('missing')); export type FilterableModelType = z.infer; const zModelManagerState = z.object({ diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManager.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManager.tsx index 9447bd4145f..a6c462ddf5b 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManager.tsx +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManager.tsx @@ -8,6 +8,7 @@ import { PiPlusBold } from 'react-icons/pi'; import ModelList from './ModelManagerPanel/ModelList'; import { ModelListNavigation } from './ModelManagerPanel/ModelListNavigation'; +import { SyncModelsButton } from './ModelManagerPanel/SyncModelsButton'; const modelManagerSx: SystemStyleObject = { flexDir: 'column', @@ -33,11 +34,14 @@ export const ModelManager = memo(() => { {t('common.modelManager')} - {!!selectedModelKey && ( - - )} + + + {!!selectedModelKey && ( + + )} + diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/MissingModelsContext.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/MissingModelsContext.tsx new file mode 100644 index 00000000000..2490a5a8648 --- /dev/null +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/MissingModelsContext.tsx @@ -0,0 +1,32 @@ +import type { PropsWithChildren } from 'react'; +import { createContext, useContext, useMemo } from 'react'; +import { modelConfigsAdapterSelectors, useGetMissingModelsQuery } from 'services/api/endpoints/models'; + +type MissingModelsContextValue = { + missingModelKeys: Set; + isLoading: boolean; +}; + +const MissingModelsContext = createContext({ + missingModelKeys: new Set(), + isLoading: 
false, +}); + +export const MissingModelsProvider = ({ children }: PropsWithChildren) => { + const { data, isLoading } = useGetMissingModelsQuery(); + + const value = useMemo(() => { + const missingModels = modelConfigsAdapterSelectors.selectAll(data ?? { ids: [], entities: {} }); + const missingModelKeys = new Set(missingModels.map((m) => m.key)); + return { missingModelKeys, isLoading }; + }, [data, isLoading]); + + return {children}; +}; + +const useMissingModels = () => useContext(MissingModelsContext); + +export const useIsModelMissing = (modelKey: string) => { + const { missingModelKeys } = useMissingModels(); + return missingModelKeys.has(modelKey); +}; diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx index 2159d538bee..f3be0b4686c 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelList.tsx @@ -18,12 +18,14 @@ import { serializeError } from 'serialize-error'; import { modelConfigsAdapterSelectors, useBulkDeleteModelsMutation, + useGetMissingModelsQuery, useGetModelConfigsQuery, } from 'services/api/endpoints/models'; import type { AnyModelConfig } from 'services/api/types'; import { BulkDeleteModelsModal } from './BulkDeleteModelsModal'; import { FetchingModelsLoader } from './FetchingModelsLoader'; +import { MissingModelsProvider } from './MissingModelsContext'; import { ModelListWrapper } from './ModelListWrapper'; const log = logger('models'); @@ -40,11 +42,30 @@ const ModelList = () => { const { isOpen, close } = useBulkDeleteModal(); const [isDeleting, setIsDeleting] = useState(false); - const { data, isLoading } = useGetModelConfigsQuery(); + const { data: allModelsData, isLoading: isLoadingAll } = useGetModelConfigsQuery(); + const { data: missingModelsData, isLoading: isLoadingMissing } = useGetMissingModelsQuery(); const [bulkDeleteModels] = useBulkDeleteModelsMutation(); + const data = filteredModelType === 'missing' ? missingModelsData : allModelsData; + const isLoading = filteredModelType === 'missing' ? isLoadingMissing : isLoadingAll; + const models = useMemo(() => { const modelConfigs = modelConfigsAdapterSelectors.selectAll(data ?? 
{ ids: [], entities: {} }); + + // For missing models filter, show all models in a single category + if (filteredModelType === 'missing') { + const filtered = modelConfigs.filter( + (m) => + m.name.toLowerCase().includes(searchTerm.toLowerCase()) || + m.base.toLowerCase().includes(searchTerm.toLowerCase()) || + m.type.toLowerCase().includes(searchTerm.toLowerCase()) + ); + return { + total: filtered.length, + byCategory: [{ i18nKey: 'modelManager.missingFiles', configs: filtered }], + }; + } + const baseFilteredModelConfigs = modelsFilter(modelConfigs, searchTerm, filteredModelType); const byCategory: { i18nKey: string; configs: AnyModelConfig[] }[] = []; const total = baseFilteredModelConfigs.length; @@ -128,7 +149,7 @@ const ModelList = () => { }, [bulkDeleteModels, selectedModelKeys, dispatch, close, toast, t]); return ( - <> + @@ -152,7 +173,7 @@ const ModelList = () => { modelCount={selectedModelKeys.length} isDeleting={isDeleting} /> - + ); }; diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListBulkActions.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListBulkActions.tsx index 2442bd02162..1e6281f1c17 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListBulkActions.tsx +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListBulkActions.tsx @@ -11,7 +11,11 @@ import { import { t } from 'i18next'; import { memo, useCallback, useMemo } from 'react'; import { PiCaretDownBold, PiTrashSimpleBold } from 'react-icons/pi'; -import { modelConfigsAdapterSelectors, useGetModelConfigsQuery } from 'services/api/endpoints/models'; +import { + modelConfigsAdapterSelectors, + useGetMissingModelsQuery, + useGetModelConfigsQuery, +} from 'services/api/endpoints/models'; import type { AnyModelConfig } from 'services/api/types'; import { useBulkDeleteModal } from './ModelList'; @@ -31,7 +35,8 @@ export const ModelListBulkActions = memo(({ sx }: ModelListBulkActionsProps) => const filteredModelType = useAppSelector(selectFilteredModelType); const selectedModelKeys = useAppSelector(selectSelectedModelKeys); const searchTerm = useAppSelector(selectSearchTerm); - const { data } = useGetModelConfigsQuery(); + const { data: allModelsData } = useGetModelConfigsQuery(); + const { data: missingModelsData } = useGetMissingModelsQuery(); const bulkDeleteModal = useBulkDeleteModal(); const handleBulkDelete = useCallback(() => { @@ -40,10 +45,24 @@ export const ModelListBulkActions = memo(({ sx }: ModelListBulkActionsProps) => // Calculate displayed (filtered) model keys const displayedModelKeys = useMemo(() => { + // Use missing models data when the filter is 'missing' + const data = filteredModelType === 'missing' ? missingModelsData : allModelsData; const modelConfigs = modelConfigsAdapterSelectors.selectAll(data ?? 
{ ids: [], entities: {} }); + + // For missing models filter, only apply search term filter + if (filteredModelType === 'missing') { + const filtered = modelConfigs.filter( + (m) => + m.name.toLowerCase().includes(searchTerm.toLowerCase()) || + m.base.toLowerCase().includes(searchTerm.toLowerCase()) || + m.type.toLowerCase().includes(searchTerm.toLowerCase()) + ); + return filtered.map((m) => m.key); + } + const filteredModels = modelsFilter(modelConfigs, searchTerm, filteredModelType); return filteredModels.map((m) => m.key); - }, [data, searchTerm, filteredModelType]); + }, [allModelsData, missingModelsData, searchTerm, filteredModelType]); const { allSelected, someSelected } = useMemo(() => { if (displayedModelKeys.length === 0) { diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListItem.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListItem.tsx index 5719752ff01..9547046ba41 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListItem.tsx +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelListItem.tsx @@ -1,5 +1,5 @@ import type { SystemStyleObject } from '@invoke-ai/ui-library'; -import { chakra, Checkbox, Flex, Spacer, Text } from '@invoke-ai/ui-library'; +import { Badge, chakra, Checkbox, Flex, Spacer, Text, Tooltip } from '@invoke-ai/ui-library'; import { createSelector } from '@reduxjs/toolkit'; import { useAppDispatch, useAppSelector } from 'app/store/storeHooks'; import { @@ -15,8 +15,10 @@ import { filesize } from 'filesize'; import type { ChangeEvent, MouseEvent } from 'react'; import { memo, useCallback, useMemo } from 'react'; import { useTranslation } from 'react-i18next'; +import { PiWarningBold } from 'react-icons/pi'; import type { AnyModelConfig } from 'services/api/types'; +import { useIsModelMissing } from './MissingModelsContext'; import ModelImage from './ModelImage'; const StyledLabel = chakra('label'); @@ -58,6 +60,7 @@ const sx: SystemStyleObject = { const ModelListItem = ({ model }: ModelListItemProps) => { const { t } = useTranslation(); const dispatch = useAppDispatch(); + const isMissing = useIsModelMissing(model.key); const selectIsSelected = useMemo( () => createSelector( @@ -139,6 +142,14 @@ const ModelListItem = ({ model }: ModelListItemProps) => { + {isMissing && ( + + + + {t('modelManager.missingFiles')} + + + )} diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx index dcb22071482..5aa8e628869 100644 --- a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/ModelTypeFilter.tsx @@ -1,11 +1,16 @@ -import { Button, Menu, MenuButton, MenuItem, MenuList } from '@invoke-ai/ui-library'; +import { Button, Flex, Menu, MenuButton, MenuItem, MenuList } from '@invoke-ai/ui-library'; import { useAppDispatch, useAppSelector } from 'app/store/storeHooks'; import type { ModelCategoryData } from 'features/modelManagerV2/models'; import { MODEL_CATEGORIES, MODEL_CATEGORIES_AS_LIST } from 'features/modelManagerV2/models'; +import type { ModelCategoryType } from 'features/modelManagerV2/store/modelManagerV2Slice'; import { selectFilteredModelType, setFilteredModelType } from 
'features/modelManagerV2/store/modelManagerV2Slice'; import { memo, useCallback } from 'react'; import { useTranslation } from 'react-i18next'; -import { PiFunnelBold } from 'react-icons/pi'; +import { PiFunnelBold, PiWarningBold } from 'react-icons/pi'; + +const isModelCategoryType = (type: string): type is ModelCategoryType => { + return type in MODEL_CATEGORIES; +}; export const ModelTypeFilter = memo(() => { const { t } = useTranslation(); @@ -16,13 +21,37 @@ export const ModelTypeFilter = memo(() => { dispatch(setFilteredModelType(null)); }, [dispatch]); + const setMissingFilter = useCallback(() => { + dispatch(setFilteredModelType('missing')); + }, [dispatch]); + + const getButtonLabel = () => { + if (filteredModelType === 'missing') { + return t('modelManager.missingFiles'); + } + if (filteredModelType && isModelCategoryType(filteredModelType)) { + return t(MODEL_CATEGORIES[filteredModelType].i18nKey); + } + return t('modelManager.allModels'); + }; + return ( }> - {filteredModelType ? t(MODEL_CATEGORIES[filteredModelType].i18nKey) : t('modelManager.allModels')} + {getButtonLabel()} {t('modelManager.allModels')} + + + + {t('modelManager.missingFiles')} + + {MODEL_CATEGORIES_AS_LIST.map((data) => ( ))} diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsButton.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsButton.tsx new file mode 100644 index 00000000000..5734d84e9cd --- /dev/null +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsButton.tsx @@ -0,0 +1,34 @@ +import { Button, Tooltip, useDisclosure } from '@invoke-ai/ui-library'; +import { memo, useCallback } from 'react'; +import { useTranslation } from 'react-i18next'; +import { PiArrowsClockwiseBold } from 'react-icons/pi'; + +import { SyncModelsDialog } from './SyncModelsDialog'; + +export const SyncModelsButton = memo(() => { + const { t } = useTranslation(); + const { isOpen, onOpen, onClose } = useDisclosure(); + + const handleClick = useCallback(() => { + onOpen(); + }, [onOpen]); + + return ( + <> + + + + + + ); +}); + +SyncModelsButton.displayName = 'SyncModelsButton'; diff --git a/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsDialog.tsx b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsDialog.tsx new file mode 100644 index 00000000000..680cc468b90 --- /dev/null +++ b/invokeai/frontend/web/src/features/modelManagerV2/subpanels/ModelManagerPanel/SyncModelsDialog.tsx @@ -0,0 +1,283 @@ +import { + Button, + Checkbox, + Collapse, + Flex, + Heading, + IconButton, + Modal, + ModalBody, + ModalCloseButton, + ModalContent, + ModalFooter, + ModalHeader, + ModalOverlay, + Spinner, + Text, + useToast, +} from '@invoke-ai/ui-library'; +import { memo, useCallback, useEffect, useState } from 'react'; +import { useTranslation } from 'react-i18next'; +import { PiCaretDownBold, PiCaretRightBold } from 'react-icons/pi'; +import { useDeleteOrphanedModelsMutation, useGetOrphanedModelsQuery } from 'services/api/endpoints/models'; + +type OrphanedModel = { + path: string; + absolute_path: string; + files: string[]; + size_bytes: number; +}; + +type SyncModelsDialogProps = { + isOpen: boolean; + onClose: () => void; +}; + +export const SyncModelsDialog = memo(({ isOpen, onClose }: SyncModelsDialogProps) => { + const { t } = useTranslation(); + const toast = useToast(); + const { data: orphanedModels, isLoading, error } = 
useGetOrphanedModelsQuery(undefined, { skip: !isOpen }); + const [deleteOrphanedModels, { isLoading: isDeleting }] = useDeleteOrphanedModelsMutation(); + + const [selectedModels, setSelectedModels] = useState>(new Set()); + const [selectAll, setSelectAll] = useState(true); + const [expandedModels, setExpandedModels] = useState>(new Set()); + + // Initialize selected models when data loads + useEffect(() => { + if (orphanedModels && orphanedModels.length > 0) { + // Default all models to selected + setSelectedModels(new Set(orphanedModels.map((m: OrphanedModel) => m.path))); + setSelectAll(true); + } + }, [orphanedModels]); + + // Show toast if no orphaned models found + useEffect(() => { + if (!isLoading && !error && orphanedModels && orphanedModels.length === 0) { + toast({ + id: 'no-orphaned-models', + title: t('modelManager.noOrphanedModels'), + status: 'success', + duration: 3000, + }); + onClose(); + } + }, [isLoading, error, orphanedModels, t, toast, onClose]); + + const handleToggleModel = useCallback((path: string) => { + setSelectedModels((prev) => { + const next = new Set(prev); + if (next.has(path)) { + next.delete(path); + } else { + next.add(path); + } + return next; + }); + }, []); + + const handleToggleSelectAll = useCallback(() => { + if (selectAll && orphanedModels) { + // Deselect all + setSelectedModels(new Set()); + setSelectAll(false); + } else if (orphanedModels) { + // Select all + setSelectedModels(new Set(orphanedModels.map((m: OrphanedModel) => m.path))); + setSelectAll(true); + } + }, [selectAll, orphanedModels]); + + const handleToggleExpanded = useCallback((path: string) => { + setExpandedModels((prev) => { + const next = new Set(prev); + if (next.has(path)) { + next.delete(path); + } else { + next.add(path); + } + return next; + }); + }, []); + + const createToggleExpandedHandler = useCallback( + (path: string) => () => handleToggleExpanded(path), + [handleToggleExpanded] + ); + + const createToggleModelHandler = useCallback((path: string) => () => handleToggleModel(path), [handleToggleModel]); + + const handleDelete = useCallback(async () => { + try { + const result = await deleteOrphanedModels({ paths: Array.from(selectedModels) }).unwrap(); + + if (result.deleted.length > 0) { + toast({ + title: t('modelManager.orphanedModelsDeleted', { count: result.deleted.length }), + status: 'success', + duration: 3000, + }); + } + + if (Object.keys(result.errors).length > 0) { + toast({ + title: t('modelManager.orphanedModelsDeleteErrors'), + description: Object.values(result.errors).join(', '), + status: 'error', + duration: 5000, + }); + } + + onClose(); + } catch { + toast({ + title: t('modelManager.orphanedModelsDeleteFailed'), + status: 'error', + duration: 5000, + }); + } + }, [selectedModels, deleteOrphanedModels, toast, t, onClose]); + + const formatSize = useCallback((bytes: number) => { + if (bytes < 1024) { + return `${bytes} B`; + } + if (bytes < 1024 * 1024) { + return `${(bytes / 1024).toFixed(2)} KB`; + } + if (bytes < 1024 * 1024 * 1024) { + return `${(bytes / (1024 * 1024)).toFixed(2)} MB`; + } + return `${(bytes / (1024 * 1024 * 1024)).toFixed(2)} GB`; + }, []); + + // Early return if error + if (error) { + return ( + + + + {t('modelManager.syncModels')} + + + {t('modelManager.errorLoadingOrphanedModels')} + + + + + + + ); + } + + // Loading state + if (isLoading) { + return ( + + + + {t('modelManager.syncModels')} + + + + + + + + + ); + } + + // No orphaned models found + if (!orphanedModels || orphanedModels.length === 0) { + return null; + } + + 
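+  // Main render path: list each orphaned directory with a checkbox, an
+  // expandable file list, and a delete action scoped to the current selection.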
return ( + + + + {t('modelManager.orphanedModelsFound')} + + + + {t('modelManager.orphanedModelsDescription')} + + + {t('modelManager.foundOrphanedModels', { count: orphanedModels.length })} + + {selectAll ? t('modelManager.deselectAll') : t('modelManager.selectAll')} + + + + + {orphanedModels.map((model: OrphanedModel) => ( + + + + : } + size="xs" + variant="ghost" + onClick={createToggleExpandedHandler(model.path)} + /> + + {model.path} + + + + {formatSize(model.size_bytes)} + + + + + {t('modelManager.filesCount', { count: model.files.length })} + + + + + {model.files.map((file) => ( + + {file} + + ))} + + + + ))} + + + + + + + + + + ); +}); + +SyncModelsDialog.displayName = 'SyncModelsDialog'; diff --git a/invokeai/frontend/web/src/services/api/endpoints/models.ts b/invokeai/frontend/web/src/services/api/endpoints/models.ts index 707352bcb39..1117f3b61fe 100644 --- a/invokeai/frontend/web/src/services/api/endpoints/models.ts +++ b/invokeai/frontend/web/src/services/api/endpoints/models.ts @@ -79,6 +79,25 @@ type GetHuggingFaceModelsResponse = type GetByAttrsArg = operations['get_model_records_by_attrs']['parameters']['query']; +// Orphaned models types - manually defined since the schema hasn't been regenerated yet +type OrphanedModelInfo = { + path: string; + absolute_path: string; + files: string[]; + size_bytes: number; +}; + +type GetOrphanedModelsResponse = OrphanedModelInfo[]; + +type DeleteOrphanedModelsArg = { + paths: string[]; +}; + +type DeleteOrphanedModelsResponse = { + deleted: string[]; + errors: Record; +}; + const modelConfigsAdapter = createEntityAdapter({ selectId: (entity) => entity.key, sortComparer: (a, b) => a.name.localeCompare(b.name), @@ -290,6 +309,13 @@ export const modelsApi = api.injectEndpoints({ }); }, }), + getMissingModels: build.query, void>({ + query: () => ({ url: buildModelsUrl('missing') }), + providesTags: [{ type: 'ModelConfig', id: LIST_TAG }], + transformResponse: (response: GetModelConfigsResponse) => { + return modelConfigsAdapter.setAll(modelConfigsAdapter.getInitialState(), response.models); + }, + }), getStarterModels: build.query({ query: () => buildModelsUrl('starter_models'), providesTags: [{ type: 'ModelConfig', id: LIST_TAG }], @@ -351,12 +377,28 @@ export const modelsApi = api.injectEndpoints({ } }, }), + getOrphanedModels: build.query({ + query: () => ({ + url: buildModelsUrl('sync/orphaned'), + method: 'GET', + }), + providesTags: ['OrphanedModels'], + }), + deleteOrphanedModels: build.mutation({ + query: (arg) => ({ + url: buildModelsUrl('sync/orphaned'), + method: 'DELETE', + body: arg, + }), + invalidatesTags: ['OrphanedModels'], + }), }), }); export const { useGetModelConfigsQuery, useGetModelConfigQuery, + useGetMissingModelsQuery, useDeleteModelsMutation, useBulkDeleteModelsMutation, useDeleteModelImageMutation, @@ -375,6 +417,9 @@ export const { useResetHFTokenMutation, useEmptyModelCacheMutation, useReidentifyModelMutation, + useGetOrphanedModelsQuery, + useDeleteOrphanedModelsMutation, } = modelsApi; export const selectModelConfigsQuery = modelsApi.endpoints.getModelConfigs.select(); +export const selectMissingModelsQuery = modelsApi.endpoints.getMissingModels.select(); diff --git a/invokeai/frontend/web/src/services/api/hooks/modelsByType.ts b/invokeai/frontend/web/src/services/api/hooks/modelsByType.ts index b771dd78400..92722b7664c 100644 --- a/invokeai/frontend/web/src/services/api/hooks/modelsByType.ts +++ b/invokeai/frontend/web/src/services/api/hooks/modelsByType.ts @@ -4,7 +4,9 @@ import type { RootState } from 
'app/store/store'; import { useMemo } from 'react'; import { modelConfigsAdapterSelectors, + selectMissingModelsQuery, selectModelConfigsQuery, + useGetMissingModelsQuery, useGetModelConfigsQuery, } from 'services/api/endpoints/models'; import type { AnyModelConfig } from 'services/api/types'; @@ -33,16 +35,24 @@ const buildModelsHook = (typeGuard: (config: AnyModelConfig) => config is T) => (filter: (config: T) => boolean = () => true) => { const result = useGetModelConfigsQuery(undefined); + const { data: missingModelsData } = useGetMissingModelsQuery(); + const modelConfigs = useMemo(() => { if (!result.data) { return EMPTY_ARRAY; } + // Get set of missing model keys to exclude from selection + const missingModelKeys = new Set( + modelConfigsAdapterSelectors.selectAll(missingModelsData ?? { ids: [], entities: {} }).map((m) => m.key) + ); + return modelConfigsAdapterSelectors .selectAll(result.data) .filter((config) => typeGuard(config)) + .filter((config) => !missingModelKeys.has(config.key)) .filter(filter); - }, [filter, result.data]); + }, [filter, result.data, missingModelsData]); return [modelConfigs, result] as const; }; @@ -75,7 +85,17 @@ const buildModelsSelector = if (!result.data) { return EMPTY_ARRAY; } - return modelConfigsAdapterSelectors.selectAll(result.data).filter(typeGuard); + + // Get set of missing model keys to exclude from selection + const missingResult = selectMissingModelsQuery(state); + const missingModelKeys = new Set( + modelConfigsAdapterSelectors.selectAll(missingResult.data ?? { ids: [], entities: {} }).map((m) => m.key) + ); + + return modelConfigsAdapterSelectors + .selectAll(result.data) + .filter(typeGuard) + .filter((config) => !missingModelKeys.has(config.key)); }; export const selectIPAdapterModels = buildModelsSelector(isIPAdapterModelConfig); export const selectGlobalRefImageModels = buildModelsSelector( diff --git a/invokeai/frontend/web/src/services/api/index.ts b/invokeai/frontend/web/src/services/api/index.ts index fdd30029a75..795d9539bba 100644 --- a/invokeai/frontend/web/src/services/api/index.ts +++ b/invokeai/frontend/web/src/services/api/index.ts @@ -38,6 +38,7 @@ const tagTypes = [ 'ModelInstalls', 'ModelRelationships', 'ModelScanFolderResults', + 'OrphanedModels', 'T2IAdapterModel', 'MainModel', 'VaeModel', diff --git a/invokeai/frontend/web/src/services/api/schema.ts b/invokeai/frontend/web/src/services/api/schema.ts index 98e43bcb722..7d2733e9f0b 100644 --- a/invokeai/frontend/web/src/services/api/schema.ts +++ b/invokeai/frontend/web/src/services/api/schema.ts @@ -39,6 +39,29 @@ export type paths = { patch?: never; trace?: never; }; + "/api/v2/models/missing": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * List Missing Models + * @description Get models whose files are missing from disk. + * + * These are models that have database entries but their corresponding + * weight files have been deleted externally (not via Model Manager). + */ + get: operations["list_missing_models"]; + put?: never; + post?: never; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v2/models/get_by_attrs": { parameters: { query?: never; @@ -403,6 +426,43 @@ export type paths = { patch?: never; trace?: never; }; + "/api/v2/models/sync/orphaned": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Get Orphaned Models + * @description Find orphaned model directories. 
+ * + * Orphaned models are directories in the models folder that contain model files + * but are not referenced in the database. This can happen when models are deleted + * from the database but the files remain on disk. + * + * Returns: + * List of orphaned model directory information + */ + get: operations["get_orphaned_models"]; + put?: never; + post?: never; + /** + * Delete Orphaned Models + * @description Delete specified orphaned model directories. + * + * Args: + * request: Request containing list of relative paths to delete + * + * Returns: + * Response indicating which paths were deleted and which had errors + */ + delete: operations["delete_orphaned_models"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/v1/download_queue/": { parameters: { query?: never; @@ -6419,6 +6479,35 @@ export type components = { */ deleted_images: string[]; }; + /** + * DeleteOrphanedModelsRequest + * @description Request to delete specific orphaned model directories. + */ + DeleteOrphanedModelsRequest: { + /** + * Paths + * @description List of relative paths to delete + */ + paths: string[]; + }; + /** + * DeleteOrphanedModelsResponse + * @description Response from deleting orphaned models. + */ + DeleteOrphanedModelsResponse: { + /** + * Deleted + * @description Paths that were successfully deleted + */ + deleted: string[]; + /** + * Errors + * @description Paths that had errors, with error messages + */ + errors: { + [key: string]: string; + }; + }; /** * Denoise - SD1.5, SDXL * @description Denoises noisy latents to decodable images @@ -20398,6 +20487,32 @@ export type components = { */ items: components["schemas"]["ImageDTO"][]; }; + /** + * OrphanedModelInfo + * @description Information about an orphaned model directory. + */ + OrphanedModelInfo: { + /** + * Path + * @description Relative path to the orphaned directory from models root + */ + path: string; + /** + * Absolute Path + * @description Absolute path to the orphaned directory + */ + absolute_path: string; + /** + * Files + * @description List of model files in this directory + */ + files: string[]; + /** + * Size Bytes + * @description Total size of all files in bytes + */ + size_bytes: number; + }; /** * OutputFieldJSONSchemaExtra * @description Extra attributes to be added to input fields and their OpenAPI schema. 
Used by the workflow editor @@ -27204,6 +27319,26 @@ export interface operations { }; }; }; + list_missing_models: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description List of models with missing files */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["ModelsList"]; + }; + }; + }; + }; get_model_records_by_attrs: { parameters: { query: { @@ -28160,6 +28295,59 @@ export interface operations { }; }; }; + get_orphaned_models: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["OrphanedModelInfo"][]; + }; + }; + }; + }; + delete_orphaned_models: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody: { + content: { + "application/json": components["schemas"]["DeleteOrphanedModelsRequest"]; + }; + }; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["DeleteOrphanedModelsResponse"]; + }; + }; + /** @description Validation Error */ + 422: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["HTTPValidationError"]; + }; + }; + }; + }; list_downloads: { parameters: { query?: never; diff --git a/invokeai/version/invokeai_version.py b/invokeai/version/invokeai_version.py index 67340893b60..81ff40b405f 100644 --- a/invokeai/version/invokeai_version.py +++ b/invokeai/version/invokeai_version.py @@ -1 +1 @@ -__version__ = "6.11.0.rc1" +__version__ = "6.11.0.post1" diff --git a/scripts/remove_orphaned_models.py b/scripts/remove_orphaned_models.py new file mode 100755 index 00000000000..3b94276e7ac --- /dev/null +++ b/scripts/remove_orphaned_models.py @@ -0,0 +1,464 @@ +#!/usr/bin/env python +"""Script to remove orphaned model files from INVOKEAI_ROOT directory. + +Orphaned models are ones that appear in the INVOKEAI_ROOT/models directory, +but which are not referenced in the database `models` table. +""" + +import argparse +import datetime +import json +import locale +import os +import shutil +import sqlite3 +from pathlib import Path +from typing import Set + +import yaml + + +class ConfigMapper: + """Configuration loader for InvokeAI paths.""" + + YAML_FILENAME = "invokeai.yaml" + DATABASE_FILENAME = "invokeai.db" + DEFAULT_DB_DIR = "databases" + DEFAULT_MODELS_DIR = "models" + + def __init__(self): + self.database_path = None + self.database_backup_dir = None + self.models_path = None + + def load(self, root_path: Path) -> bool: + """Load configuration from root directory.""" + yaml_path = root_path / self.YAML_FILENAME + if not yaml_path.exists(): + print(f"Unable to find {self.YAML_FILENAME} at {yaml_path}!") + return False + + db_dir, models_dir = self._load_paths_from_yaml_file(yaml_path) + + if db_dir is None: + db_dir = self.DEFAULT_DB_DIR + print(f"The {self.YAML_FILENAME} file was found but is missing the db_dir setting! Defaulting to {db_dir}") + + if models_dir is None: + models_dir = self.DEFAULT_MODELS_DIR + print( + f"The {self.YAML_FILENAME} file was found but is missing the models_dir setting! 
Defaulting to {models_dir}" + ) + + # Set database path + if os.path.isabs(db_dir): + self.database_path = Path(db_dir) / self.DATABASE_FILENAME + else: + self.database_path = root_path / db_dir / self.DATABASE_FILENAME + + self.database_backup_dir = self.database_path.parent / "backup" + + # Set models path + if os.path.isabs(models_dir): + self.models_path = Path(models_dir) + else: + self.models_path = root_path / models_dir + + db_exists = self.database_path.exists() + models_exists = self.models_path.exists() + + print(f"Found {self.YAML_FILENAME} file at {yaml_path}:") + print(f" Database : {self.database_path} - {'Exists!' if db_exists else 'Not Found!'}") + print(f" Models : {self.models_path} - {'Exists!' if models_exists else 'Not Found!'}") + + if db_exists and models_exists: + return True + else: + print( + "\nOne or more paths specified in invokeai.yaml do not exist. Please inspect/correct the configuration." + ) + return False + + def _load_paths_from_yaml_file(self, yaml_path: Path): + """Load paths from YAML configuration file.""" + try: + with open(yaml_path, "rt", encoding=locale.getpreferredencoding()) as file: + yamlinfo = yaml.safe_load(file) + db_dir = yamlinfo.get("InvokeAI", {}).get("Paths", {}).get("db_dir", None) + models_dir = yamlinfo.get("InvokeAI", {}).get("Paths", {}).get("models_dir", None) + return db_dir, models_dir + except Exception as e: + print(f"Failed to load paths from yaml file! {yaml_path}! Error: {e}") + return None, None + + +class DatabaseMapper: + """Class to abstract database functionality.""" + + def __init__(self, database_path: Path, database_backup_dir: Path): + self.database_path = database_path + self.database_backup_dir = database_backup_dir + self.connection = None + self.cursor = None + + def backup(self, timestamp_string: str): + """Take a backup of the database.""" + if not self.database_backup_dir.exists(): + print(f"Database backup directory {self.database_backup_dir} does not exist -> creating...", end="") + self.database_backup_dir.mkdir(parents=True, exist_ok=True) + print("Done!") + + database_backup_path = self.database_backup_dir / f"backup-{timestamp_string}-invokeai.db" + print(f"Making DB Backup at {database_backup_path}...", end="") + shutil.copy2(self.database_path, database_backup_path) + print("Done!") + + def connect(self): + """Open connection to the database.""" + self.connection = sqlite3.connect(str(self.database_path)) + self.cursor = self.connection.cursor() + + def get_all_model_directories(self, models_dir: Path) -> Set[Path]: + """Get the set of all model directories from the database. + + A model directory is the top-level directory under models/ that contains + the model files. If the path in the database is just a directory, that's + the model directory. If it's a file path, we extract the first directory + component. + + Args: + models_dir: The root models directory path. Relative paths from the database + will be resolved relative to this directory. + + Returns: + Set of absolute Path objects for model directories. 
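+
+        Example (illustrative): a config path of "model-id/file.safetensors"
+        maps to the model directory <models_dir>/model-id, and a bare
+        "model-id" maps to the same directory; an absolute path outside
+        models_dir is added unchanged.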
+ """ + sql_get_models = "SELECT config FROM models" + self.cursor.execute(sql_get_models) + rows = self.cursor.fetchall() + model_directories = set() + for row in rows: + try: + config = json.loads(row[0]) + if "path" in config and config["path"]: + path_str = config["path"] + # Convert to Path object + path = Path(path_str) + + # If the path is relative, resolve it relative to models_dir + # If it's absolute, use it as-is + if not path.is_absolute(): + full_path = (models_dir / path).resolve() + else: + full_path = path.resolve() + + # Extract the top-level directory under models_dir + # This handles both cases: + # 1. path is "model-id" -> model-id is the directory + # 2. path is "model-id/file.safetensors" -> model-id is the directory + try: + # Get the relative path from models_dir + rel_path = full_path.relative_to(models_dir) + # Get the first component (top-level directory) + if rel_path.parts: + top_level_dir = models_dir / rel_path.parts[0] + model_directories.add(top_level_dir.resolve()) + except ValueError: + # Path is not relative to models_dir, use the path itself + # This handles absolute paths outside models_dir + model_directories.add(full_path) + + except (json.JSONDecodeError, KeyError, TypeError) as e: + print(f"Warning: Failed to parse model config: {e}") + continue + return model_directories + + def disconnect(self): + """Disconnect from the database.""" + if self.cursor is not None: + self.cursor.close() + if self.connection is not None: + self.connection.close() + + +class ModelFileMapper: + """Class to handle model file system operations.""" + + # Common model file extensions + MODEL_EXTENSIONS = { + ".safetensors", + ".ckpt", + ".pt", + ".pth", + ".bin", + ".onnx", + } + + # Directories to skip during scan + SKIP_DIRS = { + ".download_cache", + ".convert_cache", + "__pycache__", + ".git", + } + + def __init__(self, models_path: Path): + self.models_path = models_path + + def get_all_model_directories(self) -> Set[Path]: + """ + Get all directories in the models path that contain model files. + Returns a set of directory paths that contain at least one model file. 
+ """ + model_dirs = set() + + for item in self.models_path.rglob("*"): + # Skip directories we don't want to scan + if any(skip_dir in item.parts for skip_dir in self.SKIP_DIRS): + continue + + if item.is_file() and item.suffix.lower() in self.MODEL_EXTENSIONS: + # Add the parent directory of the model file + model_dirs.add(item.parent) + + return model_dirs + + def get_all_model_files(self) -> Set[Path]: + """Get all model files in the models directory.""" + model_files = set() + + for item in self.models_path.rglob("*"): + # Skip directories we don't want to scan + if any(skip_dir in item.parts for skip_dir in self.SKIP_DIRS): + continue + + if item.is_file() and item.suffix.lower() in self.MODEL_EXTENSIONS: + model_files.add(item.resolve()) + + return model_files + + def remove_file(self, file_path: Path): + """Remove a single model file.""" + try: + file_path.unlink() + print(f" Deleted file: {file_path}") + except Exception as e: + print(f" Error deleting {file_path}: {e}") + + def remove_directory_if_empty(self, directory: Path): + """Remove a directory if it's empty (after removing files).""" + try: + if directory.exists() and not any(directory.iterdir()): + directory.rmdir() + print(f" Deleted empty directory: {directory}") + except Exception as e: + print(f" Error removing directory {directory}: {e}") + + +class OrphanedModelsApp: + """Main application class for removing orphaned model files.""" + + def __init__(self, delete_without_confirm: bool = False): + self.delete_without_confirm = delete_without_confirm + self.orphaned_count = 0 + + def find_orphaned_files_by_directory( + self, file_mapper: ModelFileMapper, db_mapper: DatabaseMapper, models_path: Path + ) -> dict[Path, list[Path]]: + """Find orphaned files grouped by their parent directory. + + A file is orphaned if it's NOT under any model directory registered in the database. + Model directories are extracted from the database paths - if a path is + 'model-id/file.safetensors', then 'model-id' is the model directory and ALL files + under it belong to that model. + """ + print("\nScanning models directory for orphaned files...") + + # Get all model files on disk + disk_model_files = file_mapper.get_all_model_files() + print(f"Found {len(disk_model_files)} model files on disk") + + # Get all model directories from database + db_model_directories = db_mapper.get_all_model_directories(models_path) + print(f"Found {len(db_model_directories)} model directories in database") + + # Find orphaned files (files on disk but not under any registered model directory) + orphaned_files = set() + for disk_file in disk_model_files: + # Check if this file is under any registered model directory + is_under_model_dir = False + for model_dir in db_model_directories: + try: + # Check if disk_file is under model_dir + disk_file.relative_to(model_dir) + is_under_model_dir = True + break + except ValueError: + # Not under this model directory, continue checking + continue + + if not is_under_model_dir: + orphaned_files.add(disk_file) + + # Group orphaned files by their parent directory + orphaned_dirs = {} + for orphaned_file in orphaned_files: + parent = orphaned_file.parent + if parent not in orphaned_dirs: + orphaned_dirs[parent] = [] + orphaned_dirs[parent].append(orphaned_file) + + return orphaned_dirs + + def ask_to_continue(self) -> bool: + """Ask user whether they want to continue with the operation.""" + while True: + try: + input_choice = input("\nDo you wish to continue? 
(Y or N) [N]: ") + # Default to 'N' if user presses Enter without input + if input_choice.strip() == "": + return False + if str.lower(input_choice) == "y": + return True + if str.lower(input_choice) == "n": + return False + print("Please enter Y or N") + except (KeyboardInterrupt, EOFError): + return False + + def remove_orphaned_models(self, config: ConfigMapper, file_mapper: ModelFileMapper, db_mapper: DatabaseMapper): + """Remove orphaned model directories.""" + print("\n" + "=" * 80) + print("= Remove Orphaned Model Files") + print("=" * 80) + print("\nThis operation will find model files in the models directory that are not") + print("referenced in the database and remove them.") + print() + print(f"Database File Path : {config.database_path}") + print(f"Models Directory : {config.models_path}") + print() + print("Notes:") + print("- A database backup will be created before any changes") + print("- Model files not referenced in the database will be permanently deleted") + print("- This operation cannot be undone (except by restoring the deleted files)") + print() + + # Connect to database and find orphaned files + db_mapper.connect() + try: + orphaned_dirs = self.find_orphaned_files_by_directory(file_mapper, db_mapper, config.models_path) + + if not orphaned_dirs: + print("\nNo orphaned model files found!") + return + + print(f"\nFound {len(orphaned_dirs)} directories with orphaned model files:") + print() + + for directory, files in sorted(orphaned_dirs.items()): + print(f"Directory: {directory}") + for file in sorted(files): + print(f" - {file.name}") + print() + + self.orphaned_count = sum(len(files) for files in orphaned_dirs.values()) + print(f"Total orphaned files: {self.orphaned_count}") + + # Ask for confirmation unless --delete flag is used + if not self.delete_without_confirm: + if not self.ask_to_continue(): + print("\nOperation cancelled by user.") + self.orphaned_count = 0 # Reset count since no files were removed + return + + # Create database backup with timestamp + timestamp_string = datetime.datetime.now(datetime.UTC).strftime("%Y%m%dT%H%M%SZ") + db_mapper.backup(timestamp_string) + + # Delete the orphaned files + print("\nDeleting orphaned model files...") + for directory, files in sorted(orphaned_dirs.items()): + for file in sorted(files): + file_mapper.remove_file(file) + # After removing files, clean up the directory if it's now empty + file_mapper.remove_directory_if_empty(directory) + + finally: + db_mapper.disconnect() + + def main(self, root_path: Path): + """Main entry point.""" + print("\n" + "=" * 80) + print("Orphaned Model Files Cleanup for InvokeAI") + print("=" * 80 + "\n") + + config_mapper = ConfigMapper() + if not config_mapper.load(root_path): + print("\nInvalid configuration...exiting.\n") + return 1 + + file_mapper = ModelFileMapper(config_mapper.models_path) + db_mapper = DatabaseMapper(config_mapper.database_path, config_mapper.database_backup_dir) + + try: + self.remove_orphaned_models(config_mapper, file_mapper, db_mapper) + except KeyboardInterrupt: + print("\n\nOperation cancelled by user.") + return 1 + except Exception as e: + print(f"\n\nError during operation: {e}") + import traceback + + traceback.print_exc() + return 1 + + print("\n" + "=" * 80) + print("= Operation Complete") + print("=" * 80) + print(f"\nOrphaned model files removed: {self.orphaned_count}") + print() + + return 0 + + +def main(): + """Command-line entry point.""" + parser = argparse.ArgumentParser( + description="Remove orphaned model files from InvokeAI 
installation", + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" +This script finds and removes model files that exist in the models directory +but are not referenced in the InvokeAI database. This can happen if: +- Models were manually deleted from the database +- The database was reset but model files were kept +- Files were manually copied into the models directory + +By default, the script will list orphaned files and ask for confirmation +before deleting them. +""", + ) + parser.add_argument( + "--root", + type=Path, + default=os.environ.get("INVOKEAI_ROOT", "."), + help="InvokeAI root directory (default: $INVOKEAI_ROOT or current directory)", + ) + parser.add_argument( + "--delete", + action="store_true", + help="Delete orphan model files without asking for confirmation", + ) + args = parser.parse_args() + + # Resolve the root path + root_path = Path(args.root).resolve() + if not root_path.exists(): + print(f"Error: Root directory does not exist: {root_path}") + return 1 + + app = OrphanedModelsApp(delete_without_confirm=args.delete) + return app.main(root_path) + + +if __name__ == "__main__": + exit(main()) diff --git a/tests/app/services/model_install/test_missing_models.py b/tests/app/services/model_install/test_missing_models.py new file mode 100644 index 00000000000..e42c9e2f95e --- /dev/null +++ b/tests/app/services/model_install/test_missing_models.py @@ -0,0 +1,220 @@ +""" +Tests for missing model detection (_scan_for_missing_models) and bulk deletion. +""" + +import gc +from pathlib import Path + +import pytest + +from invokeai.app.services.config import InvokeAIAppConfig +from invokeai.app.services.model_install import ModelInstallServiceBase +from invokeai.app.services.model_records import UnknownModelException +from invokeai.backend.model_manager.configs.textual_inversion import TI_File_SD1_Config +from invokeai.backend.model_manager.taxonomy import ( + BaseModelType, + ModelFormat, + ModelSourceType, + ModelType, +) +from tests.backend.model_manager.model_manager_fixtures import * # noqa F403 + + +class TestScanForMissingModels: + """Tests for ModelInstallService._scan_for_missing_models().""" + + def test_no_missing_models( + self, mm2_installer: ModelInstallServiceBase, embedding_file: Path, mm2_app_config: InvokeAIAppConfig + ) -> None: + """When all registered models exist on disk, _scan_for_missing_models returns an empty list.""" + mm2_installer.register_path(embedding_file) + missing = mm2_installer._scan_for_missing_models() + assert len(missing) == 0 + + def test_detects_missing_model( + self, mm2_installer: ModelInstallServiceBase, embedding_file: Path, mm2_app_config: InvokeAIAppConfig + ) -> None: + """A model whose path does not exist on disk is reported as missing.""" + # Register a real model first, then add a fake one with a non-existent path + mm2_installer.register_path(embedding_file) + + fake_config = TI_File_SD1_Config( + key="missing-model-key-1", + path="/nonexistent/path/missing_model.safetensors", + name="MissingModel", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash="FAKEHASH1", + file_size=1024, + source="test/source", + source_type=ModelSourceType.Path, + ) + mm2_installer.record_store.add_model(fake_config) + + missing = mm2_installer._scan_for_missing_models() + assert len(missing) == 1 + assert missing[0].key == "missing-model-key-1" + + def test_mix_of_existing_and_missing( + self, + mm2_installer: ModelInstallServiceBase, + embedding_file: Path, + 
diffusers_dir: Path, + mm2_app_config: InvokeAIAppConfig, + ) -> None: + """With multiple models, only the ones with missing files are returned.""" + key_existing = mm2_installer.register_path(embedding_file) + mm2_installer.register_path(diffusers_dir) + + # Add two models with non-existent paths + fake1 = TI_File_SD1_Config( + key="missing-key-1", + path="/nonexistent/missing1.safetensors", + name="Missing1", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash="FAKEHASH_A", + file_size=1024, + source="test/source1", + source_type=ModelSourceType.Path, + ) + fake2 = TI_File_SD1_Config( + key="missing-key-2", + path="/nonexistent/missing2.safetensors", + name="Missing2", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash="FAKEHASH_B", + file_size=2048, + source="test/source2", + source_type=ModelSourceType.Path, + ) + mm2_installer.record_store.add_model(fake1) + mm2_installer.record_store.add_model(fake2) + + missing = mm2_installer._scan_for_missing_models() + missing_keys = {m.key for m in missing} + assert len(missing) == 2 + assert "missing-key-1" in missing_keys + assert "missing-key-2" in missing_keys + assert key_existing not in missing_keys + + def test_empty_store_returns_empty(self, mm2_installer: ModelInstallServiceBase) -> None: + """With no models registered, _scan_for_missing_models returns an empty list.""" + missing = mm2_installer._scan_for_missing_models() + assert len(missing) == 0 + + +class TestBulkDelete: + """Tests for bulk model deletion.""" + + def test_delete_installed_model( + self, mm2_installer: ModelInstallServiceBase, embedding_file: Path, mm2_app_config: InvokeAIAppConfig + ) -> None: + """Deleting an installed model removes it from the store and disk.""" + key = mm2_installer.install_path(embedding_file) + record = mm2_installer.record_store.get_model(key) + model_path = mm2_app_config.models_path / record.path + assert model_path.exists() + assert mm2_installer.record_store.exists(key) + + gc.collect() + mm2_installer.delete(key) + + with pytest.raises(UnknownModelException): + mm2_installer.record_store.get_model(key) + + def test_unregister_missing_model( + self, mm2_installer: ModelInstallServiceBase, mm2_app_config: InvokeAIAppConfig + ) -> None: + """Unregistering a model whose file is missing removes it from the DB.""" + fake_config = TI_File_SD1_Config( + key="missing-to-delete", + path="/nonexistent/path/gone.safetensors", + name="GoneModel", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash="FAKEHASH_GONE", + file_size=1024, + source="test/source", + source_type=ModelSourceType.Path, + ) + mm2_installer.record_store.add_model(fake_config) + assert mm2_installer.record_store.exists("missing-to-delete") + + # Unregister removes it from DB without touching disk + mm2_installer.unregister("missing-to-delete") + + with pytest.raises(UnknownModelException): + mm2_installer.record_store.get_model("missing-to-delete") + + def test_delete_unknown_key_raises(self, mm2_installer: ModelInstallServiceBase) -> None: + """Deleting a model with an unknown key raises UnknownModelException.""" + with pytest.raises(UnknownModelException): + mm2_installer.delete("nonexistent-key-12345") + + def test_scan_then_unregister_clears_missing( + self, mm2_installer: ModelInstallServiceBase, mm2_app_config: InvokeAIAppConfig + ) -> None: + """After unregistering all missing 
models, _scan_for_missing_models returns empty.""" + # Add two models with non-existent paths + for i in range(2): + config = TI_File_SD1_Config( + key=f"missing-bulk-{i}", + path=f"/nonexistent/bulk_{i}.safetensors", + name=f"BulkMissing{i}", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash=f"BULKHASH{i}", + file_size=1024, + source=f"test/bulk{i}", + source_type=ModelSourceType.Path, + ) + mm2_installer.record_store.add_model(config) + + missing = mm2_installer._scan_for_missing_models() + assert len(missing) == 2 + + # Unregister all missing (simulates bulk delete for missing models) + for model in missing: + mm2_installer.unregister(model.key) + + assert len(mm2_installer._scan_for_missing_models()) == 0 + + def test_bulk_unregister_does_not_affect_existing_models( + self, + mm2_installer: ModelInstallServiceBase, + embedding_file: Path, + mm2_app_config: InvokeAIAppConfig, + ) -> None: + """Unregistering missing models does not affect models that exist on disk.""" + existing_key = mm2_installer.register_path(embedding_file) + + fake_config = TI_File_SD1_Config( + key="missing-selective", + path="/nonexistent/selective.safetensors", + name="SelectiveMissing", + base=BaseModelType.StableDiffusion1, + type=ModelType.TextualInversion, + format=ModelFormat.EmbeddingFile, + hash="SELECTIVEHASH", + file_size=1024, + source="test/selective", + source_type=ModelSourceType.Path, + ) + mm2_installer.record_store.add_model(fake_config) + + # Only unregister the missing one + missing = mm2_installer._scan_for_missing_models() + assert len(missing) == 1 + for model in missing: + mm2_installer.unregister(model.key) + + # Existing model should still be there + assert mm2_installer.record_store.exists(existing_key) + assert len(mm2_installer._scan_for_missing_models()) == 0 diff --git a/tests/test_graph_execution_state.py b/tests/test_graph_execution_state.py index 381c4c73482..e0b8fd4717d 100644 --- a/tests/test_graph_execution_state.py +++ b/tests/test_graph_execution_state.py @@ -225,3 +225,92 @@ def test_graph_iterate_execution_order(execution_number: int): _ = invoke_next(g) assert _[1].item == "Dinosaur Sushi" _ = invoke_next(g) + + +# Because this tests deterministic ordering, we run it multiple times +@pytest.mark.parametrize("execution_number", range(5)) +def test_graph_nested_iterate_execution_order(execution_number: int): + """ + Validates best-effort in-order execution for nodes expanded under nested iterators. + Expected lexicographic order by (outer_index, inner_index), subject to readiness. 
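+
+    Worked expectation: the outer range yields [0, 1]; each outer item i feeds
+    an inner range [i*10, i*10 + 2), so the observed sums should arrive as
+    0, 1, 10, 11.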
+ """ + graph = Graph() + + # Outer iterator: [0, 1] + graph.add_node(RangeInvocation(id="outer_range", start=0, stop=2, step=1)) + graph.add_node(IterateInvocation(id="outer_iter")) + + # Inner iterator is derived from the outer item: + # start = outer_item * 10 + # stop = start + 2 => yields 2 items per outer item + graph.add_node(MultiplyInvocation(id="mul10", b=10)) + graph.add_node(AddInvocation(id="stop_plus2", b=2)) + graph.add_node(RangeInvocation(id="inner_range", start=0, stop=1, step=1)) + graph.add_node(IterateInvocation(id="inner_iter")) + + # Observe inner items (they encode outer via start=outer*10) + graph.add_node(AddInvocation(id="sum", b=0)) + + graph.add_edge(create_edge("outer_range", "collection", "outer_iter", "collection")) + graph.add_edge(create_edge("outer_iter", "item", "mul10", "a")) + graph.add_edge(create_edge("mul10", "value", "stop_plus2", "a")) + graph.add_edge(create_edge("mul10", "value", "inner_range", "start")) + graph.add_edge(create_edge("stop_plus2", "value", "inner_range", "stop")) + graph.add_edge(create_edge("inner_range", "collection", "inner_iter", "collection")) + graph.add_edge(create_edge("inner_iter", "item", "sum", "a")) + + g = GraphExecutionState(graph=graph) + sum_values: list[int] = [] + + while True: + n, o = invoke_next(g) + if n is None: + break + if g.prepared_source_mapping[n.id] == "sum": + sum_values.append(o.value) + + assert sum_values == [0, 1, 10, 11] + + +def test_graph_validate_self_iterator_without_collection_input_raises_invalid_edge_error(): + """Iterator nodes with no collection input should fail validation cleanly. + + This test exposes the bug where validation crashes with IndexError instead of raising InvalidEdgeError. + """ + from invokeai.app.services.shared.graph import InvalidEdgeError + + graph = Graph() + graph.add_node(IterateInvocation(id="iterate")) + + with pytest.raises(InvalidEdgeError): + graph.validate_self() + + +def test_graph_validate_self_collector_without_item_inputs_raises_invalid_edge_error(): + """Collector nodes with no item inputs should fail validation cleanly. + + This test exposes the bug where validation can crash (e.g. StopIteration) instead of raising InvalidEdgeError. + """ + from invokeai.app.services.shared.graph import InvalidEdgeError + + graph = Graph() + graph.add_node(CollectInvocation(id="collect")) + + with pytest.raises(InvalidEdgeError): + graph.validate_self() + + +def test_are_connection_types_compatible_accepts_subclass_to_base(): + """A subclass output should be connectable to a base-class input. + + This test exposes the bug where non-Union targets reject valid subclass connections. + """ + from invokeai.app.services.shared.graph import are_connection_types_compatible + + class Base: + pass + + class Child(Base): + pass + + assert are_connection_types_compatible(Child, Base) is True diff --git a/tests/test_node_graph.py b/tests/test_node_graph.py index ae3e26075f4..160dc96d852 100644 --- a/tests/test_node_graph.py +++ b/tests/test_node_graph.py @@ -785,7 +785,7 @@ def test_collector_different_incomers(): run_session_with_mock_context(session) output = get_single_output_from_session(session, n3.id) assert isinstance(output, CollectInvocationOutput) - assert output.collection == ["Banana", "Sushi"] # Both inputs should be collected + assert set(output.collection) == {"Banana", "Sushi"} # Both inputs should be collected, no order guarantee def test_iterator_collector_iterator_chain():