diff --git a/src/fromager/bootstrapper.py b/src/fromager/bootstrapper.py
index e749641f..85fa107d 100644
--- a/src/fromager/bootstrapper.py
+++ b/src/fromager/bootstrapper.py
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import dataclasses
 import json
 import logging
 import operator
@@ -36,6 +37,59 @@
 
 logger = logging.getLogger(__name__)
 
+
+@dataclasses.dataclass
+class BuildResult:
+    """Simple result object for package builds.
+
+    Tracks both successful and failed build attempts with detailed context
+    for error analysis and reporting in test mode.
+    """
+
+    wheel_filename: pathlib.Path | None = None
+    sdist_filename: pathlib.Path | None = None
+    unpack_dir: pathlib.Path | None = None
+    source_url_type: str = "unknown"
+    sdist_root_dir: pathlib.Path | None = None
+    build_env: build_environment.BuildEnvironment | None = None
+    failed: bool = False
+
+    # Context fields for error tracking and reporting
+    req: Requirement | None = None
+    resolved_version: Version | None = None
+    exception: Exception | None = dataclasses.field(
+        default=None, repr=False, compare=False
+    )
+    exception_type: str | None = None  # Serializable: exception.__class__.__name__
+    exception_message: str | None = None  # Serializable: str(exception)
+
+    @classmethod
+    def failure(
+        cls,
+        req: Requirement | None = None,
+        resolved_version: Version | None = None,
+        exception: Exception | None = None,
+    ) -> BuildResult:
+        """Create a failed build result with context for debugging.
+
+        Args:
+            req: The requirement that failed to build
+            resolved_version: The resolved version that was attempted
+            exception: The exception that caused the failure
+
+        Returns:
+            BuildResult marked as failed with captured context
+        """
+        return cls(
+            failed=True,
+            req=req,
+            resolved_version=resolved_version,
+            exception=exception,
+            exception_type=exception.__class__.__name__ if exception else None,
+            exception_message=str(exception) if exception else None,
+        )
+
+
 # package name, extras, version, sdist/wheel
 SeenKey = tuple[NormalizedName, tuple[str, ...], str, typing.Literal["sdist", "wheel"]]
 
@@ -48,12 +102,17 @@ def __init__(
         prev_graph: DependencyGraph | None = None,
         cache_wheel_server_url: str | None = None,
         sdist_only: bool = False,
+        test_mode: bool = False,
     ) -> None:
         self.ctx = ctx
         self.progressbar = progressbar or progress.Progressbar(None)
         self.prev_graph = prev_graph
         self.cache_wheel_server_url = cache_wheel_server_url or ctx.wheel_server_url
         self.sdist_only = sdist_only
+        self.test_mode = test_mode
+        self.failed_builds: list[
+            BuildResult
+        ] = []  # Track all failures (build, resolution, etc.) with full context
         self.why: list[tuple[RequirementType, Requirement, Version]] = []
         # Push items onto the stack as we start to resolve their
         # dependencies so at the end we have a list of items that need to
@@ -154,8 +213,8 @@ def bootstrap(self, req: Requirement, req_type: RequirementType) -> Version:
         #
         # When bootstrap encounters another package with a *build* requirement
         # on a pre-built wheel, its installation dependencies are materialized.
-        build_sdist_only = self.sdist_only and not self._processing_build_requirement(
-            req_type
-        )
+        build_sdist_only: bool = (
+            self.sdist_only and not self._processing_build_requirement(req_type)
+        )
 
         # Avoid cyclic dependencies and redundant processing.
@@ -169,168 +228,41 @@ def bootstrap(self, req: Requirement, req_type: RequirementType) -> Version:
         logger.info(f"new {req_type} dependency {req} resolves to {resolved_version}")
 
-        # Build the dependency chain up to the point of this new
-        # requirement using a new list so we can avoid modifying the list
-        # we're given.
         self.why.append((req_type, req, resolved_version))
 
-        # for cleanup
-        build_env: build_environment.BuildEnvironment | None = None
-        sdist_root_dir: pathlib.Path | None = None
-        cached_wheel_filename: pathlib.Path | None = None
-        wheel_filename: pathlib.Path | None = None
-        sdist_filename: pathlib.Path | None = None
-        unpack_dir: pathlib.Path | None = None
-        unpacked_cached_wheel: pathlib.Path | None = None
-
-        source_url_type = sources.get_source_type(self.ctx, req)
-
-        if pbi.pre_built:
-            wheel_filename, unpack_dir = self._download_prebuilt(
-                req=req,
-                req_type=req_type,
-                resolved_version=resolved_version,
-                wheel_url=source_url,
-            )
-            # Remember that this is a prebuilt wheel, and where we got it.
-            source_url_type = str(SourceType.PREBUILT)
-        else:
-            # Look a few places for an existing wheel that matches what we need,
-            # using caches for locations where we might have built the wheel
-            # before.
-
-            # Check if we have previously built a wheel and still have it on the
-            # local filesystem.
-            if not wheel_filename and not cached_wheel_filename:
-                cached_wheel_filename, unpacked_cached_wheel = (
-                    self._look_for_existing_wheel(
-                        req,
-                        resolved_version,
-                        self.ctx.wheels_build,
-                    )
-                )
-
-            # Check if we have previously downloaded a wheel and still have it
-            # on the local filesystem.
-            if not wheel_filename and not cached_wheel_filename:
-                cached_wheel_filename, unpacked_cached_wheel = (
-                    self._look_for_existing_wheel(
-                        req,
-                        resolved_version,
-                        self.ctx.wheels_downloads,
-                    )
-                )
-
-            # Look for a wheel on the cache server and download it if there is
-            # one.
-            if not wheel_filename and not cached_wheel_filename:
-                cached_wheel_filename, unpacked_cached_wheel = (
-                    self._download_wheel_from_cache(req, resolved_version)
-                )
-
-            if not unpacked_cached_wheel:
-                # We didn't find anything so we are going to have to build the
-                # wheel in order to process its installation dependencies.
-                logger.debug("no cached wheel, downloading sources")
-                source_filename = sources.download_source(
-                    ctx=self.ctx,
-                    req=req,
-                    version=resolved_version,
-                    download_url=source_url,
-                )
-                sdist_root_dir = sources.prepare_source(
-                    ctx=self.ctx,
-                    req=req,
-                    source_filename=source_filename,
-                    version=resolved_version,
-                )
-            else:
-                logger.debug(f"have cached wheel in {unpacked_cached_wheel}")
-                sdist_root_dir = unpacked_cached_wheel / unpacked_cached_wheel.stem
-
-            assert sdist_root_dir is not None
-
-            if sdist_root_dir.parent.parent != self.ctx.work_dir:
-                raise ValueError(
-                    f"'{sdist_root_dir}/../..' should be {self.ctx.work_dir}"
-                )
-            unpack_dir = sdist_root_dir.parent
-
-            build_env = build_environment.BuildEnvironment(
-                ctx=self.ctx,
-                parent_dir=sdist_root_dir.parent,
-            )
-
-        # need to call this function irrespective of whether we had the wheel cached
-        # so that the build dependencies can be bootstrapped
-        self._prepare_build_dependencies(req, sdist_root_dir, build_env)
-
-        if cached_wheel_filename:
-            logger.debug(
-                f"getting install requirements from cached "
-                f"wheel {cached_wheel_filename.name}"
-            )
-            # prefer existing wheel even in sdist_only mode
-            # skip building even if it is a non-fromager built wheel
-            wheel_filename = cached_wheel_filename
-            build_sdist_only = False
-        elif build_sdist_only:
-            # get install dependencies from sdist and pyproject_hooks (only top-level and install)
-            logger.debug(
-                f"getting install requirements from sdist "
-                f"{req.name}=={resolved_version} ({req_type})"
-            )
-            wheel_filename = None
-            sdist_filename = self._build_sdist(
-                req, resolved_version, sdist_root_dir, build_env
-            )
-        else:
-            # build wheel (build requirements, full build mode)
-            logger.debug(
-                f"building wheel {req.name}=={resolved_version} "
-                f"to get install requirements ({req_type})"
-            )
-            wheel_filename, sdist_filename = self._build_wheel(
-                req, resolved_version, sdist_root_dir, build_env
-            )
+        result = self._build_package(req, resolved_version, pbi, build_sdist_only)
 
         hooks.run_post_bootstrap_hooks(
             ctx=self.ctx,
             req=req,
             dist_name=canonicalize_name(req.name),
             dist_version=str(resolved_version),
-            sdist_filename=sdist_filename,
-            wheel_filename=wheel_filename,
+            sdist_filename=result.sdist_filename,
+            wheel_filename=result.wheel_filename,
         )
 
-        if wheel_filename is not None:
-            assert unpack_dir is not None
-            logger.debug(
-                "get install dependencies of wheel %s",
-                wheel_filename.name,
+        if self.test_mode and result.failed:
+            logger.warning(
+                "test mode: skipping install dependencies for failed package %s",
+                req.name,
             )
+            install_dependencies = set()
+        elif result.wheel_filename and result.unpack_dir:
             install_dependencies = dependencies.get_install_dependencies_of_wheel(
                 req=req,
-                wheel_filename=wheel_filename,
-                requirements_file_dir=unpack_dir,
-            )
-        elif sdist_filename is not None:
-            assert sdist_root_dir is not None
-            assert build_env is not None
-            logger.debug(
-                "get install dependencies of sdist from directory %s",
-                sdist_root_dir,
+                wheel_filename=result.wheel_filename,
+                requirements_file_dir=result.unpack_dir,
             )
+        elif result.sdist_filename and result.sdist_root_dir and result.build_env:
             install_dependencies = dependencies.get_install_dependencies_of_sdist(
                 ctx=self.ctx,
                 req=req,
                 version=resolved_version,
-                sdist_root_dir=sdist_root_dir,
-                build_env=build_env,
+                sdist_root_dir=result.sdist_root_dir,
+                build_env=result.build_env,
             )
         else:
-            # unreachable
-            raise RuntimeError("wheel_filename and sdist_filename are None")
+            raise RuntimeError("Both wheel_filename and sdist_filename are None")
 
         logger.debug(
             "install dependencies: %s",
@@ -341,7 +273,7 @@ def bootstrap(self, req: Requirement, req_type: RequirementType) -> Version:
             req=req,
             version=resolved_version,
             source_url=source_url,
-            source_url_type=source_url_type,
+            source_url_type=result.source_url_type,
             prebuilt=pbi.pre_built,
             constraint=constraint,
         )
@@ -357,7 +289,7 @@ def bootstrap(self, req: Requirement, req_type: RequirementType) -> Version:
 
         # we are done processing this req, so lets remove it from the why chain
         self.why.pop()
-        self.ctx.clean_build_dirs(sdist_root_dir, build_env)
+        self.ctx.clean_build_dirs(result.sdist_root_dir, result.build_env)
 
         return resolved_version
 
     @property
@@ -417,6 +349,187 @@ def _build_wheel(
         logger.info(f"built wheel for version {resolved_version}: {wheel_filename}")
         return wheel_filename, sdist_filename
 
+    def _build_package(
+        self,
+        req: Requirement,
+        resolved_version: Version,
+        pbi,
+        build_sdist_only: bool,
+    ) -> BuildResult:
+        """Build or download package - handles test mode failures gracefully."""
+        try:
+            return self._build_wheel_and_sdist(
+                req, resolved_version, pbi, build_sdist_only
+            )
+        except Exception as build_error:
+            if not self.test_mode:
+                raise
+
+            logger.warning(
+                "test mode: build failed for %s==%s, attempting fallback to pre-built",
+                req.name,
+                resolved_version,
+                exc_info=True,
+            )
+
+            try:
+                wheel_url, _ = self._resolve_prebuilt_with_history(
+                    req=req, req_type=RequirementType.TOP_LEVEL
+                )
+                wheel_filename, unpack_dir = self._download_prebuilt(
+                    req=req,
+                    req_type=RequirementType.TOP_LEVEL,
+                    resolved_version=resolved_version,
+                    wheel_url=wheel_url,
+                )
+                logger.info(
+                    "test mode: successfully handled %s as pre-built after build failure",
+                    req.name,
+                )
+                return BuildResult(
+                    wheel_filename=wheel_filename,
+                    unpack_dir=unpack_dir,
+                    source_url_type=str(SourceType.PREBUILT),
+                )
+            except Exception as prebuilt_error:
+                logger.error(
+                    "test mode: failed to handle %s as pre-built: %s",
+                    req.name,
+                    prebuilt_error,
+                    exc_info=True,
+                )
+                failure = BuildResult.failure(
+                    req=req,
+                    resolved_version=resolved_version,
+                    exception=build_error,  # Use original build error, not prebuilt fallback error
+                )
+                self.failed_builds.append(failure)
+                return failure
+
+    def _build_wheel_and_sdist(
+        self,
+        req: Requirement,
+        resolved_version: Version,
+        pbi,
+        build_sdist_only: bool,
+    ) -> BuildResult:
+        """Build or download wheel and sdist for the current requirement."""
+        build_env: build_environment.BuildEnvironment | None = None
+        sdist_root_dir: pathlib.Path | None = None
+        cached_wheel_filename: pathlib.Path | None = None
+        wheel_filename: pathlib.Path | None = None
+        sdist_filename: pathlib.Path | None = None
+        unpack_dir: pathlib.Path | None = None
+        unpacked_cached_wheel: pathlib.Path | None = None
+        source_url, _ = self.resolve_version(
+            req=req, req_type=RequirementType.TOP_LEVEL
+        )
+        source_url_type = sources.get_source_type(self.ctx, req)
+
+        if pbi.pre_built:
+            wheel_filename, unpack_dir = self._download_prebuilt(
+                req=req,
+                req_type=RequirementType.TOP_LEVEL,
+                resolved_version=resolved_version,
+                wheel_url=source_url,
+            )
+            source_url_type = str(SourceType.PREBUILT)
+        else:
+            cached_wheel_filename, unpacked_cached_wheel = self._find_cached_wheel(
+                req, resolved_version
+            )
+
+            if not unpacked_cached_wheel:
+                sdist_root_dir, unpack_dir = self._prepare_source_for_build(
+                    req, resolved_version, source_url
+                )
+                build_env = build_environment.BuildEnvironment(
+                    ctx=self.ctx, parent_dir=sdist_root_dir.parent
+                )
+                self._prepare_build_dependencies(req, sdist_root_dir, build_env)
+            else:
+                logger.debug(f"have cached wheel in {unpacked_cached_wheel}")
+                sdist_root_dir = unpacked_cached_wheel / unpacked_cached_wheel.stem
+                unpack_dir = sdist_root_dir.parent
+                build_env = build_environment.BuildEnvironment(
+                    ctx=self.ctx, parent_dir=sdist_root_dir.parent
+                )
+                self._prepare_build_dependencies(req, sdist_root_dir, build_env)
+
+            if cached_wheel_filename:
+                logger.debug(f"using cached wheel {cached_wheel_filename.name}")
+                wheel_filename = cached_wheel_filename
+            elif build_sdist_only:
+                logger.debug(f"building sdist for {req.name}=={resolved_version}")
+                wheel_filename = None
+                sdist_filename = self._build_sdist(
+                    req, resolved_version, sdist_root_dir, build_env
+                )
+            else:
+                logger.debug(f"building wheel for {req.name}=={resolved_version}")
+                wheel_filename, sdist_filename = self._build_wheel(
+                    req, resolved_version, sdist_root_dir, build_env
+                )
+
+        return BuildResult(
+            wheel_filename=wheel_filename,
+            sdist_filename=sdist_filename,
+            unpack_dir=unpack_dir,
+            source_url_type=source_url_type,
+            sdist_root_dir=sdist_root_dir,
+            build_env=build_env,
+        )
+
+    def _find_cached_wheel(
+        self, req: Requirement, resolved_version: Version
+    ) -> tuple[pathlib.Path | None, pathlib.Path | None]:
+        """Find cached wheel in local directories or download from cache server.
+
+        Returns (wheel_filename, unpacked_metadata_dir) where either can be None.
+        """
+        cached_wheel_filename, unpacked_cached_wheel = self._look_for_existing_wheel(
+            req, resolved_version, self.ctx.wheels_build
+        )
+
+        if not cached_wheel_filename:
+            cached_wheel_filename, unpacked_cached_wheel = (
+                self._look_for_existing_wheel(
+                    req, resolved_version, self.ctx.wheels_downloads
+                )
+            )
+
+        if not cached_wheel_filename:
+            cached_wheel_filename, unpacked_cached_wheel = (
+                self._download_wheel_from_cache(req, resolved_version)
+            )
+
+        return cached_wheel_filename, unpacked_cached_wheel
+
+    def _prepare_source_for_build(
+        self, req: Requirement, resolved_version: Version, source_url: str
+    ) -> tuple[pathlib.Path, pathlib.Path]:
+        """Download and prepare source for building."""
+        logger.debug("no cached wheel, downloading sources")
+        source_filename = sources.download_source(
+            ctx=self.ctx,
+            req=req,
+            version=resolved_version,
+            download_url=source_url,
+        )
+        sdist_root_dir = sources.prepare_source(
+            ctx=self.ctx,
+            req=req,
+            source_filename=source_filename,
+            version=resolved_version,
+        )
+
+        assert sdist_root_dir is not None
+        if sdist_root_dir.parent.parent != self.ctx.work_dir:
+            raise ValueError(f"'{sdist_root_dir}/../..' should be {self.ctx.work_dir}")
+
+        unpack_dir = sdist_root_dir.parent
+        return sdist_root_dir, unpack_dir
+
     def _prepare_build_dependencies(
         self,
         req: Requirement,
@@ -486,7 +599,18 @@ def _handle_build_requirements(
             try:
                 self.bootstrap(req=dep, req_type=build_type)
             except Exception as err:
-                raise ValueError(f"could not handle {self._explain}") from err
+                if self.test_mode:
+                    logger.warning(
+                        "test mode: build dependency %s failed for %s, marking as failed and continuing",
+                        dep.name,
+                        req.name,
+                        exc_info=True,
+                    )
+                    failure = BuildResult.failure(req=dep, exception=err)
+                    self.failed_builds.append(failure)
+                    continue
+                else:
+                    raise ValueError(f"could not handle {self._explain}") from err
 
         self.progressbar.update()
 
     def _download_prebuilt(
@@ -557,9 +681,7 @@ def _download_wheel_from_cache(
         wheelfile_name = pathlib.Path(urlparse(wheel_url).path)
         pbi = self.ctx.package_build_info(req)
         expected_build_tag = pbi.build_tag(resolved_version)
-        # Log the expected build tag for debugging
        logger.info(f"has expected build tag {expected_build_tag}")
-        # Get changelogs for debug info
         changelogs = pbi.get_changelog(resolved_version)
         logger.debug(f"has change logs {changelogs}")
 
diff --git a/src/fromager/commands/bootstrap.py b/src/fromager/commands/bootstrap.py
index a9836428..7edb1cda 100644
--- a/src/fromager/commands/bootstrap.py
+++ b/src/fromager/commands/bootstrap.py
@@ -97,6 +97,13 @@ def _get_requirements_from_args(
     default=False,
     help="Skip generating constraints.txt file to allow building collections with conflicting versions",
 )
+@click.option(
+    "--test-mode",
+    "test_mode",
+    is_flag=True,
+    default=False,
+    help="Test mode: mark failed packages as pre-built and continue, report failures at end",
+)
 @click.argument("toplevel", nargs=-1)
 @click.pass_obj
 def bootstrap(
@@ -106,6 +113,7 @@ def bootstrap(
     cache_wheel_server_url: str | None,
     sdist_only: bool,
     skip_constraints: bool,
+    test_mode: bool,
     toplevel: list[str],
 ) -> None:
     """Compute and build the dependencies of a set of requirements recursively
@@ -116,6 +124,11 @@ def bootstrap(
     """
     logger.info(f"cache wheel server url: {cache_wheel_server_url}")
 
+    if test_mode:
+        logger.info(
+            "test mode enabled: will mark failed packages as pre-built and continue"
+        )
+
     to_build = _get_requirements_from_args(toplevel, requirements_files)
     if not to_build:
         raise RuntimeError(
@@ -148,6 +161,7 @@ def bootstrap(
         prev_graph,
         cache_wheel_server_url,
         sdist_only=sdist_only,
+        test_mode=test_mode,
     )
 
     # we need to resolve all the top level dependencies before we start bootstrapping.
@@ -183,9 +197,29 @@ def bootstrap(
     for req in to_build:
         token = requirement_ctxvar.set(req)
-        bt.bootstrap(req, requirements_file.RequirementType.TOP_LEVEL)
-        progressbar.update()
-        requirement_ctxvar.reset(token)
+        try:
+            bt.bootstrap(req, requirements_file.RequirementType.TOP_LEVEL)
+            progressbar.update()
+            if test_mode:
+                logger.info("Successfully processed: %s", req)
+        except Exception as err:
+            if test_mode:
+                # Test mode: record error, log, and continue processing
+                logger.error(
+                    "test mode: failed to process %s: %s",
+                    req,
+                    err,
+                    exc_info=True,  # Full traceback to debug log
+                )
+                bt.failed_builds.append(
+                    bootstrapper.BuildResult.failure(req=req, exception=err)
+                )
+                progressbar.update()  # Update progress even on failure
+            else:
+                # Normal mode: re-raise the exception (fail-fast)
+                raise
+        finally:
+            requirement_ctxvar.reset(token)
 
     constraints_filename = wkctx.work_dir / "constraints.txt"
     if skip_constraints:
@@ -200,7 +234,57 @@ def bootstrap(
 
     logger.debug("match_py_req LRU cache: %r", resolver.match_py_req.cache_info())
 
-    metrics.summarize(wkctx, "Bootstrapping")
+    # Test mode summary reporting
+    if test_mode:
+        if bt.failed_builds:
+            # Use repository's logging pattern for error reporting
+            logger.error("test mode: the following packages failed to build:")
+            for failure in sorted(
+                bt.failed_builds, key=lambda f: str(f.req) if f.req else ""
+            ):
+                if failure.req and failure.resolved_version:
+                    logger.error(
+                        " - %s==%s",
+                        failure.req,
+                        failure.resolved_version,
+                    )
+                    if failure.exception_type:
+                        logger.error(
+                            " Error: %s: %s",
+                            failure.exception_type,
+                            failure.exception_message,
+                        )
+                else:
+                    logger.error(" - unknown package (missing context)")
+
+            # Categorize failures by exception type for better analysis
+            failure_types: dict[str, list[str]] = {}
+            for failure in bt.failed_builds:
+                exc_type = failure.exception_type or "Unknown"
+                pkg_name = (
+                    f"{failure.req}=={failure.resolved_version}"
+                    if failure.req and failure.resolved_version
+                    else "unknown"
+                )
+                failure_types.setdefault(exc_type, []).append(pkg_name)
+
+            logger.error("")
+            logger.error("test mode: failure breakdown by type:")
+            for exc_type, packages in sorted(failure_types.items()):
+                logger.error(" %s: %d package(s)", exc_type, len(packages))
+
+            logger.error(
+                "test mode: %d package(s) failed to build", len(bt.failed_builds)
+            )
+            # Follow repository's error exit pattern like __main__.py and lint.py
+            raise SystemExit(
+                f"Test mode completed with {len(bt.failed_builds)} build failures"
+            )
+        else:
+            logger.info("test mode: all packages built successfully")
+            metrics.summarize(wkctx, "Test Mode Bootstrapping")
+    else:
+        metrics.summarize(wkctx, "Bootstrapping")
 
 
 def write_constraints_file(
@@ -480,6 +564,9 @@ def bootstrap_parallel(
     remaining wheels in parallel. The bootstrap step downloads sdists and builds
    build-time dependency in serial. The build-parallel step builds the
     remaining wheels in parallel.
+
+    Note: --test-mode is not supported with bootstrap-parallel. Use
+    'bootstrap --test-mode' for comprehensive failure testing.
     """
     # Do not remove build environments in bootstrap phase to speed up the
     # parallel build phase.
@@ -495,6 +582,7 @@ def bootstrap_parallel(
         cache_wheel_server_url=cache_wheel_server_url,
         sdist_only=True,
         skip_constraints=skip_constraints,
+        test_mode=False,
         toplevel=toplevel,
     )
 
diff --git a/src/fromager/commands/graph.py b/src/fromager/commands/graph.py
index 51802b8e..a00bde48 100644
--- a/src/fromager/commands/graph.py
+++ b/src/fromager/commands/graph.py
@@ -125,7 +125,7 @@ def _find_customized_nodes(
     """Filter nodes to find only those with customizations."""
     customized_nodes: list[DependencyNode] = []
     for node in nodes:
-        pbi = wkctx.settings.package_build_info(node.canonicalized_name)
+        pbi = wkctx.package_build_info(node.canonicalized_name)
         if node.canonicalized_name != ROOT and pbi.has_customizations:
             customized_nodes.append(node)
     return customized_nodes
@@ -161,7 +161,7 @@ def _find_customized_dependencies_for_node(
             continue
 
         child = edge.destination_node
-        child_pbi = wkctx.settings.package_build_info(child.canonicalized_name)
+        child_pbi = wkctx.package_build_info(child.canonicalized_name)
         new_path = path + [current_node.key]
 
         # Use the first requirement we encounter in the path
@@ -277,7 +277,7 @@ def get_node_id(node: str) -> str:
         if not name:
             node_type.append("toplevel")
         else:
-            pbi = wkctx.settings.package_build_info(name)
+            pbi = wkctx.package_build_info(name)
             all_patches: PatchMap = pbi.get_all_patches()
 
             if node.pre_built:
diff --git a/src/fromager/commands/list_overrides.py b/src/fromager/commands/list_overrides.py
index ba62087f..228ed461 100644
--- a/src/fromager/commands/list_overrides.py
+++ b/src/fromager/commands/list_overrides.py
@@ -65,7 +65,7 @@ def list_overrides(
     export_data = []
 
     for name in overridden_packages:
-        pbi = wkctx.settings.package_build_info(name)
+        pbi = wkctx.package_build_info(name)
         ps = wkctx.settings.package_setting(name)
 
         plugin_hooks: list[str] = []
diff --git a/src/fromager/context.py b/src/fromager/context.py
index 02c94f8f..3c47be85 100644
--- a/src/fromager/context.py
+++ b/src/fromager/context.py
@@ -164,7 +164,7 @@ def package_build_info(
             name = package.name
         else:
             name = package
-        return self.settings.package_build_info(name)
+        return self.settings.package_build_info(name, self)
 
     def setup(self) -> None:
         # The work dir must already exist, so don't try to create it.
diff --git a/src/fromager/hooks.py b/src/fromager/hooks.py
index cf7457a2..ed8df140 100644
--- a/src/fromager/hooks.py
+++ b/src/fromager/hooks.py
@@ -3,6 +3,7 @@
 import logging
 import pathlib
 import typing
+from importlib import metadata
 
 from packaging.requirements import Requirement
 from stevedore import extension, hook
@@ -39,7 +40,7 @@ def _get_hooks(name: str) -> hook.HookManager:
 def log_hooks() -> None:
     # We load the hooks differently here because we want all of them when
     # normally we would load them by name.
-    _mgr = extension.ExtensionManager(
+    _mgr: extension.ExtensionManager[typing.Any] = extension.ExtensionManager(
         namespace="fromager.hooks",
         invoke_on_load=False,
         on_load_failure_callback=_die_on_plugin_load_failure,
@@ -56,9 +57,9 @@ def log_hooks() -> None:
 
 
 def _die_on_plugin_load_failure(
-    mgr: hook.HookManager,
-    ep: extension.Extension,
-    err: Exception,
+    mgr: hook.HookManager[typing.Any],
+    ep: metadata.EntryPoint,
+    err: BaseException,
 ) -> typing.NoReturn:
     raise RuntimeError(f"failed to load overrides for {ep.name}") from err
 
diff --git a/src/fromager/overrides.py b/src/fromager/overrides.py
index ea5fc6fc..38434860 100644
--- a/src/fromager/overrides.py
+++ b/src/fromager/overrides.py
@@ -18,7 +18,7 @@
 _mgr = None
 
 
-def _get_extensions() -> extension.ExtensionManager:
+def _get_extensions() -> extension.ExtensionManager[typing.Any]:
     global _mgr
     if _mgr is None:
         _mgr = extension.ExtensionManager(
@@ -30,9 +30,9 @@ def _get_extensions() -> extension.ExtensionManager:
 
 
 def _die_on_plugin_load_failure(
-    mgr: extension.ExtensionManager,
-    ep: extension.Extension,
-    err: Exception,
+    mgr: extension.ExtensionManager[typing.Any],
+    ep: metadata.EntryPoint,
+    err: BaseException,
 ) -> None:
     raise RuntimeError(f"failed to load overrides for {ep.name}") from err
 
diff --git a/src/fromager/packagesettings.py b/src/fromager/packagesettings.py
index 180081d3..99bfbc1e 100644
--- a/src/fromager/packagesettings.py
+++ b/src/fromager/packagesettings.py
@@ -624,12 +624,26 @@ def get_available_memory_gib() -> float:
 
 
 class PackageBuildInfo:
-    """Package build information
+    """Variant-aware package build configuration and metadata.
 
-    Public API for PackageSettings with i
+    Primary public API for accessing package-specific settings during the build
+    process. Combines static configuration from YAML files with runtime context
+    to provide variant-specific (cpu, cuda, etc.) build information.
+
+    Key responsibilities:
+    - Determine if package should be built or use pre-built wheels
+    - Provide patches to apply for specific versions
+    - Configure build environment (parallel jobs, environment variables)
+    - Manage package customizations (plugins, custom download URLs)
+    - Calculate build tags from changelogs for wheel versioning
+
+    Instances are cached per package and accessed via ``WorkContext.package_build_info()``.
     """
 
-    def __init__(self, settings: Settings, ps: PackageSettings) -> None:
+    def __init__(
+        self, settings: Settings, ps: PackageSettings, ctx: context.WorkContext
+    ) -> None:
+        self._ctx = ctx
         self._variant = typing.cast(Variant, settings.variant)
         self._patches_dir = settings.patches_dir
         self._variant_changelog = settings.variant_changelog()
@@ -744,7 +758,7 @@ def has_customizations(self) -> bool:
 
     @property
     def pre_built(self) -> bool:
-        """Does the variant use pre-build wheels?"""
+        """Does the variant use pre-built wheels?"""
         vi = self._ps.variants.get(self.variant)
         if vi is not None:
             return vi.pre_built
@@ -1146,23 +1160,27 @@ def package_setting(self, package: str | Package) -> PackageSettings:
             self._package_settings[package] = ps
         return ps
 
-    def package_build_info(self, package: str | Package) -> PackageBuildInfo:
+    def package_build_info(
+        self, package: str | Package, ctx: context.WorkContext
+    ) -> PackageBuildInfo:
         """Get (cached) PackageBuildInfo for package and current variant"""
         package = Package(canonicalize_name(package, validate=True))
         pbi = self._pbi_cache.get(package)
         if pbi is None:
             ps = self.package_setting(package)
-            pbi = PackageBuildInfo(self, ps)
+            pbi = PackageBuildInfo(self, ps, ctx)
             self._pbi_cache[package] = pbi
         return pbi
 
     def list_pre_built(self) -> set[Package]:
-        """List packages marked as pre-built"""
-        return set(
-            name
-            for name in self._package_settings
-            if self.package_build_info(name).pre_built
-        )
+        """List packages marked as pre-built by configuration"""
+        result = set()
+        for name in self._package_settings:
+            ps = self._package_settings[name]
+            vi = ps.variants.get(self._variant)
+            if vi is not None and vi.pre_built:
+                result.add(name)
+        return result
 
     def list_overrides(self) -> set[Package]:
         """List packages with overrides
diff --git a/tests/test_bootstrap_test_mode.py b/tests/test_bootstrap_test_mode.py
new file mode 100644
index 00000000..4c9b6a6d
--- /dev/null
+++ b/tests/test_bootstrap_test_mode.py
@@ -0,0 +1,91 @@
+"""Tests for bootstrap --test-mode functionality.
+
+Tests for test mode failure tracking and BuildResult.
+"""
+
+from unittest import mock
+
+import pytest
+from packaging.requirements import Requirement
+from packaging.version import Version
+
+from fromager import bootstrapper
+from fromager.context import WorkContext
+
+
+class MockBuildError(Exception):
+    """Mock exception for simulating build failures."""
+
+    pass
+
+
+def test_test_mode_tracks_complete_failures(tmp_context: WorkContext) -> None:
+    """Test that test mode tracks failures with full context when both build and fallback fail."""
+    bt = bootstrapper.Bootstrapper(tmp_context, test_mode=True)
+
+    # Mock to always fail
+    def mock_build_wheel_and_sdist(req, version, pbi, build_sdist_only):
+        raise MockBuildError(f"Build failed for {req.name}")
+
+    with mock.patch.object(
+        bt, "_build_wheel_and_sdist", side_effect=mock_build_wheel_and_sdist
+    ):
+        req = Requirement("broken-package==1.0")
+        version = Version("1.0")
+        pbi = tmp_context.package_build_info(req)
+
+        result = bt._build_package(req, version, pbi, build_sdist_only=False)
+
+        # Verify complete failure is tracked with full context
+        assert result.failed
+        assert result.req == req
+        assert result.resolved_version == version
+        assert result.exception_type == "MockBuildError"
+        assert result.exception_message is not None
+        assert "Build failed for broken-package" in result.exception_message
+
+        # Verify failure is in failed_builds list
+        assert len(bt.failed_builds) == 1
+        failed_build = bt.failed_builds[0]
+        assert failed_build.req is not None
+        assert failed_build.req.name == "broken-package"
+
+
+def test_normal_mode_still_fails_fast(tmp_context: WorkContext) -> None:
+    """Test that normal mode (test_mode=False) still raises exceptions immediately."""
+    bt = bootstrapper.Bootstrapper(tmp_context, test_mode=False)
+
+    def mock_build_wheel_and_sdist(req, version, pbi, build_sdist_only):
+        raise MockBuildError(f"Build failed for {req.name}")
+
+    with mock.patch.object(
+        bt, "_build_wheel_and_sdist", side_effect=mock_build_wheel_and_sdist
+    ):
+        req = Requirement("failing-package==1.0")
+        version = Version("1.0")
+        pbi = tmp_context.package_build_info(req)
+
+        # Should raise immediately in normal mode
+        with pytest.raises(MockBuildError, match="Build failed for failing-package"):
+            bt._build_package(req, version, pbi, build_sdist_only=False)
+
+
+def test_build_result_captures_exception_context() -> None:
+    """Test that BuildResult.failure() properly captures exception context."""
+    req = Requirement("test-package>=1.0")
+    version = Version("1.2.3")
+    exception = ValueError("Something went wrong")
+
+    result = bootstrapper.BuildResult.failure(
+        req=req, resolved_version=version, exception=exception
+    )
+
+    # Verify all context is captured
+    assert result.failed
+    assert result.req == req
+    assert result.resolved_version == version
+    assert result.exception is exception
+    assert result.exception_type == "ValueError"
+    assert result.exception_message == "Something went wrong"
+    assert result.wheel_filename is None
+    assert result.sdist_filename is None
diff --git a/tests/test_commands.py b/tests/test_commands.py
index 7617e308..6ac1ceac 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -17,5 +17,7 @@ def test_bootstrap_parallel_options() -> None:
     # graph_file internally.
     expected.discard("sdist_only")
     expected.discard("graph_file")
+    # test_mode is not supported in bootstrap-parallel (serial mode only)
+    expected.discard("test_mode")
 
     assert set(get_option_names(bootstrap.bootstrap_parallel)) == expected
diff --git a/tests/test_graph_commands.py b/tests/test_graph_commands.py
index 4f2f63fd..2d75af84 100644
--- a/tests/test_graph_commands.py
+++ b/tests/test_graph_commands.py
@@ -273,15 +273,13 @@ def test_find_transitive_customized_dependency() -> None:
 
     # Mock context: only package-c is customized
     mock_ctx = Mock()
-    mock_settings = Mock()
 
     def mock_pbi(name: str) -> Mock:
         pbi = Mock()
         pbi.has_customizations = name == "package-c"
         return pbi
 
-    mock_settings.package_build_info = mock_pbi
-    mock_ctx.settings = mock_settings
+    mock_ctx.package_build_info = mock_pbi
 
     # Test
     node_a = graph.nodes["package-a==1.0.0"]
@@ -376,15 +374,13 @@ def test_cycle_prevention_no_infinite_loop() -> None:
 
     # Mock context: only package-c is customized
     mock_ctx = Mock()
-    mock_settings = Mock()
 
     def mock_pbi(name: str) -> Mock:
         pbi = Mock()
         pbi.has_customizations = name == "package-c"
         return pbi
 
-    mock_settings.package_build_info = mock_pbi
-    mock_ctx.settings = mock_settings
+    mock_ctx.package_build_info = mock_pbi
 
     # Test: Should not hang or raise error
     node_a = graph.nodes["package-a==1.0.0"]
@@ -431,15 +427,13 @@ def test_requirement_preservation_through_chain() -> None:
 
     # Mock context: only package-c is customized
     mock_ctx = Mock()
-    mock_settings = Mock()
 
     def mock_pbi(name: str) -> Mock:
         pbi = Mock()
         pbi.has_customizations = name == "package-c"
         return pbi
 
-    mock_settings.package_build_info = mock_pbi
-    mock_ctx.settings = mock_settings
+    mock_ctx.package_build_info = mock_pbi
 
     # Test
     node_a = graph.nodes["package-a==1.0.0"]
diff --git a/tests/test_packagesettings.py b/tests/test_packagesettings.py
index 07bee01a..a00fea64 100644
--- a/tests/test_packagesettings.py
+++ b/tests/test_packagesettings.py
@@ -239,7 +239,7 @@ def test_pbi_test_pkg_extra_environ(
         "EXTRA_MAX_JOBS": "1",
     }
 
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert (
         pbi.get_extra_environ(template_env={"EXTRA": "extra"})
         == {
@@ -264,7 +264,7 @@ def test_pbi_test_pkg_extra_environ(
     )
 
     testdata_context.settings.variant = Variant("rocm")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert (
         pbi.get_extra_environ(template_env={"EXTRA": "extra"})
         == {
@@ -278,7 +278,7 @@ def test_pbi_test_pkg_extra_environ(
     )
 
     testdata_context.settings.variant = Variant("cuda")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert (
         pbi.get_extra_environ(template_env={"EXTRA": "spam"})
         == {
@@ -319,7 +319,7 @@ def test_pbi_test_pkg_extra_environ(
 
 
 def test_pbi_test_pkg(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.package == NormalizedName(TEST_PKG)
     assert pbi.variant == Variant(testdata_context.settings.variant)
     assert pbi.pre_built is False
@@ -357,7 +357,7 @@ def test_pbi_test_pkg(testdata_context: context.WorkContext) -> None:
 
 
 def test_pbi_test_pkg_patches(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     norm_test_pkg = TEST_PKG.replace("-", "_")
     unversioned_patchdir = testdata_context.settings.patches_dir / norm_test_pkg
     versioned_patchdir = (
@@ -391,7 +391,7 @@ def test_pbi_test_pkg_patches(testdata_context: context.WorkContext) -> None:
 
 
 def test_pbi_other(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_OTHER_PKG)
+    pbi = testdata_context.package_build_info(TEST_OTHER_PKG)
     assert pbi.package == NormalizedName(TEST_OTHER_PKG)
     assert pbi.variant == Variant(testdata_context.settings.variant)
     assert pbi.pre_built is False
@@ -478,7 +478,7 @@ def test_settings_overrides(testdata_context: context.WorkContext) -> None:
 
 
 def test_global_changelog(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.package == TEST_PKG
     assert not pbi.pre_built
     assert pbi.variant == "cpu"
@@ -490,7 +490,7 @@ def test_global_changelog(testdata_context: context.WorkContext) -> None:
 
     # CUDA variant has no global changelog
     testdata_context.settings.variant = Variant("cuda")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.package == TEST_PKG
     assert not pbi.pre_built
     assert pbi.variant == "cuda"
@@ -502,14 +502,14 @@ def test_global_changelog(testdata_context: context.WorkContext) -> None:
 
     # ROCm variant has pre-built flag
     testdata_context.settings.variant = Variant("rocm")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.package == TEST_PKG
     assert pbi.pre_built
     assert pbi.variant == "rocm"
     assert pbi.build_tag(Version("0.99")) == ()
 
     testdata_context.settings.variant = Variant("cpu")
-    pbi = testdata_context.settings.package_build_info(TEST_PREBUILT_PKG)
+    pbi = testdata_context.package_build_info(TEST_PREBUILT_PKG)
     assert pbi.package == TEST_PREBUILT_PKG
     assert pbi.pre_built
     assert pbi.variant == "cpu"
@@ -543,7 +543,7 @@ def test_parallel_jobs(
 ) -> None:
     assert testdata_context.settings.max_jobs is None
 
-    pbi = testdata_context.settings.package_build_info(TEST_EMPTY_PKG)
+    pbi = testdata_context.package_build_info(TEST_EMPTY_PKG)
     assert pbi.parallel_jobs() == 7
 
     get_cpu_count.return_value = 4
@@ -556,13 +556,13 @@ def test_parallel_jobs(
     assert pbi.parallel_jobs() == 1
 
     testdata_context.settings.max_jobs = 2
-    pbi = testdata_context.settings.package_build_info(TEST_EMPTY_PKG)
+    pbi = testdata_context.package_build_info(TEST_EMPTY_PKG)
     get_available_memory_gib.return_value = 23
     assert pbi.parallel_jobs() == 2
 
     # test-pkg needs more memory
     testdata_context.settings.max_jobs = 200
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     get_cpu_count.return_value = 16
     get_available_memory_gib.return_value = 20
     assert pbi.parallel_jobs() == 4
@@ -572,7 +572,7 @@ def test_parallel_jobs(
     assert pbi.parallel_jobs() == 6
 
     testdata_context.settings.max_jobs = 4
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.parallel_jobs() == 4
 
@@ -727,18 +727,26 @@ def test_package_build_info_exclusive_build(
     # Test PackageBuildInfo properly accesses it through build_options
     import pathlib
 
+    from fromager.context import WorkContext
     from fromager.packagesettings import Settings, SettingsFile
 
-    # Create a temporary Settings object to test with
-    settings = Settings(
-        settings=SettingsFile(),
-        package_settings=[custom_settings],
-        variant="cpu",
+    # Create a temporary WorkContext to test with
+    ctx = WorkContext(
+        active_settings=Settings(
+            settings=SettingsFile(),
+            package_settings=[custom_settings],
+            variant="cpu",
+            patches_dir=pathlib.Path("/tmp"),
+            max_jobs=1,
+        ),
+        constraints_file=None,
         patches_dir=pathlib.Path("/tmp"),
-        max_jobs=1,
+        sdists_repo=pathlib.Path("/tmp/sdists"),
+        wheels_repo=pathlib.Path("/tmp/wheels"),
+        work_dir=pathlib.Path("/tmp/work"),
     )
 
-    custom_pbi = settings.package_build_info("exclusive-pkg")
+    custom_pbi = ctx.package_build_info("exclusive-pkg")
 
     assert custom_pbi.exclusive_build is True
 
@@ -770,38 +778,36 @@ def test_annotation_type() -> None:
 
 
 def test_pbi_annotations(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.annotations == {
         "fromager.test.value": "somevalue",
         "fromager.test.override": "cpu override",
     }
 
     testdata_context.settings.variant = Variant("cuda")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.annotations == {
         "fromager.test.value": "somevalue",
         "fromager.test.override": "variant override",
     }
 
     testdata_context.settings.variant = Variant("rocm")
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.annotations == {
         "fromager.test.value": "somevalue",
         "fromager.test.override": "amd override",
    }
 
-    pbi = testdata_context.settings.package_build_info(TEST_EMPTY_PKG)
+    pbi = testdata_context.package_build_info(TEST_EMPTY_PKG)
     assert pbi.annotations == {}
 
 
def test_use_pypi_org_metadata(testdata_context: context.WorkContext) -> None:
-    pbi = testdata_context.settings.package_build_info(TEST_PKG)
+    pbi = testdata_context.package_build_info(TEST_PKG)
     assert pbi.use_pypi_org_metadata
 
-    pbi = testdata_context.settings.package_build_info(TEST_EMPTY_PKG)
+    pbi = testdata_context.package_build_info(TEST_EMPTY_PKG)
     assert not pbi.use_pypi_org_metadata
 
-    pbi = testdata_context.settings.package_build_info(
-        "somepackage_without_customization"
-    )
+    pbi = testdata_context.package_build_info("somepackage_without_customization")
     assert pbi.use_pypi_org_metadata