diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index c72f507..b669167 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -18,6 +18,9 @@ jobs: contents: read outputs: packages: ${{ steps.set-packages.outputs.packages }} + dependency-packages: ${{ steps.set-packages.outputs.dependency-packages }} + dependent-packages: ${{ steps.set-packages.outputs.dependent-packages }} + independent-packages: ${{ steps.set-packages.outputs.independent-packages }} steps: - uses: actions/checkout@v5 @@ -32,28 +35,44 @@ jobs: - name: Read packages from configuration id: set-packages run: | - # Use new read_packages.py script that supports both YAML and txt formats packages=$(python read_packages.py) - echo "packages=$packages" >> "$GITHUB_OUTPUT" - echo "Packages to build:" - echo "$packages" | jq -r '.[] | " - \(.spec) [\(.source)]"' + + # Auto-detect package groups based on their characteristics: + # 1. dependency-packages: packages that other packages depend on (is_dependency: true) + # 2. dependent-packages: packages that have dependencies (build_dependencies: [...]) + # 3. 
independent-packages: packages with no dependencies and not depended upon + + dependency_packages=$(echo "$packages" | jq -c '[.[] | select(.is_dependency == true)]') + dependent_packages=$(echo "$packages" | jq -c '[.[] | select((.build_dependencies | length) > 0)]') + independent_packages=$(echo "$packages" | jq -c '[.[] | select(.is_dependency == false and ((.build_dependencies | length) == 0))]') + + { + echo "packages=$packages" + echo "dependency-packages=$dependency_packages" + echo "dependent-packages=$dependent_packages" + echo "independent-packages=$independent_packages" + } >> "$GITHUB_OUTPUT" + + echo "Package groups (auto-detected):" + echo " Dependency packages (others depend on these): $(echo "$dependency_packages" | jq -r '[.[].name] | join(", ")')" + echo " Dependent packages (depend on others): $(echo "$dependent_packages" | jq -r '[.[].name] | join(", ")')" + echo " Independent packages (no dependencies): $(echo "$independent_packages" | jq -r '[.[].name] | join(", ")')" - build_wheels: + # Build packages that other packages depend on (e.g., numpy) + build_dependency_packages: name: Build ${{ matrix.package.name }} for ${{ matrix.os }} needs: read_packages + if: fromJson(needs.read_packages.outputs.dependency-packages)[0] != null runs-on: ${{ matrix.runs-on }} permissions: contents: read defaults: run: shell: bash - # env: - # # Add the following for Android only: - # CIBW_BUILD_FRONTEND: ${{ matrix.platform == 'android' && 'build' || '' }} strategy: fail-fast: false matrix: - package: ${{ fromJson(needs.read_packages.outputs.packages) }} + package: ${{ fromJson(needs.read_packages.outputs.dependency-packages) }} os: [android-arm64_v8a, android-x86_64, ios] include: - os: android-arm64_v8a @@ -68,8 +87,8 @@ jobs: runs-on: macos-latest platform: ios archs: all - steps: + # Copy all steps from build_level_0 template - name: Check if platform should be skipped id: check-skip run: | @@ -123,72 +142,480 @@ jobs: echo "Installing pip dependencies: ${{ 
join(matrix.package.pip_dependencies, ' ') }}" python -m pip install ${{ join(matrix.package.pip_dependencies, ' ') }} - - name: Wait for and install build dependencies - if: steps.check-skip.outputs.skip != 'true' && matrix.package.build_dependencies[0] != null + # Level 0 has no build dependencies, skip that step + + - name: Download package source + if: steps.check-skip.outputs.skip != 'true' run: | - echo "Package has build dependencies: ${{ join(matrix.package.build_dependencies, ' ') }}" - BUILD_DEPS='${{ toJSON(matrix.package.build_dependencies) }}' + python -m pip install --upgrade pip + # Check if custom URL is specified + if [ "${{ matrix.package.source }}" = "url" ] && [ -n "${{ matrix.package.url }}" ]; then + echo "Downloading from custom URL: ${{ matrix.package.url }}" + curl -L -o package_source "${{ matrix.package.url }}" + # Determine file type and extract + file package_source + if file package_source | grep -q "gzip"; then + mv package_source package.tar.gz + tar -xzf package.tar.gz && rm package.tar.gz + elif file package_source | grep -q "Zip"; then + mv package_source package.zip + unzip package.zip && rm package.zip + elif file package_source | grep -q "tar"; then + mv package_source package.tar + tar -xf package.tar && rm package.tar + else + echo "Unknown file type, trying as tarball" + mv package_source package.tar.gz + tar -xzf package.tar.gz && rm package.tar.gz + fi + elif [ "${{ matrix.package.source }}" = "git" ] && [ -n "${{ matrix.package.url }}" ]; then + echo "Cloning from git: ${{ matrix.package.url }}" + git clone "${{ matrix.package.url }}" package_dir + else + echo "Downloading from PyPI: ${{ matrix.package.spec }}" + pip download --no-binary :all: --no-deps "${{ matrix.package.spec }}" + # Extract the downloaded package + for file in *.tar.gz; do [ -f "$file" ] && tar -xzf "$file" && rm "$file"; done + for file in *.zip; do [ -f "$file" ] && unzip "$file" && rm "$file"; done + for file in *.tar; do [ -f "$file" ] && tar -xf 
"$file" && rm "$file"; done + fi + # Find the extracted directory (exclude common repo directories and scripts) + PACKAGE_DIR=$(find . -maxdepth 1 -type d -not -name ".*" -not -name "__pycache__" -not -name ".github" -not -name "recipes" -not -name "scripts" -not -name "." | head -n 1) - # Parse build dependencies - echo "$BUILD_DEPS" | jq -r '.[]' | while read -r dep_name; do - echo "Waiting for dependency: $dep_name (platform: ${{ matrix.os }})" + # Validate that PACKAGE_DIR is set and exists + if [ -z "$PACKAGE_DIR" ]; then + echo "ERROR: Could not find extracted package directory" + echo "Current directory contents:" + ls -la + exit 1 + fi + + # Validate that the directory contains a Python package configuration file + if [ ! -f "$PACKAGE_DIR/setup.py" ] && [ ! -f "$PACKAGE_DIR/setup.cfg" ] && [ ! -f "$PACKAGE_DIR/pyproject.toml" ]; then + echo "ERROR: Package directory does not contain setup.py, setup.cfg, or pyproject.toml" + echo "Directory contents:" + ls -la "$PACKAGE_DIR" + exit 1 + fi + + echo "PACKAGE_DIR=$PACKAGE_DIR" >> "$GITHUB_ENV" + echo "Building package in: $PACKAGE_DIR" + + - name: Apply patches + if: steps.check-skip.outputs.skip != 'true' && toJSON(matrix.package.patches) != '[]' + run: | + echo "Applying patches to package in: ${{ env.PACKAGE_DIR }}" + cd "${{ env.PACKAGE_DIR }}" + # Apply each patch + PATCH_INDEX=0 + PATCHES='${{ toJSON(matrix.package.patches) }}' + echo "$PATCHES" | jq -r '.[]' | while read -r patch_path; do + PATCH_INDEX=$((PATCH_INDEX + 1)) + if [[ "$patch_path" =~ ^https?:// ]]; then + # Download patch from URL + echo "Downloading patch from URL: $patch_path" + curl -L -o "/tmp/patch_${PATCH_INDEX}.patch" "$patch_path" + PATCH_FILE="/tmp/patch_${PATCH_INDEX}.patch" + else + # Use local patch file + echo "Using local patch: $patch_path" + # Convert to absolute path from repository root + if [[ ! 
"$patch_path" =~ ^/ ]]; then + PATCH_FILE="${GITHUB_WORKSPACE}/$patch_path" + else + PATCH_FILE="$patch_path" + fi + fi - # Wait for the artifact to be available (polling approach) - MAX_ATTEMPTS=60 # 60 attempts * 30 seconds = 30 minutes max wait - ATTEMPT=0 - ARTIFACT_NAME="cibw-wheels-${{ matrix.os }}-${dep_name}" + echo "Applying patch ${PATCH_INDEX}..." + patch -p1 < "$PATCH_FILE" || { + echo "Failed to apply patch with -p1, trying -p0" + patch -p0 < "$PATCH_FILE" + } - while [ $ATTEMPT -lt $MAX_ATTEMPTS ]; do - echo "Attempt $((ATTEMPT + 1))/$MAX_ATTEMPTS: Checking for artifact $ARTIFACT_NAME..." - - # Use GitHub API to check if artifact exists - ARTIFACTS_JSON=$(curl -s -H "Authorization: token ${{ github.token }}" \ - "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/artifacts") - - # Check if our artifact exists - ARTIFACT_ID=$(echo "$ARTIFACTS_JSON" | jq -r ".artifacts[] | select(.name == \"$ARTIFACT_NAME\") | .id") - - if [ -n "$ARTIFACT_ID" ] && [ "$ARTIFACT_ID" != "null" ]; then - echo "✓ Found artifact with ID: $ARTIFACT_ID" - - # Download the artifact using GitHub API - mkdir -p "/tmp/build_deps/${dep_name}" - DOWNLOAD_URL=$(echo "$ARTIFACTS_JSON" | jq -r ".artifacts[] | select(.name == \"$ARTIFACT_NAME\") | .archive_download_url") - - echo "Downloading artifact from: $DOWNLOAD_URL" - curl -L -H "Authorization: token ${{ github.token }}" \ - -o "/tmp/build_deps/${dep_name}.zip" "$DOWNLOAD_URL" - - # Extract the artifact - unzip -q "/tmp/build_deps/${dep_name}.zip" -d "/tmp/build_deps/${dep_name}" - rm "/tmp/build_deps/${dep_name}.zip" - - # Install the wheel - if ls /tmp/build_deps/${dep_name}/*.whl 1> /dev/null 2>&1; then - echo "Installing wheel(s) from $dep_name..." 
- python -m pip install /tmp/build_deps/${dep_name}/*.whl - echo "✓ Installed $dep_name successfully" - else - echo "Warning: No wheel files found in artifact for $dep_name" - fi - break + # Clean up if it was a downloaded patch + if [[ "$patch_path" =~ ^https?:// ]]; then + rm "/tmp/patch_${PATCH_INDEX}.patch" + fi + done + echo "All patches applied successfully" + + - name: Build wheels + working-directory: ${{ env.PACKAGE_DIR }} + if: steps.check-skip.outputs.skip != 'true' + env: + CIBW_PLATFORM: ${{ matrix.platform }} + CIBW_ARCHS: ${{ matrix.archs }} + CIBW_BUILD: cp314-* + # Pass through environment variables needed by build scripts + CIBW_ENVIRONMENT_PASS_LINUX: GITHUB_WORKSPACE HOST_DEPENDENCIES + CIBW_ENVIRONMENT_PASS_MACOS: GITHUB_WORKSPACE HOST_DEPENDENCIES + # Set HOST_DEPENDENCIES for use in build scripts + HOST_DEPENDENCIES: ${{ join(matrix.package.host_dependencies, ' ') }} + # Apply package-specific cibuildwheel environment variables if specified + CIBW_ENVIRONMENT: ${{ matrix.package.cibw_environment }} + # Override before_all if specified in recipe (empty string disables it) + # If host dependencies exist and before_all is set, append the env setup script + CIBW_BEFORE_ALL: | + ${{ matrix.package.cibw_before_all }} + if [ -n "$HOST_DEPENDENCIES" ]; then + echo "Setting up cross-compilation environment for host dependencies..." + if [ -f "$GITHUB_WORKSPACE/scripts/setup_cross_compile_env.sh" ]; then + # Run script in bash, capture environment variables, and source them in current shell + bash -c ". '$GITHUB_WORKSPACE/scripts/setup_cross_compile_env.sh' >/dev/null 2>&1 && env" | \ + grep -E '^(CFLAGS|CPPFLAGS|LDFLAGS|PKG_CONFIG_PATH|.*_(INCLUDE|LIB)_DIR)=' | \ + sed 's/^/export /' | sed 's/=/="/' | sed 's/$/"/' > /tmp/build_env.sh + . 
/tmp/build_env.sh + rm /tmp/build_env.sh + fi + fi + # Override config_settings if specified in recipe + CIBW_CONFIG_SETTINGS: ${{ matrix.package.cibw_config_settings }} + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==3.3.0 + echo "Running cibuildwheel (platform=$CIBW_PLATFORM, archs=$CIBW_ARCHS) in $(pwd)" + ls -la + # Run cibuildwheel from the package directory; output wheels to ./wheelhouse + python -m cibuildwheel --output-dir wheelhouse . + + - uses: actions/upload-artifact@v4 + if: steps.check-skip.outputs.skip != 'true' + with: + name: cibw-wheels-${{ matrix.os }}-${{ matrix.package.name }} + path: ./wheelhouse/*.whl + + # Build packages with no dependencies and not depended upon by others + build_independent_packages: + name: Build ${{ matrix.package.name }} for ${{ matrix.os }} + needs: read_packages + if: fromJson(needs.read_packages.outputs.independent-packages)[0] != null + runs-on: ${{ matrix.runs-on }} + permissions: + contents: read + defaults: + run: + shell: bash + strategy: + fail-fast: false + matrix: + package: ${{ fromJson(needs.read_packages.outputs.independent-packages) }} + os: [android-arm64_v8a, android-x86_64, ios] + include: + - os: android-arm64_v8a + runs-on: ubuntu-latest + platform: android + archs: arm64_v8a + - os: android-x86_64 + runs-on: ubuntu-latest + platform: android + archs: x86_64 + - os: ios + runs-on: macos-latest + platform: ios + archs: all + steps: + - name: Check if platform should be skipped + id: check-skip + run: | + # Check if this platform is in the skip_platforms list + skip_platforms='${{ toJSON(matrix.package.skip_platforms) }}' + current_platform='${{ matrix.platform }}' + + if echo "$skip_platforms" | jq -e --arg platform "$current_platform" 'index($platform)' > /dev/null; then + echo "Skipping build for $current_platform (in skip_platforms list)" + echo "skip=true" >> "$GITHUB_OUTPUT" + else + echo "Building for $current_platform" + echo "skip=false" >> "$GITHUB_OUTPUT" 
+ fi + + - uses: actions/checkout@v5 + if: steps.check-skip.outputs.skip != 'true' + + - name: Set up Python + if: steps.check-skip.outputs.skip != 'true' + uses: actions/setup-python@v5 + with: + python-version: '3.14' + + - name: Install host dependencies (Ubuntu) + if: steps.check-skip.outputs.skip != 'true' && runner.os == 'Linux' && matrix.package.host_dependencies[0] != null + run: | + echo "Installing host dependencies: ${{ join(matrix.package.host_dependencies, ' ') }}" + sudo apt-get update + sudo apt-get install -y ${{ join(matrix.package.host_dependencies, ' ') }} + + - name: Install host dependencies (macOS) + if: steps.check-skip.outputs.skip != 'true' && runner.os == 'macOS' && matrix.package.host_dependencies[0] != null + run: | + echo "Installing host dependencies: ${{ join(matrix.package.host_dependencies, ' ') }}" + # Map common Linux package names to macOS equivalents + deps="${{ join(matrix.package.host_dependencies, ' ') }}" + deps="${deps//libffi-dev/libffi}" + deps="${deps//libssl-dev/openssl}" + deps="${deps//libjpeg-dev/jpeg}" + deps="${deps//libpng-dev/libpng}" + deps="${deps//libtiff-dev/libtiff}" + deps="${deps//libfreetype6-dev/freetype}" + deps="${deps//liblcms2-dev/little-cms2}" + deps="${deps//libwebp-dev/webp}" + brew install "$deps" || true + + - name: Install pip dependencies + if: steps.check-skip.outputs.skip != 'true' && matrix.package.pip_dependencies[0] != null + run: | + echo "Installing pip dependencies: ${{ join(matrix.package.pip_dependencies, ' ') }}" + python -m pip install ${{ join(matrix.package.pip_dependencies, ' ') }} + + # Level 0 has no build dependencies, skip that step + + - name: Download package source + if: steps.check-skip.outputs.skip != 'true' + run: | + python -m pip install --upgrade pip + # Check if custom URL is specified + if [ "${{ matrix.package.source }}" = "url" ] && [ -n "${{ matrix.package.url }}" ]; then + echo "Downloading from custom URL: ${{ matrix.package.url }}" + curl -L -o 
package_source "${{ matrix.package.url }}" + # Determine file type and extract + file package_source + if file package_source | grep -q "gzip"; then + mv package_source package.tar.gz + tar -xzf package.tar.gz && rm package.tar.gz + elif file package_source | grep -q "Zip"; then + mv package_source package.zip + unzip package.zip && rm package.zip + elif file package_source | grep -q "tar"; then + mv package_source package.tar + tar -xf package.tar && rm package.tar + else + echo "Unknown file type, trying as tarball" + mv package_source package.tar.gz + tar -xzf package.tar.gz && rm package.tar.gz + fi + elif [ "${{ matrix.package.source }}" = "git" ] && [ -n "${{ matrix.package.url }}" ]; then + echo "Cloning from git: ${{ matrix.package.url }}" + git clone "${{ matrix.package.url }}" package_dir + else + echo "Downloading from PyPI: ${{ matrix.package.spec }}" + pip download --no-binary :all: --no-deps "${{ matrix.package.spec }}" + # Extract the downloaded package + for file in *.tar.gz; do [ -f "$file" ] && tar -xzf "$file" && rm "$file"; done + for file in *.zip; do [ -f "$file" ] && unzip "$file" && rm "$file"; done + for file in *.tar; do [ -f "$file" ] && tar -xf "$file" && rm "$file"; done + fi + # Find the extracted directory (exclude common repo directories and scripts) + PACKAGE_DIR=$(find . -maxdepth 1 -type d -not -name ".*" -not -name "__pycache__" -not -name ".github" -not -name "recipes" -not -name "scripts" -not -name "." | head -n 1) + + # Validate that PACKAGE_DIR is set and exists + if [ -z "$PACKAGE_DIR" ]; then + echo "ERROR: Could not find extracted package directory" + echo "Current directory contents:" + ls -la + exit 1 + fi + + # Validate that the directory contains a Python package configuration file + if [ ! -f "$PACKAGE_DIR/setup.py" ] && [ ! -f "$PACKAGE_DIR/setup.cfg" ] && [ ! 
-f "$PACKAGE_DIR/pyproject.toml" ]; then + echo "ERROR: Package directory does not contain setup.py, setup.cfg, or pyproject.toml" + echo "Directory contents:" + ls -la "$PACKAGE_DIR" + exit 1 + fi + + echo "PACKAGE_DIR=$PACKAGE_DIR" >> "$GITHUB_ENV" + echo "Building package in: $PACKAGE_DIR" + + - name: Apply patches + if: steps.check-skip.outputs.skip != 'true' && toJSON(matrix.package.patches) != '[]' + run: | + echo "Applying patches to package in: ${{ env.PACKAGE_DIR }}" + cd "${{ env.PACKAGE_DIR }}" + # Apply each patch + PATCH_INDEX=0 + PATCHES='${{ toJSON(matrix.package.patches) }}' + echo "$PATCHES" | jq -r '.[]' | while read -r patch_path; do + PATCH_INDEX=$((PATCH_INDEX + 1)) + if [[ "$patch_path" =~ ^https?:// ]]; then + # Download patch from URL + echo "Downloading patch from URL: $patch_path" + curl -L -o "/tmp/patch_${PATCH_INDEX}.patch" "$patch_path" + PATCH_FILE="/tmp/patch_${PATCH_INDEX}.patch" + else + # Use local patch file + echo "Using local patch: $patch_path" + # Convert to absolute path from repository root + if [[ ! "$patch_path" =~ ^/ ]]; then + PATCH_FILE="${GITHUB_WORKSPACE}/$patch_path" else - if [ $ATTEMPT -eq 0 ]; then - echo "Artifact not yet available, waiting for build to complete..." - fi - ATTEMPT=$((ATTEMPT + 1)) - if [ $ATTEMPT -lt $MAX_ATTEMPTS ]; then - sleep 30 - else - echo "ERROR: Timeout waiting for dependency $dep_name" - echo "The build dependency '$dep_name' did not complete in time." - exit 1 - fi + PATCH_FILE="$patch_path" fi - done + fi + + echo "Applying patch ${PATCH_INDEX}..." 
+ patch -p1 < "$PATCH_FILE" || { + echo "Failed to apply patch with -p1, trying -p0" + patch -p0 < "$PATCH_FILE" + } + + # Clean up if it was a downloaded patch + if [[ "$patch_path" =~ ^https?:// ]]; then + rm "/tmp/patch_${PATCH_INDEX}.patch" + fi done + echo "All patches applied successfully" + + - name: Build wheels + working-directory: ${{ env.PACKAGE_DIR }} + if: steps.check-skip.outputs.skip != 'true' + env: + CIBW_PLATFORM: ${{ matrix.platform }} + CIBW_ARCHS: ${{ matrix.archs }} + CIBW_BUILD: cp314-* + # Pass through environment variables needed by build scripts + CIBW_ENVIRONMENT_PASS_LINUX: GITHUB_WORKSPACE HOST_DEPENDENCIES + CIBW_ENVIRONMENT_PASS_MACOS: GITHUB_WORKSPACE HOST_DEPENDENCIES + # Set HOST_DEPENDENCIES for use in build scripts + HOST_DEPENDENCIES: ${{ join(matrix.package.host_dependencies, ' ') }} + # Apply package-specific cibuildwheel environment variables if specified + CIBW_ENVIRONMENT: ${{ matrix.package.cibw_environment }} + # Override before_all if specified in recipe (empty string disables it) + # If host dependencies exist and before_all is set, append the env setup script + CIBW_BEFORE_ALL: | + ${{ matrix.package.cibw_before_all }} + if [ -n "$HOST_DEPENDENCIES" ]; then + echo "Setting up cross-compilation environment for host dependencies..." + if [ -f "$GITHUB_WORKSPACE/scripts/setup_cross_compile_env.sh" ]; then + # Run script in bash, capture environment variables, and source them in current shell + bash -c ". '$GITHUB_WORKSPACE/scripts/setup_cross_compile_env.sh' >/dev/null 2>&1 && env" | \ + grep -E '^(CFLAGS|CPPFLAGS|LDFLAGS|PKG_CONFIG_PATH|.*_(INCLUDE|LIB)_DIR)=' | \ + sed 's/^/export /' | sed 's/=/="/' | sed 's/$/"/' > /tmp/build_env.sh + . 
/tmp/build_env.sh + rm /tmp/build_env.sh + fi + fi + # Override config_settings if specified in recipe + CIBW_CONFIG_SETTINGS: ${{ matrix.package.cibw_config_settings }} + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==3.3.0 + echo "Running cibuildwheel (platform=$CIBW_PLATFORM, archs=$CIBW_ARCHS) in $(pwd)" + ls -la + # Run cibuildwheel from the package directory; output wheels to ./wheelhouse + python -m cibuildwheel --output-dir wheelhouse . + + - uses: actions/upload-artifact@v4 + if: steps.check-skip.outputs.skip != 'true' + with: + name: cibw-wheels-${{ matrix.os }}-${{ matrix.package.name }} + path: ./wheelhouse/*.whl + + # Build packages that have dependencies on other packages + build_dependent_packages: + name: Build ${{ matrix.package.name }} for ${{ matrix.os }} + needs: [read_packages, build_dependency_packages] + if: fromJson(needs.read_packages.outputs.dependent-packages)[0] != null + runs-on: ${{ matrix.runs-on }} + permissions: + contents: read + defaults: + run: + shell: bash + strategy: + fail-fast: false + matrix: + package: ${{ fromJson(needs.read_packages.outputs.dependent-packages) }} + os: [android-arm64_v8a, android-x86_64, ios] + include: + - os: android-arm64_v8a + runs-on: ubuntu-latest + platform: android + archs: arm64_v8a + - os: android-x86_64 + runs-on: ubuntu-latest + platform: android + archs: x86_64 + - os: ios + runs-on: macos-latest + platform: ios + archs: all + steps: + - name: Check if platform should be skipped + id: check-skip + run: | + # Check if this platform is in the skip_platforms list + skip_platforms='${{ toJSON(matrix.package.skip_platforms) }}' + current_platform='${{ matrix.platform }}' - echo "All build dependencies installed successfully" + if echo "$skip_platforms" | jq -e --arg platform "$current_platform" 'index($platform)' > /dev/null; then + echo "Skipping build for $current_platform (in skip_platforms list)" + echo "skip=true" >> "$GITHUB_OUTPUT" + else + echo 
"Building for $current_platform" + echo "skip=false" >> "$GITHUB_OUTPUT" + fi + + - uses: actions/checkout@v5 + if: steps.check-skip.outputs.skip != 'true' + + - name: Set up Python + if: steps.check-skip.outputs.skip != 'true' + uses: actions/setup-python@v5 + with: + python-version: '3.14' + + - name: Install host dependencies (Ubuntu) + if: steps.check-skip.outputs.skip != 'true' && runner.os == 'Linux' && matrix.package.host_dependencies[0] != null + run: | + echo "Installing host dependencies: ${{ join(matrix.package.host_dependencies, ' ') }}" + sudo apt-get update + sudo apt-get install -y ${{ join(matrix.package.host_dependencies, ' ') }} + + - name: Install host dependencies (macOS) + if: steps.check-skip.outputs.skip != 'true' && runner.os == 'macOS' && matrix.package.host_dependencies[0] != null + run: | + echo "Installing host dependencies: ${{ join(matrix.package.host_dependencies, ' ') }}" + # Map common Linux package names to macOS equivalents + deps="${{ join(matrix.package.host_dependencies, ' ') }}" + deps="${deps//libffi-dev/libffi}" + deps="${deps//libssl-dev/openssl}" + deps="${deps//libjpeg-dev/jpeg}" + deps="${deps//libpng-dev/libpng}" + deps="${deps//libtiff-dev/libtiff}" + deps="${deps//libfreetype6-dev/freetype}" + deps="${deps//liblcms2-dev/little-cms2}" + deps="${deps//libwebp-dev/webp}" + brew install "$deps" || true + + - name: Install pip dependencies + if: steps.check-skip.outputs.skip != 'true' && matrix.package.pip_dependencies[0] != null + run: | + echo "Installing pip dependencies: ${{ join(matrix.package.pip_dependencies, ' ') }}" + python -m pip install ${{ join(matrix.package.pip_dependencies, ' ') }} + + - name: Download and install build dependencies + if: steps.check-skip.outputs.skip != 'true' && matrix.package.build_dependencies[0] != null + uses: actions/download-artifact@v4 + with: + path: /tmp/build_deps + pattern: cibw-wheels-${{ matrix.os }}-* + + - name: Install build dependency wheels + if: 
steps.check-skip.outputs.skip != 'true' && matrix.package.build_dependencies[0] != null + run: | + # Install dependency wheels + BUILD_DEPS='${{ toJSON(matrix.package.build_dependencies) }}' + echo "$BUILD_DEPS" | jq -r '.[]' | while read -r dep_name; do + ARTIFACT_DIR="/tmp/build_deps/cibw-wheels-${{ matrix.os }}-${dep_name}" + if [ -d "$ARTIFACT_DIR" ] && ls "$ARTIFACT_DIR"/*.whl 1> /dev/null 2>&1; then + echo "Installing wheel(s) from $dep_name..." + python -m pip install "$ARTIFACT_DIR"/*.whl + echo "✓ Installed $dep_name successfully" + else + echo "Warning: No wheel files found for dependency $dep_name" + fi + done - name: Download package source if: steps.check-skip.outputs.skip != 'true' @@ -333,10 +760,8 @@ jobs: deploy_index: name: Deploy wheel index to GitHub Pages - needs: [read_packages, build_wheels] + needs: [read_packages, build_dependency_packages, build_independent_packages, build_dependent_packages] runs-on: ubuntu-latest - # Deploy even if some builds fail (but not if cancelled) - # This allows successful wheels to be published immediately if: always() && !cancelled() && (github.event_name == 'push' || github.event_name == 'release') permissions: contents: read @@ -366,7 +791,6 @@ jobs: - name: Organize wheels run: | mkdir -p wheels - # Move all wheels to a single directory find artifacts -name "*.whl" -exec cp {} wheels/ \; ls -lh wheels/ diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 944b3bc..e94a391 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -45,26 +45,34 @@ packages: ### How It Works -1. **Dependency Resolution**: When you run `read_packages.py`, it performs a topological sort on all packages based on their `build_dependencies`. This ensures packages are built in the correct order. +1. **Dependency Resolution**: When you run `read_packages.py`, it performs a topological sort on all packages based on their `build_dependencies`. 
This ensures packages are built in the correct order and assigns each package a dependency level (0 = no dependencies, 1 = depends on level 0 packages, etc.). -2. **Build Order**: Packages with no dependencies are built first, followed by packages that depend on them. +2. **Build Order**: The workflow uses GitHub Actions' `needs` keyword to create a dependency chain: + - Packages that other packages depend on build first in the `build_dependency_packages` job + - Packages with build dependencies build next in the `build_dependent_packages` job, which depends on `build_dependency_packages` via `needs` + - Fully independent packages build in parallel in the `build_independent_packages` job, with no ordering constraints + - Packages within the same job build in parallel using matrix strategy -3. **Waiting for Dependencies**: During the build process, if a package has build dependencies: - - The workflow waits for the dependency wheels to be uploaded as artifacts - - It polls the GitHub API every 30 seconds for up to 30 minutes - - Once available, it downloads and installs the dependency wheels - - Then proceeds with building the current package +3. **Dependency Handling**: When a package has build dependencies: + - The job waits for the `build_dependency_packages` job to complete (enforced by `needs`) + - Once that job completes, dependency artifacts are guaranteed to be available + - The workflow downloads dependency artifacts using `actions/download-artifact@v4` + - Dependency wheels are installed before building the current package + - No polling required - GitHub Actions handles job orchestration 4. 
**Error Handling**: - - Circular dependencies are detected and reported as errors + - Circular dependencies are detected during `read_packages.py` and reported as errors - Missing dependencies (not in the package list) generate warnings - - Timeout waiting for dependencies causes the build to fail + - If a dependency build fails, dependent builds are automatically skipped + - Clear visualization of dependency chain in GitHub Actions UI ### Important Notes - Build dependencies must be packages defined in the same workflow (recipes or packages.yaml) - For external Python dependencies from PyPI, use `pip_dependencies` instead - Build dependencies are resolved per platform (Android/iOS builds are independent) +- The dependency level system supports up to 4 levels (0-3); expand if needed + ## Host Dependencies diff --git a/read_packages.py b/read_packages.py index 00c28c3..c38e9cd 100755 --- a/read_packages.py +++ b/read_packages.py @@ -208,6 +208,42 @@ def read_txt_config(config_file): return packages_data +def calculate_dependency_levels(packages_data): + """Calculate dependency level for each package (0 = no deps, 1 = depends on level 0, etc.).""" + pkg_map = {pkg['name']: pkg for pkg in packages_data} + levels = {} + + # Build reverse dependency graph (package -> packages it depends on) + dependencies = {pkg['name']: pkg.get('build_dependencies', []) for pkg in packages_data} + + # Calculate levels iteratively + remaining = set(pkg['name'] for pkg in packages_data) + current_level = 0 + + while remaining: + # Find packages whose dependencies are all satisfied + level_packages = [] + for pkg_name in remaining: + deps = dependencies[pkg_name] + # Check if all dependencies are already assigned a level + if all(dep in levels or dep not in pkg_map for dep in deps): + level_packages.append(pkg_name) + + if not level_packages: + # Circular dependency or missing dependency + print(f"Error: Could not resolve dependencies for remaining packages: {', '.join(remaining)}", 
file=sys.stderr) + sys.exit(1) + + # Assign current level to these packages + for pkg_name in level_packages: + levels[pkg_name] = current_level + remaining.remove(pkg_name) + + current_level += 1 + + return levels + + def topological_sort(packages_data): """Sort packages based on build_dependencies using topological sort.""" # Build a mapping from package names to package data @@ -217,11 +253,15 @@ def topological_sort(packages_data): graph = {pkg['name']: [] for pkg in packages_data} in_degree = {pkg['name']: 0 for pkg in packages_data} + # Track which packages are depended upon by others + is_dependency_of_others = set() + for pkg in packages_data: for dep in pkg.get('build_dependencies', []): if dep in pkg_map: graph[dep].append(pkg['name']) in_degree[pkg['name']] += 1 + is_dependency_of_others.add(dep) # Mark this package as a dependency else: print(f"Warning: Package {pkg['name']} depends on {dep} which is not in the package list", file=sys.stderr) @@ -246,6 +286,13 @@ def topological_sort(packages_data): print(f"Error: Circular dependency detected among packages: {', '.join(remaining)}", file=sys.stderr) sys.exit(1) + # Calculate and add dependency levels + levels = calculate_dependency_levels(sorted_packages) + for pkg in sorted_packages: + pkg['dependency_level'] = levels[pkg['name']] + # Mark if this package is a dependency of others + pkg['is_dependency'] = pkg['name'] in is_dependency_of_others + return sorted_packages @@ -292,16 +339,23 @@ def main(): # Also output summary to stderr for logging print(f"\nFound {len(packages_data)} packages:", file=sys.stderr) - for pkg in packages_data: - info_parts = [] - if pkg['host_dependencies']: - info_parts.append(f"host deps: {', '.join(pkg['host_dependencies'])}") - if pkg.get('build_dependencies'): - info_parts.append(f"build deps: {', '.join(pkg['build_dependencies'])}") - if pkg['patches']: - info_parts.append(f"patches: {len(pkg['patches'])}") - info = f" ({'; '.join(info_parts)})" if info_parts else "" - 
print(f" - {pkg['spec']}{info}", file=sys.stderr) + + # Group by dependency level + max_level = max(pkg.get('dependency_level', 0) for pkg in packages_data) + for level in range(max_level + 1): + level_packages = [pkg for pkg in packages_data if pkg.get('dependency_level', 0) == level] + if level_packages: + print(f"\n Level {level} ({len(level_packages)} packages):", file=sys.stderr) + for pkg in level_packages: + info_parts = [] + if pkg['host_dependencies']: + info_parts.append(f"host deps: {', '.join(pkg['host_dependencies'])}") + if pkg.get('build_dependencies'): + info_parts.append(f"build deps: {', '.join(pkg['build_dependencies'])}") + if pkg['patches']: + info_parts.append(f"patches: {len(pkg['patches'])}") + info = f" ({'; '.join(info_parts)})" if info_parts else "" + print(f" - {pkg['spec']}{info}", file=sys.stderr) if __name__ == '__main__': diff --git a/recipes/cffi/build_libffi.sh b/recipes/cffi/build_libffi.sh index b4648f1..2dbed25 100755 --- a/recipes/cffi/build_libffi.sh +++ b/recipes/cffi/build_libffi.sh @@ -117,14 +117,21 @@ if [ "$CIBW_PLATFORM" = "android" ]; then ls -la "$PREFIX/include" || true ls -la "$PREFIX/lib" || true - # Export environment variables for cffi to find libffi - # These will be available to the build process + # Export environment variables to a file for cibuildwheel + ENV_FILE="/tmp/libffi_env.sh" + echo "export FFI_INCLUDE_DIR='$PREFIX/include'" > "$ENV_FILE" + echo "export FFI_LIB_DIR='$PREFIX/lib'" >> "$ENV_FILE" + echo "export CFLAGS=\"\${CFLAGS} -I$PREFIX/include\"" >> "$ENV_FILE" + echo "export LDFLAGS=\"\${LDFLAGS} -L$PREFIX/lib\"" >> "$ENV_FILE" + + # Also export for immediate use export FFI_INCLUDE_DIR="$PREFIX/include" export FFI_LIB_DIR="$PREFIX/lib" - echo "Environment variables set:" + echo "Environment variables set and saved to $ENV_FILE:" echo " FFI_INCLUDE_DIR=$FFI_INCLUDE_DIR" echo " FFI_LIB_DIR=$FFI_LIB_DIR" + cat "$ENV_FILE" echo "libffi build complete for Android $ANDROID_ABI" @@ -210,13 +217,21 @@ 
elif [ "$CIBW_PLATFORM" = "ios" ]; then ls -la "$PREFIX/include" || true ls -la "$PREFIX/lib" || true - # Export environment variables + # Export environment variables to a file for cibuildwheel + ENV_FILE="/tmp/libffi_env.sh" + echo "export FFI_INCLUDE_DIR='$PREFIX/include'" > "$ENV_FILE" + echo "export FFI_LIB_DIR='$PREFIX/lib'" >> "$ENV_FILE" + echo "export CFLAGS=\"\${CFLAGS} -I$PREFIX/include\"" >> "$ENV_FILE" + echo "export LDFLAGS=\"\${LDFLAGS} -L$PREFIX/lib\"" >> "$ENV_FILE" + + # Also export for immediate use export FFI_INCLUDE_DIR="$PREFIX/include" export FFI_LIB_DIR="$PREFIX/lib" - echo "Environment variables set:" + echo "Environment variables set and saved to $ENV_FILE:" echo " FFI_INCLUDE_DIR=$FFI_INCLUDE_DIR" echo " FFI_LIB_DIR=$FFI_LIB_DIR" + cat "$ENV_FILE" echo "libffi build complete for iOS $SDK ($ARCH)" else diff --git a/recipes/cffi/recipe.yaml b/recipes/cffi/recipe.yaml index 66ec0a9..e891319 100644 --- a/recipes/cffi/recipe.yaml +++ b/recipes/cffi/recipe.yaml @@ -8,8 +8,16 @@ host_dependencies: # Build libffi for the target architecture before building cffi # This script downloads, cross-compiles, and installs libffi for Android/iOS -# The patched setup.py will automatically find the built library in /tmp/libffi-install-*/ -cibw_before_all: bash $GITHUB_WORKSPACE/recipes/cffi/build_libffi.sh +# The script also creates /tmp/libffi_env.sh with environment variables +# The patched setup.py will automatically find the built library +cibw_before_all: | + bash $GITHUB_WORKSPACE/recipes/cffi/build_libffi.sh + if [ -f /tmp/libffi_env.sh ]; then + echo "Sourcing libffi environment variables..." + . /tmp/libffi_env.sh + echo "FFI_INCLUDE_DIR=$FFI_INCLUDE_DIR" + echo "FFI_LIB_DIR=$FFI_LIB_DIR" + fi # CIBW environment variables to help with cross-compilation # PKG_CONFIG="" disables pkg-config to avoid finding host libraries