diff --git a/.circleci/check_pr_status.sh b/.circleci/check_pr_status.sh deleted file mode 100755 index 4b31a29698..0000000000 --- a/.circleci/check_pr_status.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# Extract the repository owner -REPO_OWNER=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $(NF-3)}') - -# Extract the repository name -REPO_NAME=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $(NF-2)}') - -# Extract the pull request number -PR_NUMBER=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $NF}') - - -PR_DETAILS=$(curl -s \ - "https://api.github.com/repos/$REPO_OWNER/$REPO_NAME/pulls/$PR_NUMBER") - - -IS_DRAFT=$(echo "$PR_DETAILS" | jq -r .draft) -echo $IS_DRAFT - -if [ "$IS_DRAFT" == "true" ]; then - echo "This PR is a draft. Skipping the workflow." - exit 1 -else - echo "This PR is not a draft. Proceeding with the workflow." - exit 0 -fi diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 1c14c8730a..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,300 +0,0 @@ -version: 2.1 - -orbs: - python: circleci/python@2.1.1 - python-lib: dialogue/python-lib@0.1.55 - -jobs: - check-if-pr-is-draft: - docker: - - image: cimg/python:3.10 - steps: - - checkout - - run: - name: Install jq - command: sudo apt-get update && sudo apt-get install -y jq - - run: - name: Check if PR is a draft - command: .circleci/check_pr_status.sh - - ruff: - resource_class: small - parameters: - python-version: - type: string - docker: - - image: cimg/python:<< parameters.python-version >> - - steps: - - checkout - - - restore_cache: - name: Restore cached ruff venv - keys: - - v2-pypi-py-ruff-<< parameters.python-version >> - - - run: - name: Update & Activate ruff venv - command: | - python -m venv .venv - . .venv/bin/activate - python -m pip install --upgrade uv - uv pip install ruff==0.11.5 - - - save_cache: - name: Save cached ruff venv - paths: - - ".venv/" - key: v2-pypi-py-ruff-<< parameters.python-version >> - - - run: - name: Ruff format check - command: | - . .venv/bin/activate - ruff format --diff . - - check_compatibility: - parameters: - python_version: - type: string - docker: - - image: cimg/python:3.10 - steps: - - checkout - - run: - name: Check if requirements files have changed - command: ./scripts/check_requirements_changes.sh - - run: - name: Install dependencies and Check compatibility - command: | - if [ "$REQUIREMENTS_CHANGED" == "true" ]; then - python -m pip install ".[dev,cli]" --dry-run --python-version << parameters.python_version >> --no-deps - else - echo "Skipping compatibility checks..." - fi - - build-and-test: - resource_class: medium - parallelism: 2 - parameters: - python-version: - type: string - docker: - - image: cimg/python:<< parameters.python-version >> - - steps: - - checkout - - - run: - name: Update & Activate venv - command: | - python -m venv .venv - . .venv/bin/activate - python -m pip install --upgrade uv - uv sync --extra dev --dev - - - run: - name: Install Bittensor - command: | - . .venv/bin/activate - uv sync --extra dev --dev - - - run: - name: Instantiate Mock Wallet - command: | - . .venv/bin/activate - ./scripts/create_wallet.sh - - - run: - name: Unit Tests - no_output_timeout: 20m - command: | - . .venv/bin/activate - export PYTHONUNBUFFERED=1 - pytest -n2 --reruns 3 --durations=0 --verbose --junitxml=test-results/unit_tests.xml \ - --cov=. 
--cov-append --cov-config .coveragerc \ - --splits $CIRCLE_NODE_TOTAL --group $((CIRCLE_NODE_INDEX + 1)) \ - --splitting-algorithm duration_based_chunks --store-durations --durations-path .test_durations \ - tests/unit_tests/ - - - run: - name: Integration Tests - no_output_timeout: 30m - command: | - . .venv/bin/activate - export PYTHONUNBUFFERED=1 - pytest -n2 --reruns 3 --reruns-delay 15 --durations=0 --verbose --junitxml=test-results/integration_tests.xml \ - --cov=. --cov-append --cov-config .coveragerc \ - --splits $CIRCLE_NODE_TOTAL --group $((CIRCLE_NODE_INDEX + 1)) \ - --splitting-algorithm duration_based_chunks --store-durations --durations-path .test_durations \ - tests/integration_tests/ - - - store_test_results: - path: test-results - - store_artifacts: - path: test-results - - - #- when: - #condition: - #equal: ["3.10.5", << parameters.python-version >> ] - #steps: - #- run: - #name: Upload Coverage - #command: | - #. .venv/bin/activate && coveralls - #env: - #CI_NAME: circleci - #CI_BUILD_NUMBER: $CIRCLE_BUILD_NUM - #CI_BUILD_URL: $CIRCLE_BUILD_URL - #CI_BRANCH: $CIRCLE_BRANCH - #CI_JOB_ID: $CIRCLE_NODE_INDEX - #COVERALLS_PARALLEL: true - - - lint-and-type-check: - resource_class: medium - parallelism: 2 - parameters: - python-version: - type: string - docker: - - image: cimg/python:<< parameters.python-version >> - - steps: - - checkout - - - run: - name: Update & Activate venv - command: | - python -m venv .venv - . .venv/bin/activate - python -m pip install --upgrade uv - uv sync --extra dev --dev - uv pip install flake8 - - - run: - name: Install Bittensor - command: | - . .venv/bin/activate - uv sync --extra dev --dev - - - run: - name: Lint with flake8 - command: | - . .venv/bin/activate - python -m flake8 bittensor/ --count - - - run: - name: Type check with mypy - command: | - . .venv/bin/activate - python -m mypy --ignore-missing-imports bittensor/ - - unit-tests-all-python-versions: - docker: - - image: cimg/python:3.10 - steps: - - run: - name: Placeholder command - command: echo "Success, only runs if all python versions ran" - - coveralls: - docker: - - image: cimg/python:3.10 - steps: - - run: - name: Combine Coverage - command: | - uv pip install --upgrade coveralls - coveralls --finish --rcfile .coveragerc || echo "Failed to upload coverage" - - check-changelog-updated: - docker: - - image: cimg/python:3.10 - steps: - - checkout - - run: - name: File CHANGELOG.md is updated - command: | - [[ $(git diff-tree --no-commit-id --name-only -r HEAD..master | grep CHANGELOG.md | wc -l) == 1 ]] && echo "CHANGELOG.md has changed" - - check-version-not-released: - docker: - - image: cimg/python:3.10 - steps: - - checkout - - run: - name: Git tag does not exist for the current version - command: | - [[ $(git tag | grep `cat VERSION` | wc -l) == 0 ]] && echo "VERSION is not a tag" - - run: - name: Pypi package 'bittensor' does not exist for the current version - command: | - [[ $(pip index versions bittensor | grep `cat VERSION` | wc -l) == 0 ]] && echo "Pypi package 'bittensor' does not exist" - - run: - name: Docker image 'opentensorfdn/bittensor' does not exist for the current version - command: | - [[ $(docker manifest inspect opentensorfdn/bittensor:`cat VERSION` > /dev/null 2> /dev/null ; echo $?) 
== 1 ]] && echo "Docker image 'opentensorfdn/bittensor:`cat VERSION`' does not exist in dockerhub" - -workflows: - compatibility_checks: - jobs: - - check_compatibility: - python_version: "3.9" - name: check-compatibility-3.9 - - check_compatibility: - python_version: "3.10" - name: check-compatibility-3.10 - - check_compatibility: - python_version: "3.11" - name: check-compatibility-3.11 - - check_compatibility: - python_version: "3.12" - name: check-compatibility-3.12 - - check_compatibility: - python_version: "3.13" - name: check-compatibility-3.13 - - - pr-requirements: - jobs: - - check-if-pr-is-draft - - ruff: - python-version: "3.9.13" - requires: - - check-if-pr-is-draft - - build-and-test: - matrix: - parameters: - python-version: [ "3.9.13", "3.10.6", "3.11.4", "3.12.7", "3.13.1" ] - requires: - - check-if-pr-is-draft - - unit-tests-all-python-versions: - requires: - - build-and-test - - lint-and-type-check: - matrix: - parameters: - python-version: [ "3.9.13", "3.10.6", "3.11.4", "3.12.7", "3.13.1" ] - requires: - - check-if-pr-is-draft - #- coveralls: - #requires: - #- build-and-test - - release-branches-requirements: - jobs: - - check-changelog-updated: - filters: - branches: - only: - - /^(release|hotfix)/.*/ - - release-requirements: - jobs: - - check-version-not-released: - filters: - branches: - only: - - master diff --git a/.github/workflows/auto-assign.yml b/.github/workflows/auto-assign.yml deleted file mode 100644 index 3a952f91b8..0000000000 --- a/.github/workflows/auto-assign.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: Auto Assign Cortex to Pull Requests - -on: - pull_request: - types: [opened, reopened] - -jobs: - auto-assign: - runs-on: ubuntu-latest - steps: - - name: Auto-assign Cortex Team - uses: kentaro-m/auto-assign-action@v1.2.4 - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" - configuration-path: .github/auto_assign.yml \ No newline at end of file diff --git a/.github/workflows/changelog-checker.yml b/.github/workflows/changelog-checker.yml new file mode 100644 index 0000000000..8c8de24a0d --- /dev/null +++ b/.github/workflows/changelog-checker.yml @@ -0,0 +1,24 @@ +name: Changelog guard (for release of hotfix) + +permissions: + contents: read + +on: + pull_request: + branches: + - staging + - master + +jobs: + changelog: + if: startsWith(github.head_ref, 'release/') || startsWith(github.head_ref, 'hotfix/') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: tj-actions/changed-files@v42 + id: changed + - name: Ensure CHANGELOG.md updated + if: contains(steps.changed.outputs.all_changed_files, 'CHANGELOG.md') == false + uses: actions/github-script@v7 + with: + script: core.setFailed('CHANGELOG.md must be updated.') diff --git a/.github/workflows/compatibility.yml b/.github/workflows/compatibility.yml new file mode 100644 index 0000000000..2ef9a1b95f --- /dev/null +++ b/.github/workflows/compatibility.yml @@ -0,0 +1,27 @@ +name: Requirements compatibility for supported Python versions +permissions: + contents: read + +on: + pull_request: + paths: + - "pyproject.toml" + +jobs: + compatibility: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - run: | + python -m pip install --upgrade pip + python -m pip install ".[dev,cli]" --dry-run --no-deps diff --git a/.github/workflows/e2e-subtensor-tests.yaml 
b/.github/workflows/e2e-subtensor-tests.yaml index 364b96698b..265b358a84 100644 --- a/.github/workflows/e2e-subtensor-tests.yaml +++ b/.github/workflows/e2e-subtensor-tests.yaml @@ -1,19 +1,14 @@ name: E2E Subtensor Tests concurrency: - group: e2e-subtensor-${{ github.ref }} + group: e2e-subtensor-${{ github.event.pull_request.number || github.ref }} cancel-in-progress: true on: - push: - branches: [master, development, staging] - pull_request: - branches: [master, development, staging] - types: [ opened, synchronize, reopened, ready_for_review ] - - schedule: - - cron: '0 9 * * *' # Run every night at 2:00 PST + branches: + - '**' + types: [ opened, synchronize, reopened, ready_for_review, labeled, unlabeled ] workflow_dispatch: inputs: @@ -28,10 +23,10 @@ env: # job to run tests in parallel jobs: - + # Looking for e2e tests find-tests: runs-on: ubuntu-latest - if: ${{ github.event_name != 'pull_request' || github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.draft == false }} outputs: test-files: ${{ steps.get-tests.outputs.test-files }} steps: @@ -44,20 +39,69 @@ jobs: test_files=$(find tests/e2e_tests -name "test*.py" | jq -R -s -c 'split("\n") | map(select(. != ""))') # keep it here for future debug # test_files=$(find tests/e2e_tests -type f -name "test*.py" | grep -E 'test_(hotkeys|staking)\.py$' | jq -R -s -c 'split("\n") | map(select(. != ""))') + echo "Found test files: $test_files" echo "test-files=$test_files" >> "$GITHUB_OUTPUT" shell: bash + # Pull docker image pull-docker-image: runs-on: ubuntu-latest + outputs: + image-name: ${{ steps.set-image.outputs.image }} steps: + - name: Set Docker image tag based on label or branch + id: set-image + run: | + echo "Event: $GITHUB_EVENT_NAME" + echo "Branch: $GITHUB_REF_NAME" + + echo "Reading labels ..." 
+ if [[ "${GITHUB_EVENT_NAME}" == "pull_request" ]]; then + labels=$(jq -r '.pull_request.labels[].name' "$GITHUB_EVENT_PATH") + else + labels="" + fi + + image="" + + for label in $labels; do + echo "Found label: $label" + case "$label" in + "subtensor-localnet:main") + image="ghcr.io/opentensor/subtensor-localnet:main" + break + ;; + "subtensor-localnet:testnet") + image="ghcr.io/opentensor/subtensor-localnet:testnet" + break + ;; + "subtensor-localnet:devnet") + image="ghcr.io/opentensor/subtensor-localnet:devnet" + break + ;; + esac + done + + if [[ -z "$image" ]]; then + # fallback to default based on branch + if [[ "${GITHUB_REF_NAME}" == "master" ]]; then + image="ghcr.io/opentensor/subtensor-localnet:main" + else + image="ghcr.io/opentensor/subtensor-localnet:devnet-ready" + fi + fi + + echo "โœ… Final selected image: $image" + echo "image=$image" >> "$GITHUB_OUTPUT" + - name: Log in to GitHub Container Registry run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin - name: Pull Docker Image - run: docker pull ghcr.io/opentensor/subtensor-localnet:devnet-ready + run: docker pull ${{ steps.set-image.outputs.image }} - name: Save Docker Image to Cache - run: docker save -o subtensor-localnet.tar ghcr.io/opentensor/subtensor-localnet:devnet-ready + run: docker save -o subtensor-localnet.tar ${{ steps.set-image.outputs.image }} - name: Upload Docker Image as Artifact uses: actions/upload-artifact@v4 @@ -104,111 +148,25 @@ jobs: - name: Load Docker Image run: docker load -i subtensor-localnet.tar -# - name: Run tests -# run: uv run pytest ${{ matrix.test-file }} -s - - name: Run tests with retry - run: | - set +e - for i in 1 2 3; do - echo "๐Ÿ” Attempt $i: Running tests" - uv run pytest ${{ matrix.test-file }} -s - status=$? - if [ $status -eq 0 ]; then - echo "โœ… Tests passed on attempt $i" - break - else - echo "โŒ Tests failed on attempt $i" - if [ $i -eq 3 ]; then - echo "Tests failed after 3 attempts" - exit 1 - fi - echo "Retrying..." 
- sleep 5 - fi - done - - # run non-fast-blocks only on Saturday and by cron schedule - check-if-saturday: - if: github.event_name == 'schedule' - runs-on: ubuntu-latest - outputs: - is-saturday: ${{ steps.check.outputs.is-saturday }} - steps: - - id: check - run: | - day=$(date -u +%u) - echo "Today is weekday $day" - if [ "$day" -ne 6 ]; then - echo "โญ๏ธ Skipping: not Saturday" - echo "is-saturday=false" >> "$GITHUB_OUTPUT" - exit 0 - fi - echo "is-saturday=true" - echo "is-saturday=true" >> "$GITHUB_OUTPUT" - - - cron-run-non-fast-blocks-e2e-test: - if: github.event_name == 'schedule' && needs.check-if-saturday.outputs.is-saturday == 'true' - name: "NFB: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}" - needs: - - check-if-saturday - - find-tests - - pull-docker-image - runs-on: ubuntu-latest - timeout-minutes: 1440 - - strategy: - fail-fast: false # Allow other matrix jobs to run even if this job fails - max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner) - matrix: - os: - - ubuntu-latest - test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }} - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - - steps: - - name: Check-out repository - uses: actions/checkout@v4 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install uv - uses: astral-sh/setup-uv@v4 - - - name: install dependencies - run: uv sync --extra dev --dev - - - name: Download Cached Docker Image - uses: actions/download-artifact@v4 - with: - name: subtensor-localnet - - - name: Load Docker Image - run: docker load -i subtensor-localnet.tar - - - name: Run patched E2E tests env: - FAST_BLOCKS: "0" + LOCALNET_IMAGE_NAME: ${{ needs.pull-docker-image.outputs.image-name }} run: | - set +e for i in 1 2 3; do - echo "๐Ÿ” Attempt $i: Running tests" - uv run pytest ${{ matrix.test-file }} -s - status=$? - if [ $status -eq 0 ]; then + echo "::group::๐Ÿ” Test attempt $i" + if uv run pytest ${{ matrix.test-file }} -s; then echo "โœ… Tests passed on attempt $i" - break + echo "::endgroup::" + exit 0 else echo "โŒ Tests failed on attempt $i" - if [ $i -eq 3 ]; then - echo "Tests failed after 3 attempts" - exit 1 + echo "::endgroup::" + if [ "$i" -lt 3 ]; then + echo "Retrying..." + sleep 5 fi - echo "Retrying..." 
- sleep 5 fi done + + echo "Tests failed after 3 attempts" + exit 1 diff --git a/.github/workflows/flake8-and-mypy.yml b/.github/workflows/flake8-and-mypy.yml new file mode 100644 index 0000000000..1fbe094728 --- /dev/null +++ b/.github/workflows/flake8-and-mypy.yml @@ -0,0 +1,56 @@ +name: Flake8 and Mypy - linters check +permissions: + contents: read + +on: + pull_request: + types: [opened, synchronize, reopened, edited] + +jobs: + linters: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + + strategy: + fail-fast: false + max-parallel: 5 + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Cache venv + id: cache + uses: actions/cache@v4 + with: + path: venv + key: | + v3-${{ runner.os }}-${{ runner.arch }}-${{ matrix.python-version }}-${{ hashFiles('pyproject.toml') }} + restore-keys: | + v3-${{ runner.os }}-${{ runner.arch }}-${{ matrix.python-version }}- + + - name: Install deps (flake8 + mypy + project.dev) + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + run: | + python -m venv venv + source venv/bin/activate + python -m pip install --upgrade pip + python -m pip install uv + python -m uv sync --extra dev --active + + - name: Flake8 + run: | + source venv/bin/activate + python -m flake8 bittensor/ --count + + - name: mypy + run: | + source venv/bin/activate + python -m mypy --ignore-missing-imports bittensor/ diff --git a/.github/workflows/nightly-e2e-tests-subtensor-main.yml b/.github/workflows/nightly-e2e-tests-subtensor-main.yml new file mode 100644 index 0000000000..589141ec1b --- /dev/null +++ b/.github/workflows/nightly-e2e-tests-subtensor-main.yml @@ -0,0 +1,363 @@ +name: Nightly E2E Subtensor tests + +permissions: + contents: read + packages: write + +concurrency: + group: e2e-subtensor-${{ github.ref }} + cancel-in-progress: true + +on: + schedule: + - cron: '0 9 * * *' # Run every night at 2:00 PST + + workflow_dispatch: + inputs: + verbose: + description: "Output more information when triggered manually" + required: false + default: "" + +env: + CARGO_TERM_COLOR: always + VERBOSE: ${{ github.event.inputs.verbose }} + +# job to run tests in parallel +jobs: + # Looking for e2e tests + find-tests: + runs-on: ubuntu-latest + if: ${{ github.event_name != 'pull_request' || github.event.pull_request.draft == false }} + outputs: + test-files: ${{ steps.get-tests.outputs.test-files }} + steps: + - name: Check-out repository under $GITHUB_WORKSPACE + uses: actions/checkout@v4 + + - name: Find test files + id: get-tests + run: | + test_files=$(find tests/e2e_tests -name "test*.py" | jq -R -s -c 'split("\n") | map(select(. != ""))') + # keep it here for future debug + # test_files=$(find tests/e2e_tests -type f -name "test*.py" | grep -E 'test_(hotkeys|staking)\.py$' | jq -R -s -c 'split("\n") | map(select(. 
!= ""))') + echo "Found test files: $test_files" + echo "test-files=$test_files" >> "$GITHUB_OUTPUT" + shell: bash + + # Pull docker images (devnet-ready and main) + pull-docker-images: + runs-on: ubuntu-latest + steps: + - name: Log in to GitHub Container Registry + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $GITHUB_ACTOR --password-stdin + + - name: Pull Docker Image + run: | + docker pull ghcr.io/opentensor/subtensor-localnet:main + docker pull ghcr.io/opentensor/subtensor-localnet:devnet-ready + + - name: List pulled images + run: docker images + + - name: Save Docker Images to Cache + run: | + docker save -o subtensor-localnet-main.tar ghcr.io/opentensor/subtensor-localnet:main + docker save -o subtensor-localnet-devnet-ready.tar ghcr.io/opentensor/subtensor-localnet:devnet-ready + + - name: Upload main Docker Image as Artifact + uses: actions/upload-artifact@v4 + with: + name: subtensor-localnet-main + path: subtensor-localnet-main.tar + + - name: Upload devnet-ready Docker Image as Artifact + uses: actions/upload-artifact@v4 + with: + name: subtensor-localnet-devnet-ready + path: subtensor-localnet-devnet-ready.tar + # Determine the day for non-fast-blocks run + check-if-saturday: + runs-on: ubuntu-latest + outputs: + is-saturday: ${{ steps.check.outputs.is-saturday }} + steps: + - id: check + run: | + day=$(date -u +%u) + echo "Today is weekday $day" + if [ "$day" -ne 6 ]; then + echo "โญ๏ธ Skipping: not Saturday" + echo "is-saturday=false" >> "$GITHUB_OUTPUT" + exit 0 + fi + echo "is-saturday=true" + echo "is-saturday=true" >> "$GITHUB_OUTPUT" + + # Daily run of fast-blocks tests from `bittensor:master` based on `subtensor:main docker` image + run-fast-blocks-e2e-test-master: + name: "FB master: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}" + needs: + - find-tests + - pull-docker-images + runs-on: ubuntu-latest + timeout-minutes: 25 + strategy: + fail-fast: false # Allow other matrix jobs to run even if this job fails + max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner) + matrix: + os: + - ubuntu-latest + test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }} + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + steps: + - name: Check-out repository + uses: actions/checkout@v4 + with: + ref: master + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: install dependencies + run: uv sync --extra dev --dev + + - name: Download Cached Docker Image + uses: actions/download-artifact@v4 + with: + name: subtensor-localnet-main + + - name: Load Docker Image + run: docker load -i subtensor-localnet-main.tar + + - name: Run tests with retry + env: + FAST_BLOCKS: "1" + LOCALNET_IMAGE_NAME: "ghcr.io/opentensor/subtensor-localnet:main" + run: | + set +e + for i in 1 2 3; do + echo "๐Ÿ” Attempt $i: Running tests" + uv run pytest ${{ matrix.test-file }} -s + status=$? + if [ $status -eq 0 ]; then + echo "โœ… Tests passed on attempt $i" + break + else + echo "โŒ Tests failed on attempt $i" + if [ $i -eq 3 ]; then + echo "Tests failed after 3 attempts" + exit 1 + fi + echo "Retrying..." 
+ sleep 5 + fi + done + + # Daily run of fast-blocks tests from `bittensor:staging` based on `subtensor:devnet-ready` docker image + run-fast-blocks-e2e-test-staging: + name: "FB staging: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}" + needs: + - find-tests + - pull-docker-images + runs-on: ubuntu-latest + timeout-minutes: 25 + strategy: + fail-fast: false # Allow other matrix jobs to run even if this job fails + max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner) + matrix: + os: + - ubuntu-latest + test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }} + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + steps: + - name: Check-out repository + uses: actions/checkout@v4 + with: + ref: staging + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: install dependencies + run: uv sync --extra dev --dev + + - name: Download Cached Docker Image + uses: actions/download-artifact@v4 + with: + name: subtensor-localnet-devnet-ready + + - name: Load Docker Image + run: docker load -i subtensor-localnet-devnet-ready.tar + + - name: Run tests with retry + env: + FAST_BLOCKS: "1" + LOCALNET_IMAGE_NAME: "ghcr.io/opentensor/subtensor-localnet:devnet-ready" + run: | + set +e + for i in 1 2 3; do + echo "๐Ÿ” Attempt $i: Running tests" + uv run pytest ${{ matrix.test-file }} -s + status=$? + if [ $status -eq 0 ]; then + echo "โœ… Tests passed on attempt $i" + break + else + echo "โŒ Tests failed on attempt $i" + if [ $i -eq 3 ]; then + echo "Tests failed after 3 attempts" + exit 1 + fi + echo "Retrying..." + sleep 5 + fi + done + + # Saturday run of non-fast-blocks tests from `bittensor:master` based on `subtensor:main` docker image + run-non-fast-blocks-e2e-test-master: + if: needs.check-if-saturday.outputs.is-saturday == 'true' + name: "NFB master: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}" + needs: + - check-if-saturday + - find-tests + - pull-docker-images + runs-on: ubuntu-latest + timeout-minutes: 1440 + + strategy: + fail-fast: false # Allow other matrix jobs to run even if this job fails + max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner) + matrix: + os: + - ubuntu-latest + test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }} + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Check-out repository + uses: actions/checkout@v4 + with: + ref: master + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: install dependencies + run: uv sync --extra dev --dev + + - name: Download Cached Docker Image + uses: actions/download-artifact@v4 + with: + name: subtensor-localnet-main + + - name: Load Docker Image + run: docker load -i subtensor-localnet-main.tar + + - name: Run patched E2E tests + env: + FAST_BLOCKS: "0" + LOCALNET_IMAGE_NAME: "ghcr.io/opentensor/subtensor-localnet:main" + run: | + set +e + for i in 1 2 3; do + echo "๐Ÿ” Attempt $i: Running tests" + uv run pytest ${{ matrix.test-file }} -s + status=$? 
+ if [ $status -eq 0 ]; then + echo "โœ… Tests passed on attempt $i" + break + else + echo "โŒ Tests failed on attempt $i" + if [ $i -eq 3 ]; then + echo "Tests failed after 3 attempts" + exit 1 + fi + echo "Retrying..." + sleep 5 + fi + done + + # Saturday run of non-fast-blocks tests from `bittensor:staging` based on `subtensor:devnet-ready` docker image + run-non-fast-blocks-e2e-test-staging: + if: needs.check-if-saturday.outputs.is-saturday == 'true' + name: "NFB staging: ${{ matrix.test-file }} / Python ${{ matrix.python-version }}" + needs: + - check-if-saturday + - find-tests + - pull-docker-images + runs-on: ubuntu-latest + timeout-minutes: 1440 + + strategy: + fail-fast: false # Allow other matrix jobs to run even if this job fails + max-parallel: 32 # Set the maximum number of parallel jobs (same as we have cores in ubuntu-latest runner) + matrix: + os: + - ubuntu-latest + test-file: ${{ fromJson(needs.find-tests.outputs.test-files) }} + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Check-out repository + uses: actions/checkout@v4 + with: + ref: staging + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: install dependencies + run: uv sync --extra dev --dev + + - name: Download Cached Docker Image + uses: actions/download-artifact@v4 + with: + name: subtensor-localnet-devnet-ready + + - name: Load Docker Image + run: docker load -i subtensor-localnet-devnet-ready.tar + + - name: Run patched E2E tests + env: + FAST_BLOCKS: "0" + LOCALNET_IMAGE_NAME: "ghcr.io/opentensor/subtensor-localnet:devnet-ready" + run: | + set +e + for i in 1 2 3; do + echo "๐Ÿ” Attempt $i: Running tests" + uv run pytest ${{ matrix.test-file }} -s + status=$? + if [ $status -eq 0 ]; then + echo "โœ… Tests passed on attempt $i" + break + else + echo "โŒ Tests failed on attempt $i" + if [ $i -eq 3 ]; then + echo "Tests failed after 3 attempts" + exit 1 + fi + echo "Retrying..." 
+ sleep 5 + fi + done diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 0000000000..7dfaf2eda5 --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,33 @@ +name: Ruff - formatter check +permissions: + contents: read + +on: + pull_request: + types: [opened, synchronize, reopened, edited] + +jobs: + ruff: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Ruff in virtual environment + run: | + python -m venv venv + source venv/bin/activate + python -m pip install --upgrade pip + python -m pip install ruff==0.11.5 + + - name: Ruff format check + run: | + source venv/bin/activate + python -m ruff format --diff bittensor diff --git a/.github/workflows/unit-and-integration-tests.yml b/.github/workflows/unit-and-integration-tests.yml new file mode 100644 index 0000000000..7bc70ae030 --- /dev/null +++ b/.github/workflows/unit-and-integration-tests.yml @@ -0,0 +1,59 @@ +name: Unit and integration tests checker +permissions: + contents: read + +on: + pull_request: + types: [opened, synchronize, reopened, edited] + +jobs: + unit-and-integration-tests: + if: github.event.pull_request.draft == false + runs-on: ubuntu-latest + + strategy: + fail-fast: false + max-parallel: 5 + matrix: + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Cache venv + id: cache + uses: actions/cache@v4 + with: + path: venv + key: v2-${{ runner.os }}-${{ hashFiles('pyproject.toml') }} + + - name: Install deps + if: ${{ steps.cache.outputs.cache-hit != 'true' }} + run: | + python -m venv venv + source venv/bin/activate + python -m pip install --upgrade pip + python -m pip install uv + python -m uv sync --extra dev --active + + - name: Unit tests + timeout-minutes: 20 + env: + PYTHONUNBUFFERED: "1" + run: | + source venv/bin/activate + python -m uv run pytest -n 2 tests/unit_tests/ --reruns 3 + + - name: Integration tests + timeout-minutes: 20 + env: + PYTHONUNBUFFERED: "1" + run: | + source venv/bin/activate + python -m uv run pytest -n 2 tests/integration_tests/ --reruns 3 diff --git a/CHANGELOG.md b/CHANGELOG.md index be626ec86c..acc5c17f16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,16 @@ # Changelog +## 9.8.3 /2025-07-18 +* improve make file by @basfroman in https://github.com/opentensor/bittensor/pull/2965 +* Move all workflows from `app.circleci.com` to `GH actions` by @basfroman in https://github.com/opentensor/bittensor/pull/2970 +* Improve `changelog` workflow by @basfroman in https://github.com/opentensor/bittensor/pull/2973 +* Add SECURITY.md by @basfroman in https://github.com/opentensor/bittensor/pull/2976 +* Improve test infrastructure by @basfroman in https://github.com/opentensor/bittensor/pull/2974 +* Add labels checker by @basfroman in https://github.com/opentensor/bittensor/pull/2977 +* Use specified block/hash in metagraph, get_subnet, get_all_subnets by @thewhaleking in https://github.com/opentensor/bittensor/pull/2979 + +**Full Changelog**: https://github.com/opentensor/bittensor/compare/v9.8.2...v9.8.3 + ## 9.8.2 /2025-07-10 ## What's Changed diff --git a/Makefile b/Makefile index 154d6a1f2f..d68152d42d 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,5 @@ 
-SHELL:=/bin/bash +SHELL := /bin/bash +.PHONY: init-venv clean-venv clean install install-dev reinstall reinstall-dev init-venv: python3 -m venv venv && source ./venv/bin/activate @@ -6,18 +7,20 @@ init-venv: clean-venv: source ./venv/bin/activate && \ pip freeze > make_venv_to_uninstall.txt && \ - pip uninstall -r make_venv_to_uninstall.txt && \ + pip uninstall -r make_venv_to_uninstall.txt -y && \ rm make_venv_to_uninstall.txt clean: - rm -rf dist/ && \ - rm -rf build/ && \ - rm -rf bittensor.egg-info/ && \ - rm -rf .pytest_cache/ && \ - rm -rf lib/ + rm -rf dist/ build/ bittensor.egg-info/ .pytest_cache/ lib/ -install: +install: init-venv + source ./venv/bin/activate && \ python3 -m pip install . -install-dev: - python3 -m pip install '.[dev]' +install-dev: init-venv + source ./venv/bin/activate && \ + python3 -m pip install -e '.[dev]' + +reinstall: clean clean-venv install + +reinstall-dev: clean clean-venv install-dev diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..ef06884bb2 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,42 @@ +# Security Policy + +## Reporting a Vulnerability + +If you discover a security vulnerability in the Bittensor protocol, SDK, or any of its components, we strongly encourage you to report it responsibly. + +Please **do not publicly disclose** the vulnerability until we have had a reasonable chance to address it. + +### ๐Ÿ” Confidential Reporting + +To report a vulnerability, you can use any of the following methods: + +- Create a [GitHub Issue](https://github.com/opentensor/bittensor/issues) using the `Security` label or title. + +- Contact us via our official Discord support thread: [#btcli-btsdk](https://discord.com/channels/1120750674595024897/1242999357436071956) + +### ๐Ÿงพ What to Include + +When reporting a vulnerability, please provide as much detail as possible: + +- Affected component (e.g., `bittensor`, `bittensor-cli`, `bittensor-wallet`, etc.) +- Version or commit hash +- Description of the vulnerability +- Steps to reproduce (if possible) +- Impact assessment +- Any potential mitigations or recommendations + +--- + +## Response Process + +1. We will acknowledge your report within **48 hours**. +2. We will investigate and confirm the issue. +3. If confirmed, we will coordinate on a fix and set an embargo period if needed. +4. A fix will be developed, tested, and released as soon as possible. +5. You will be credited (if you wish) in the security section of our release notes. + +--- + +## Thank You + +We appreciate your efforts in keeping the Bittensor ecosystem secure and responsible. 
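
The reworked Makefile targets above chain cleanup and installation (`reinstall-dev` runs `clean`, `clean-venv`, then `install-dev`), and every install target now creates and uses `./venv`. Below is one plausible local workflow built on those targets — a sketch assuming GNU make and `python3` on PATH; the final pytest invocation is an illustrative placeholder, not a target defined in the Makefile.

```bash
# First-time setup: init-venv + editable install of '.[dev]' into ./venv
make install-dev

# Later refreshes: remove build artifacts, empty the venv, reinstall dev deps
make reinstall-dev

# The recipes activate ./venv only inside their own sub-shells, so activate it
# explicitly before running tools interactively (pytest call is illustrative):
source ./venv/bin/activate
python -m pytest tests/unit_tests -x
```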
diff --git a/bittensor/core/async_subtensor.py b/bittensor/core/async_subtensor.py index 058bc10314..bb46e4ddc9 100644 --- a/bittensor/core/async_subtensor.py +++ b/bittensor/core/async_subtensor.py @@ -814,13 +814,21 @@ async def all_subnets( method="get_all_dynamic_info", block_hash=block_hash, ), - self.get_subnet_prices(), + self.get_subnet_prices(block_hash=block_hash), + return_exceptions=True, ) decoded = query.decode() - for sn in decoded: - sn.update({"price": subnet_prices.get(sn["netuid"], Balance.from_tao(0))}) + if not isinstance(subnet_prices, SubstrateRequestException): + for sn in decoded: + sn.update( + {"price": subnet_prices.get(sn["netuid"], Balance.from_tao(0))} + ) + else: + logging.warning( + f"Unable to fetch subnet prices for block {block_number}, block hash {block_hash}: {subnet_prices}" + ) return DynamicInfo.list_from_dicts(decoded) async def blocks_since_last_step( @@ -1129,21 +1137,30 @@ async def get_all_subnets_info( Notes: See also: """ - result = await self.query_runtime_api( - runtime_api="SubnetInfoRuntimeApi", - method="get_subnets_info_v2", - params=[], - block=block, - block_hash=block_hash, - reuse_block=reuse_block, + result, prices = await asyncio.gather( + self.query_runtime_api( + runtime_api="SubnetInfoRuntimeApi", + method="get_subnets_info_v2", + params=[], + block=block, + block_hash=block_hash, + reuse_block=reuse_block, + ), + self.get_subnet_prices( + block=block, block_hash=block_hash, reuse_block=reuse_block + ), + return_exceptions=True, ) if not result: return [] - subnets_prices = await self.get_subnet_prices() - - for subnet in result: - subnet.update({"price": subnets_prices.get(subnet["netuid"], 0)}) + if not isinstance(prices, SubstrateRequestException): + for subnet in result: + subnet.update({"price": prices.get(subnet["netuid"], 0)}) + else: + logging.warning( + f"Unable to fetch subnet prices for block {block}, block hash {block_hash}: {prices}" + ) return SubnetInfo.list_from_dicts(result) @@ -2035,6 +2052,7 @@ async def get_metagraph_info( "SubnetInfoRuntimeApi", "get_metagraph", params=[netuid], + block_hash=block_hash, ) if query.value is None: diff --git a/bittensor/core/subtensor.py b/bittensor/core/subtensor.py index b64809ee15..b4b35d334a 100644 --- a/bittensor/core/subtensor.py +++ b/bittensor/core/subtensor.py @@ -455,10 +455,16 @@ def all_subnets(self, block: Optional[int] = None) -> Optional[list["DynamicInfo method="get_all_dynamic_info", block_hash=block_hash, ) - subnet_prices = self.get_subnet_prices() decoded = query.decode() - for sn in decoded: - sn.update({"price": subnet_prices.get(sn["netuid"], Balance.from_tao(0))}) + try: + subnet_prices = self.get_subnet_prices(block=block) + for sn in decoded: + sn.update( + {"price": subnet_prices.get(sn["netuid"], Balance.from_tao(0))} + ) + except SubstrateRequestException as e: + logging.warning(f"Unable to fetch subnet prices for block {block}: {e}") + return DynamicInfo.list_from_dicts(decoded) def blocks_since_last_step( @@ -644,11 +650,13 @@ def get_all_subnets_info(self, block: Optional[int] = None) -> list["SubnetInfo" ) if not result: return [] + try: + subnets_prices = self.get_subnet_prices(block=block) - subnets_prices = self.get_subnet_prices() - - for subnet in result: - subnet.update({"price": subnets_prices.get(subnet["netuid"], 0)}) + for subnet in result: + subnet.update({"price": subnets_prices.get(subnet["netuid"], 0)}) + except SubstrateRequestException as e: + logging.warning(f"Unable to fetch subnet prices for block {block}: {e}") return 
SubnetInfo.list_from_dicts(result)
diff --git a/pyproject.toml b/pyproject.toml
index 9c32e6ee7a..de109b014b 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "bittensor"
-version = "9.8.2"
+version = "9.8.3"
 description = "Bittensor"
 readme = "README.md"
 authors = [
diff --git a/tests/e2e_tests/conftest.py b/tests/e2e_tests/conftest.py
index 863e9c3210..c79446be26 100644
--- a/tests/e2e_tests/conftest.py
+++ b/tests/e2e_tests/conftest.py
@@ -18,7 +18,7 @@
     setup_wallet,
 )
 
-LOCALNET_IMAGE_NAME = "ghcr.io/opentensor/subtensor-localnet:devnet-ready"
+LOCALNET_IMAGE_NAME = os.getenv("LOCALNET_IMAGE_NAME") or "ghcr.io/opentensor/subtensor-localnet:devnet-ready"
 
 CONTAINER_NAME_PREFIX = "test_local_chain_"
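
The `conftest.py` change above makes the localnet image configurable via `LOCALNET_IMAGE_NAME`, which is how the workflows pin the `main`, `testnet`, or `devnet-ready` images. A minimal sketch of reproducing a CI-style e2e run locally, assuming Docker and `uv` are installed and you are logged in to `ghcr.io` if the image requires it; the test file name is illustrative.

```bash
# Mirror the CI e2e setup locally using the LOCALNET_IMAGE_NAME override.
IMAGE="ghcr.io/opentensor/subtensor-localnet:devnet-ready"   # or :main / :testnet

docker pull "$IMAGE"          # same image the workflows cache as an artifact
uv sync --extra dev --dev     # install test dependencies

# FAST_BLOCKS=1 matches the regular runs; the Saturday nightly jobs use FAST_BLOCKS=0.
LOCALNET_IMAGE_NAME="$IMAGE" FAST_BLOCKS=1 \
    uv run pytest tests/e2e_tests/test_staking.py -s
```

If `LOCALNET_IMAGE_NAME` is unset, the suite falls back to the `devnet-ready` image, matching the previous hard-coded default.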