8 changes: 8 additions & 0 deletions .config/nextest.toml
@@ -41,3 +41,11 @@ slow-timeout = { period = "120s", terminate-after = 3 }
filter = 'test(rpc_snapshot_test_)'
slow-timeout = { period = "120s", terminate-after = 3 }
retries = { backoff = "exponential", count = 3, delay = "5s", jitter = true }

# These tests download test snapshots from the network, which can take a while.
# There might be some network issues, so we allow some retries with backoff.
# Jitter is enabled to avoid [thundering herd issues](https://en.wikipedia.org/wiki/Thundering_herd_problem).
[[profile.default.overrides]]
filter = 'test(state_compute_)'
slow-timeout = { period = "120s", terminate-after = 3 }
retries = { backoff = "exponential", count = 3, delay = "5s", jitter = true }
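
For context on the `retries` policy above: the standalone Rust sketch below illustrates how an exponential-backoff schedule with jitter spreads retries out, assuming delays roughly double per attempt and jitter scales each delay by a random factor. The exact formula nextest applies may differ, and the names `base_delay` and `max_retries` are illustrative only.

use std::time::{Duration, SystemTime, UNIX_EPOCH};

fn main() {
    let base_delay = Duration::from_secs(5); // matches `delay = "5s"` above
    let max_retries: u32 = 3; // matches `count = 3`

    for attempt in 0..max_retries {
        // Exponential backoff: 5s, 10s, 20s, ...
        let backoff = base_delay * 2u32.pow(attempt);

        // Jitter: scale the delay by a pseudo-random factor in [0.5, 1.0) so that
        // many runners retrying at once do not all hit the server at the same moment.
        // A real implementation would use a proper RNG; sub-second clock noise is a
        // stand-in that keeps this sketch dependency-free.
        let nanos = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .subsec_nanos() as f64;
        let factor = 0.5 + (nanos / 1e9) * 0.5;
        let jittered = backoff.mul_f64(factor);

        println!("retry {} after ~{:?}", attempt + 1, jittered);
    }
}
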
17 changes: 17 additions & 0 deletions .github/workflows/coverage.yml
@@ -28,11 +28,16 @@ env:
CI: 1
CARGO_INCREMENTAL: 0
CACHE_TIMEOUT_MINUTES: 5
AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
RUSTC_WRAPPER: sccache
CC: sccache clang
CXX: sccache clang++
# To minimize compile times: https://nnethercote.github.io/perf-book/build-configuration.html#minimizing-compile-times
RUSTFLAGS: "-C linker=clang -C link-arg=-fuse-ld=lld"
FOREST_F3_SIDECAR_FFI_BUILD_OPT_OUT: 1
FIL_PROOFS_PARAMETER_CACHE: /var/tmp/filecoin-proof-parameters
RUST_LOG: warn

jobs:
codecov:
@@ -41,6 +46,14 @@ jobs:
runs-on: buildjet-4vcpu-ubuntu-2204
timeout-minutes: 45
steps:
- name: Configure SCCache variables
run: |
# External PRs do not have access to 'vars' or 'secrets'.
if [[ "${{secrets.AWS_ACCESS_KEY_ID}}" != "" ]]; then
echo "SCCACHE_ENDPOINT=${{ vars.SCCACHE_ENDPOINT}}" >> $GITHUB_ENV
echo "SCCACHE_BUCKET=${{ vars.SCCACHE_BUCKET}}" >> $GITHUB_ENV
echo "SCCACHE_REGION=${{ vars.SCCACHE_REGION}}" >> $GITHUB_ENV
fi
- uses: actions/checkout@v6
- name: Setup sccache
uses: mozilla-actions/sccache-action@v0.0.9
@@ -51,6 +64,10 @@
go-version-file: "go.work"
- uses: taiki-e/install-action@cargo-llvm-cov
- uses: taiki-e/install-action@nextest
- name: Fetch proof params and RPC test snapshots
run: |
cargo run --bin forest-dev --no-default-features --profile quick -- fetch-rpc-tests
ls -ahl $FIL_PROOFS_PARAMETER_CACHE
- name: Generate code coverage
run: make codecov
# Save lcov.info as an artifact for debugging purposes
17 changes: 17 additions & 0 deletions .github/workflows/unit-tests.yml
@@ -28,9 +28,13 @@ env:
CI: 1
CARGO_INCREMENTAL: 0
CACHE_TIMEOUT_MINUTES: 5
AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
RUSTC_WRAPPER: "sccache"
CC: "sccache clang"
CXX: "sccache clang++"
FIL_PROOFS_PARAMETER_CACHE: /var/tmp/filecoin-proof-parameters
RUST_LOG: warn

jobs:
tests-release:
@@ -39,6 +43,14 @@
# This is done to limit the runner cost.
if: github.event.pull_request.draft == false
steps:
- name: Configure SCCache variables
run: |
# External PRs do not have access to 'vars' or 'secrets'.
if [[ "${{secrets.AWS_ACCESS_KEY_ID}}" != "" ]]; then
echo "SCCACHE_ENDPOINT=${{ vars.SCCACHE_ENDPOINT}}" >> $GITHUB_ENV
echo "SCCACHE_BUCKET=${{ vars.SCCACHE_BUCKET}}" >> $GITHUB_ENV
echo "SCCACHE_REGION=${{ vars.SCCACHE_REGION}}" >> $GITHUB_ENV
fi
# find the nearest S3 space for storing cache files
- name: Show IP
run: curl ifconfig.me
@@ -61,12 +73,17 @@
go-version-file: "go.work"
- name: install nextest
uses: taiki-e/install-action@nextest
- name: Fetch proof params and RPC test snapshots
run: |
cargo run --bin forest-dev --no-default-features --profile quick -- fetch-rpc-tests
ls -ahl $FIL_PROOFS_PARAMETER_CACHE
- run: |
make test-release-docs
make test-release
env:
# To minimize compile times: https://nnethercote.github.io/perf-book/build-configuration.html#minimizing-compile-times
RUSTFLAGS: "-C linker=clang -C link-arg=-fuse-ld=lld"
FOREST_TEST_SKIP_PROOF_PARAM_CHECK: 1
- id: get-cache-hash
run: |
ls -lhR ~/.cache/forest/test/rpc-snapshots/rpc_test/*
10 changes: 5 additions & 5 deletions Makefile
@@ -99,23 +99,23 @@ docker-run:
docker build -t forest:latest -f ./Dockerfile . && docker run forest

test:
cargo nextest run --workspace --no-fail-fast
cargo nextest run --workspace --no-default-features --no-fail-fast

test-docs:
# nextest doesn't run doctests https://github.com/nextest-rs/nextest/issues/16
# see also lib.rs::doctest_private
cargo test --doc --features doctest-private
cargo test --doc --no-default-features --features doctest-private

test-release:
cargo nextest run --cargo-profile quick --workspace --no-fail-fast
cargo nextest run --cargo-profile quick --workspace --no-default-features --no-fail-fast

test-release-docs:
# nextest doesn't run doctests https://github.com/nextest-rs/nextest/issues/16
# see also lib.rs::doctest_private
cargo test --profile quick --doc --features doctest-private
cargo test --profile quick --doc --no-default-features --features doctest-private

codecov:
cargo llvm-cov --workspace --codecov --output-path lcov.info
cargo llvm-cov -p forest-filecoin --no-default-features --codecov --output-path lcov.info

# Checks if all headers are present and adds if not
license:
7 changes: 7 additions & 0 deletions src/bin/forest-dev.rs
@@ -0,0 +1,7 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

#[tokio::main(flavor = "multi_thread")]
async fn main() -> anyhow::Result<()> {
forest::forest_dev_main(std::env::args_os()).await
}
9 changes: 7 additions & 2 deletions src/chain_sync/chain_follower.rs
@@ -884,13 +884,18 @@ mod tests {
use num_bigint::BigInt;
use num_traits::ToPrimitive;
use std::sync::Arc;
use tracing::level_filters::LevelFilter;
use tracing_subscriber::EnvFilter;

fn setup() -> (Arc<ChainStore<MemoryDB>>, Chain4U<Arc<MemoryDB>>) {
// Initialize test logger
let _ = tracing_subscriber::fmt()
.without_time()
.with_env_filter(
tracing_subscriber::EnvFilter::from_default_env()
.add_directive(tracing::Level::DEBUG.into()),
EnvFilter::builder()
.with_default_directive(LevelFilter::DEBUG.into())
.from_env()
.unwrap(),
)
.try_init();

18 changes: 18 additions & 0 deletions src/dev/main.rs
@@ -0,0 +1,18 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::subcommands::Cli;
use crate::cli_shared::logger::setup_minimal_logger;
use clap::Parser as _;
use std::ffi::OsString;

pub async fn main<ArgT>(args: impl IntoIterator<Item = ArgT>) -> anyhow::Result<()>
where
ArgT: Into<OsString> + Clone,
{
// Capture Cli inputs
let Cli { cmd } = Cli::parse_from(args);
setup_minimal_logger();
let client = crate::rpc::Client::default_or_from_env(None)?;
cmd.run(client).await
}
5 changes: 5 additions & 0 deletions src/dev/mod.rs
@@ -0,0 +1,5 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

pub mod main;
pub mod subcommands;
87 changes: 87 additions & 0 deletions src/dev/subcommands/mod.rs
@@ -0,0 +1,87 @@
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use crate::cli_shared::cli::HELP_MESSAGE;
use crate::rpc::Client;
use crate::utils::net::{DownloadFileOption, download_file_with_cache};
use crate::utils::proofs_api::ensure_proof_params_downloaded;
use crate::utils::version::FOREST_VERSION_STRING;
use anyhow::Context as _;
use clap::Parser;
use directories::ProjectDirs;
use std::borrow::Cow;
use std::path::PathBuf;
use std::time::Duration;
use tokio::task::JoinSet;
use url::Url;

/// Command-line options for the `forest-dev` binary
#[derive(Parser)]
#[command(name = env!("CARGO_PKG_NAME"), bin_name = "forest-dev", author = env!("CARGO_PKG_AUTHORS"), version = FOREST_VERSION_STRING.as_str(), about = env!("CARGO_PKG_DESCRIPTION")
)]
#[command(help_template(HELP_MESSAGE))]
pub struct Cli {
#[command(subcommand)]
pub cmd: Subcommand,
}

/// forest-dev sub-commands
#[derive(clap::Subcommand)]
pub enum Subcommand {
/// Fetch RPC test snapshots to the local cache
FetchRpcTests,
}

impl Subcommand {
pub async fn run(self, _client: Client) -> anyhow::Result<()> {
match self {
Self::FetchRpcTests => fetch_rpc_tests().await,
}
}
}

async fn fetch_rpc_tests() -> anyhow::Result<()> {
crate::utils::proofs_api::maybe_set_proofs_parameter_cache_dir_env(
&crate::Config::default().client.data_dir,
);
ensure_proof_params_downloaded().await?;
let tests = include_str!("../../tool/subcommands/api_cmd/test_snapshots.txt")
.lines()
.map(|i| {
// Remove comment
i.split("#").next().unwrap().trim().to_string()
})
.filter(|l| !l.is_empty() && !l.starts_with('#'));
let mut joinset = JoinSet::new();
for test in tests {
joinset.spawn(fetch_rpc_test_snapshot(test.into()));
}
for result in joinset.join_all().await {
if let Err(e) = result {
tracing::warn!("{e}");
}
}
Ok(())
}

pub async fn fetch_rpc_test_snapshot<'a>(name: Cow<'a, str>) -> anyhow::Result<PathBuf> {
let url: Url =
format!("https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/rpc_test/{name}")
.parse()
.with_context(|| format!("Failed to parse URL for test: {name}"))?;
let project_dir =
ProjectDirs::from("com", "ChainSafe", "Forest").context("failed to get project dir")?;
let cache_dir = project_dir.cache_dir().join("test").join("rpc-snapshots");
let path = crate::utils::retry(
crate::utils::RetryArgs {
timeout: Some(Duration::from_secs(30)),
max_retries: Some(5),
delay: Some(Duration::from_secs(1)),
},
|| download_file_with_cache(&url, &cache_dir, DownloadFileOption::NonResumable),
)
.await
.map_err(|e| anyhow::anyhow!("failed to fetch rpc test snapshot {name}: {e}"))?
.path;
Ok(path)
}
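
`fetch_rpc_tests` above spawns one download task per snapshot and logs, rather than propagates, individual failures. A self-contained sketch of that `JoinSet` fan-out pattern follows, assuming a tokio version with `JoinSet::join_all` (as used in the diff); the `fetch` function here is a dummy stand-in for `fetch_rpc_test_snapshot`, not a Forest API.

use tokio::task::JoinSet;

// Dummy stand-in for `fetch_rpc_test_snapshot`: pretend to download one named item.
async fn fetch(name: String) -> anyhow::Result<String> {
    if name.is_empty() {
        anyhow::bail!("empty snapshot name");
    }
    Ok(format!("downloaded {name}"))
}

#[tokio::main]
async fn main() {
    let names = ["a.json.zst", "b.json.zst", ""].map(String::from);

    // Spawn one task per snapshot so the downloads run concurrently.
    let mut joinset = JoinSet::new();
    for name in names {
        joinset.spawn(fetch(name));
    }

    // `join_all` waits for every task; a failed download is logged and skipped
    // rather than aborting the whole run, mirroring the subcommand above.
    for result in joinset.join_all().await {
        match result {
            Ok(msg) => println!("{msg}"),
            Err(e) => eprintln!("warning: {e}"),
        }
    }
}
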
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -47,6 +47,7 @@ mod cli;
mod cli_shared;
mod daemon;
mod db;
mod dev;
mod documentation;
mod eth;
mod f3;
@@ -124,6 +125,7 @@ pub use auth::{JWT_IDENTIFIER, verify_token};
pub use cli::main::main as forest_main;
pub use cli_shared::cli::{Client, Config};
pub use daemon::main::main as forestd_main;
pub use dev::main::main as forest_dev_main;
pub use key_management::{
ENCRYPTED_KEYSTORE_NAME, FOREST_KEYSTORE_PHRASE_ENV, KEYSTORE_NAME, KeyStore, KeyStoreConfig,
};
20 changes: 16 additions & 4 deletions src/state_manager/utils.rs
@@ -197,6 +197,7 @@ pub mod state_compute {
use std::{
path::{Path, PathBuf},
sync::{Arc, LazyLock},
time::Duration,
};
use url::Url;

@@ -215,11 +216,22 @@
let url = Url::parse(&format!(
"https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/state_compute/{chain}_{epoch}.forest.car.zst"
))?;
Ok(
download_file_with_cache(&url, &SNAPSHOT_CACHE_DIR, DownloadFileOption::NonResumable)
.await?
.path,
Ok(crate::utils::retry(
crate::utils::RetryArgs {
timeout: Some(Duration::from_secs(30)),
max_retries: Some(5),
delay: Some(Duration::from_secs(1)),
},
|| {
download_file_with_cache(
&url,
&SNAPSHOT_CACHE_DIR,
DownloadFileOption::NonResumable,
)
},
)
.await?
.path)
}

pub async fn prepare_state_compute(
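
Both this hunk and the new forest-dev subcommand wrap the snapshot download in `crate::utils::retry`, whose implementation is not part of this diff. The sketch below is a hypothetical, simplified re-implementation of a per-attempt-timeout retry helper to illustrate the intended semantics (30s per attempt, up to 5 attempts, 1s between attempts); Forest's real helper may differ in signature and behavior.

use std::future::Future;
use std::time::Duration;

// Hypothetical, simplified stand-in for Forest's `crate::utils::retry`:
// each attempt is capped by `timeout`, and we wait `delay` between attempts.
async fn retry<F, Fut, T, E>(
    timeout: Duration,
    max_retries: usize,
    delay: Duration,
    mut op: F,
) -> anyhow::Result<T>
where
    F: FnMut() -> Fut,
    Fut: Future<Output = Result<T, E>>,
    E: std::fmt::Display,
{
    let mut last_err = String::from("no attempts were made");
    for attempt in 1..=max_retries {
        match tokio::time::timeout(timeout, op()).await {
            Ok(Ok(value)) => return Ok(value),
            Ok(Err(e)) => last_err = format!("attempt {attempt} failed: {e}"),
            Err(_) => last_err = format!("attempt {attempt} timed out"),
        }
        if attempt < max_retries {
            tokio::time::sleep(delay).await;
        }
    }
    anyhow::bail!(last_err)
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Usage mirroring the snapshot download above: up to 5 attempts, 30s per
    // attempt, 1s between attempts. The closure is a dummy operation.
    let value = retry(Duration::from_secs(30), 5, Duration::from_secs(1), || async {
        Ok::<_, std::io::Error>(42)
    })
    .await?;
    println!("{value}");
    Ok(())
}
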
37 changes: 4 additions & 33 deletions src/tool/subcommands/api_cmd/test_snapshot.rs
@@ -185,53 +185,24 @@ async fn ctx(
mod tests {
use super::*;
use crate::Config;
use crate::utils::net::{DownloadFileOption, download_file_with_cache};
use crate::utils::proofs_api::ensure_proof_params_downloaded;
use ahash::HashSet;
use anyhow::Context as _;
use directories::ProjectDirs;
use std::sync::LazyLock;
use std::time::{Duration, Instant};
use tokio::sync::Mutex;
use url::Url;
use std::time::Instant;

// To run a single test: cargo test --lib filecoin_multisig_statedecodeparams_1754230255631789 -- --nocapture
include!(concat!(env!("OUT_DIR"), "/__rpc_regression_tests_gen.rs"));

async fn rpc_regression_test_run(name: &str) {
// Set proof parameter data dir and make sure the proofs are available
{
static PROOF_PARAMS_LOCK: LazyLock<Mutex<()>> = LazyLock::new(|| Mutex::new(()));
let _guard = PROOF_PARAMS_LOCK.lock().await;
crate::utils::proofs_api::maybe_set_proofs_parameter_cache_dir_env(
&Config::default().client.data_dir,
);
ensure_proof_params_downloaded().await.unwrap();
}
let url: Url =
format!("https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/rpc_test/{name}")
.parse()
.with_context(|| format!("Failed to parse URL for test: {name}"))
.unwrap();
let project_dir = ProjectDirs::from("com", "ChainSafe", "Forest").unwrap();
let cache_dir = project_dir.cache_dir().join("test").join("rpc-snapshots");
let path = crate::utils::retry(
crate::utils::RetryArgs {
timeout: Some(Duration::from_secs(if crate::utils::is_ci() {
20
} else {
120
})),
max_retries: Some(5),
..Default::default()
},
|| async {
download_file_with_cache(&url, &cache_dir, DownloadFileOption::NonResumable).await
},
)
.await
.unwrap()
.path;
let path = crate::dev::subcommands::fetch_rpc_test_snapshot(name.into())
.await
.unwrap();

// We need to set RNG seed so that tests are run with deterministic
// output. The snapshots should be generated with a node running with the same seed, if