Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/aspire/commands/cov3d.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,10 @@ def cov3d(

source = source.whiten()
basis = FBBasis3D((max_resolution, max_resolution, max_resolution))
mean_estimator = MeanEstimator(source, basis, batch_size=8192)
mean_estimator = MeanEstimator(source, basis, batch_size=512)
mean_est = mean_estimator.estimate()

noise_estimator = WhiteNoiseEstimator(source, batch_size=500)
noise_estimator = WhiteNoiseEstimator(source, batch_size=512)
# Estimate the noise variance. This is needed for the covariance estimation step below.
noise_variance = noise_estimator.estimate()
logger.info(f"Noise Variance = {noise_variance}")
Expand Down
8 changes: 6 additions & 2 deletions src/aspire/covariance/covar2d.py
Original file line number Diff line number Diff line change
Expand Up @@ -513,10 +513,14 @@ class BatchedRotCov2D(RotCov2D):
be extracted.
:param basis: The `FBBasis2D` object used to decompose the images. By
default, this is set to `FFBBasis2D((src.L, src.L))`.
:param batch_size: The number of images to process at a time (default 8192).
:param batch_size: The number of images to process at a time (default 512).
512 is a good starting point for large images with a GPU where
memory is a concern. If the GPU runs out of memory, try
scaling down `batch_size`. For high-memory CPU applications,
scaling up to a larger value such as 8192 may yield better performance.
"""

def __init__(self, src, basis=None, batch_size=8192):
def __init__(self, src, basis=None, batch_size=512):
self.src = src
self.basis = basis
self.batch_size = batch_size
Expand Down
Loading