5 changes: 4 additions & 1 deletion docs/source/overview.parallelisation.rst
@@ -68,7 +68,10 @@ created using `multiprocessing.pool.Pool
The number of threads is controlled by an integer, passed in to the
function as an optional ``num_python_subprocesses`` argument, or stored
in the env var ``WFL_NUM_PYTHON_SUBPROCESSES``. The script should be
started with a normal run of the python executable.
started with a normal run of the python executable. Setting
the ``WFL_TORCH_N_GPUS`` env var to the number of GPUs
causes ``wfl`` to call ``torch.cuda.set_device()`` in each subprocess,
ensuring that each subprocess is assigned a GPU that is not used by any other subprocess.
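
A minimal sketch of such a run, assuming the autoparallelized
``generic.calculate`` wrapper with an illustrative ``EMT`` calculator
(the ``Atoms`` list here is a placeholder)::

    import os

    # number of parallel python subprocesses (alternative to passing
    # num_python_subprocesses explicitly below)
    os.environ["WFL_NUM_PYTHON_SUBPROCESSES"] = "2"
    # for torch-based calculators, pin one GPU to each subprocess
    # os.environ["WFL_TORCH_N_GPUS"] = "2"

    from ase import Atoms
    from ase.calculators.emt import EMT

    from wfl.configset import ConfigSet, OutputSpec
    from wfl.calculators import generic
    from wfl.autoparallelize import AutoparaInfo

    ats = [Atoms("Al", cell=[3, 3, 3], pbc=True) for _ in range(20)]
    out = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_",
                            autopara_info=AutoparaInfo(num_python_subprocesses=2))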


========================================
31 changes: 28 additions & 3 deletions tests/local_scripts/complete_pytest.tin
@@ -2,17 +2,40 @@

module purge
# module load compiler/gnu python/system python_extras/quippy lapack/mkl
module load compiler/gnu python python_extras/quippy lapack/mkl
module load compiler/gnu python python_extras/structure python_extras/quippy lapack/mkl
# for wfl dependencies
module load python_extras/wif
module load python_extras/torch/cpu

if [ ! -z "$WFL_PYTEST_POST_MODULE_COMMANDS" ]; then
echo "Using WFL_PYTEST_POST_MODULE_COMMANDS '$WFL_PYTEST_POST_MODULE_COMMANDS'" 1>&2
eval $WFL_PYTEST_POST_MODULE_COMMANDS
else
echo "Using no WFL_PYTEST_POST_MODULE_COMMANDS" 1>&2
fi

if [ -z "$WFL_PYTEST_EXPYRE_INFO" ]; then
echo "To override partition used, set WFL_PYTEST_EXPYRE_INFO='{\"resources\" : {\"partitions\": \"DESIRED_PARTITION\"}}'" 1>&2
fi
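# build WFL_PYTEST_EXPYRE_INFO: start from the default pre_cmds and merge in any
# user-supplied settings, which take precedence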
WFL_PYTEST_EXPYRE_INFO=$(
cat << EOF | python3
import json, os
i = {"pre_cmds": ["module purge",
"module load compiler/gnu lapack/mkl python python_extras/structure python_extras/quippy python_extras/wif dft/vasp dft/pwscf",
"module list"]}
ienv = json.loads(os.environ.get("WFL_PYTEST_EXPYRE_INFO", "{}"))
i.update(ienv)
print(json.dumps(i))
EOF
)
export WFL_PYTEST_EXPYRE_INFO
echo "Using WFL_PYTEST_EXPYRE_INFO '$WFL_PYTEST_EXPYRE_INFO'" 1>&2

if [ ! -z $WFL_PYTHONPATH_EXTRA ]; then
echo "Adding WFL_PYTHONPATH_EXTRA '$WFL_PYTHONPATH_EXTRA'" 1>&2
export PYTHONPATH=${WFL_PYTHONPATH_EXTRA}:${PYTHONPATH}
else
echo "Adding no WFL_PYTHONPATH_EXTRA" 1>&2
fi

export JULIA_PROJECT=${PWD}/tests/assets/julia
@@ -29,14 +52,16 @@ echo "" >> complete_pytest.tin.out
# buildcell
export WFL_PYTEST_BUILDCELL=$HOME/src/work/AIRSS/airss-0.9.1/src/buildcell/src/buildcell
# VASP
module load dft/vasp
module load dft/vasp/serial
export ASE_VASP_COMMAND=vasp.serial
export ASE_VASP_COMMAND_GAMMA=vasp.gamma.serial
export PYTEST_VASP_POTCAR_DIR=$VASP_PATH/pot/rev_54/PBE
# QE
module load dft/pwscf
# no ORCA

module list

export OPENBLAS_NUM_THREADS=1
export MKL_NUM_THREADS=1
# required for descriptor calc to not hang
@@ -70,7 +95,7 @@ l=`egrep '^=.*(passed|failed|skipped|xfailed|error).* in ' complete_pytest.tin.o
echo "summary line $l"
lp=$( echo $l | sed -E -e 's/ in .*//' -e 's/\s*,\s*/\n/g' )

declare -A expected_n=( ["passed"]="177" ["skipped"]="21" ["warnings"]=823 ["xfailed"]=2 ["xpassed"]=1 )
declare -A expected_n=( ["passed"]="188" ["skipped"]="26" ["warnings"]=1068 ["xfailed"]=1 )
IFS=$'\n'
t_stat=0
for out in $lp; do
65 changes: 61 additions & 4 deletions tests/test_autoparallelize.py
@@ -1,4 +1,5 @@
import pytest
import os
import time

import numpy as np
@@ -12,6 +13,12 @@
from wfl.calculators import generic
from wfl.autoparallelize import AutoparaInfo

try:
import torch
from mace.calculators.foundations_models import mace_mp
except ImportError:
torch = None


def test_empty_iterator(tmp_path):
co = buildcell.buildcell(range(0), OutputSpec(tmp_path / 'dummy.xyz'), buildcell_cmd='dummy', buildcell_input='dummy')
@@ -35,21 +42,71 @@ def test_autopara_info_dict():
def test_pool_speedup():
np.random.seed(5)

rng = np.random.default_rng(5)
ats = []
nconf = 60
at_prim = Atoms('Al', cell=[1, 1, 1], pbc=[True] * 3)
for _ in range(nconf):
ats.append(Atoms(['Al'] * nconf, scaled_positions=np.random.uniform(size=(nconf, 3)), cell=[10, 10, 10], pbc=[True] * 3))
ats.append(at_prim * (4, 4, 4))
ats[-1].rattle(rng=rng)

t0 = time.time()
co = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_", autopara_info=AutoparaInfo(num_python_subprocesses=1))
co = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_",
autopara_info=AutoparaInfo(num_python_subprocesses=1,
num_inputs_per_python_subprocess=30))
dt_1 = time.time() - t0

t0 = time.time()
co = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_", autopara_info=AutoparaInfo(num_python_subprocesses=2))
co = generic.calculate(ConfigSet(ats), OutputSpec(), EMT(), output_prefix="_auto_",
autopara_info=AutoparaInfo(num_python_subprocesses=2,
num_inputs_per_python_subprocess=30))
dt_2 = time.time() - t0

print("time ratio", dt_2 / dt_1)
assert dt_2 < dt_1 * (2/3)
assert dt_2 / dt_1 < 0.75


@pytest.mark.skipif(torch is None or not torch.cuda.is_available() or os.environ.get("WFL_TORCH_N_GPUS") is None, reason="No torch CUDA devices available, or WFL_TORCH_N_GPUS isn't set")
@pytest.mark.perf
def test_pool_speedup_GPU(monkeypatch):
np.random.seed(5)

rng = np.random.default_rng(5)
ats = []
nconf = 60
at_prim = Atoms('Al', cell=[1, 1, 1], pbc=[True] * 3)
for _ in range(nconf):
ats.append(at_prim * (5, 5, 5))
ats[-1].rattle(rng=rng)

calc = (mace_mp, ["small-omat-0"], {"device": "cuda"})

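# requested number of GPUs; an empty value falls back to the number of devices listed in CUDA_VISIBLE_DEVICES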
req_n_gpus = os.environ["WFL_TORCH_N_GPUS"]
if len(req_n_gpus) == 0:
req_n_gpus = str(len(os.environ["CUDA_VISIBLE_DEVICES"].split(",")))

if "WFL_TORCH_N_GPUS" in os.environ:
monkeypatch.delenv("WFL_TORCH_N_GPUS")

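# baseline timing: a single subprocess, with per-subprocess GPU pinning disabled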
t0 = time.time()
co = generic.calculate(ConfigSet(ats), OutputSpec(), calc, output_prefix="_auto_",
autopara_info=AutoparaInfo(num_python_subprocesses=1,
num_inputs_per_python_subprocess=30))
dt_1 = time.time() - t0

monkeypatch.setenv("WFL_TORCH_N_GPUS", req_n_gpus)

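# re-enable per-subprocess GPU pinning and time the run with two subprocesses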
t0 = time.time()
co = generic.calculate(ConfigSet(ats), OutputSpec(), calc, output_prefix="_auto_",
autopara_info=AutoparaInfo(num_python_subprocesses=2,
num_inputs_per_python_subprocess=30))
dt_2 = time.time() - t0

monkeypatch.delenv("WFL_TORCH_N_GPUS")

print("time ratio", dt_2 / dt_1)
assert dt_2 / dt_1 < 0.75


def test_outputspec_overwrite(tmp_path):
with open(tmp_path / "ats.xyz", "w") as fout:
64 changes: 64 additions & 0 deletions tests/test_clean_dir.py
@@ -0,0 +1,64 @@
from wfl.calculators.utils import clean_rundir

# def clean_rundir(rundir, keep_files, default_keep_files, calculation_succeeded):

all_files = ["a", "aa", "b", "c", "d"]
default_keep_files = ["a*", "b"]
actual_default_keep_files = ["a", "aa", "b"]

def create_files(dir):
for filename in all_files:
with open(dir / filename, "w") as fout:
fout.write("content\n")

def check_dir(dir, files):
if files is None:
# even path doesn't exist
assert not dir.is_dir()
return

files = set(files)

# all expected files are present
for file in files:
assert (dir / file).is_file()
# all present files are expected
for file in dir.iterdir():
assert file.name in files

def test_clean_rundir(tmp_path):
# keep True
# keep all files regardless of success
for succ, files in [(True, all_files), (False, all_files)]:
p = tmp_path / f"True_{succ}"
p.mkdir()
create_files(p)
clean_rundir(p, True, default_keep_files, calculation_succeeded=succ)
check_dir(p, files)

# keep False
# succeeded means keep nothing, failed means keep default
for succ, files in [(True, None), (False, actual_default_keep_files)]:
p = tmp_path / f"False_{succ}"
p.mkdir()
create_files(p)
clean_rundir(p, False, default_keep_files, calculation_succeeded=succ)
check_dir(p, files)

# keep subset of default
# succeeded means keep subset, failed means keep default
for succ, files in [(True, ["a"]), (False, actual_default_keep_files)]:
p = tmp_path / f"a_{succ}"
p.mkdir()
create_files(p)
clean_rundir(p, ["a"], default_keep_files, calculation_succeeded=succ)
check_dir(p, files)

# keep different set from default
# succeeded means keep set, failed means keep union of default and set
for succ, files in [(True, ["a", "c"]), (False, actual_default_keep_files + ["a", "c"])]:
p = tmp_path / f"ac_{succ}"
p.mkdir()
create_files(p)
clean_rundir(p, ["a", "c"], default_keep_files, calculation_succeeded=succ)
check_dir(p, files)
16 changes: 8 additions & 8 deletions tests/test_md.py
@@ -20,9 +20,9 @@
from wfl.generate.md.abort import AbortOnCollision, AbortOnLowEnergy

try:
from wif.Langevin_BAOAB import Langevin_BAOAB
from ase.md.langevinbaoab import LangevinBAOAB
except ImportError:
Langevin_BAOAB = None
LangevinBAOAB = None

def select_every_10_steps_for_tests_during(at):
return at.info.get("MD_step", 1) % 10 == 0
@@ -153,14 +153,14 @@ def test_NPT_Berendsen(cu_slab):
assert np.allclose(atoms_traj[0].cell, atoms_traj[-1].cell * cell_f)


@pytest.mark.skipif(Langevin_BAOAB is None, reason="No Langevin_BAOAB available")
def test_NPT_Langevin_BAOAB(cu_slab):
@pytest.mark.skipif(LangevinBAOAB is None, reason="No LangevinBAOAB available")
def test_NPT_LangevinBAOAB(cu_slab):
calc = EMT()

inputs = ConfigSet(cu_slab)
outputs = OutputSpec()

atoms_traj = md.md(inputs, outputs, calculator=calc, integrator="Langevin_BAOAB", steps=300, dt=1.0,
atoms_traj = md.md(inputs, outputs, calculator=calc, integrator="LangevinBAOAB", steps=300, dt=1.0,
temperature=500.0, temperature_tau=100/fs, pressure=0.0,
rng=np.random.default_rng(1))

@@ -176,14 +176,14 @@ def test_NPT_Langevin_BAOAB(cu_slab):
assert np.allclose(atoms_traj[0].cell, atoms_traj[-1].cell * cell_f)


@pytest.mark.skipif(Langevin_BAOAB is None, reason="No Langevin_BAOAB available")
def test_NPT_Langevin_BAOAB_hydro_F(cu_slab):
@pytest.mark.skipif(LangevinBAOAB is None, reason="No LangevinBAOAB available")
def test_NPT_LangevinBAOAB_hydro_F(cu_slab):
calc = EMT()

inputs = ConfigSet(cu_slab)
outputs = OutputSpec()

atoms_traj = md.md(inputs, outputs, calculator=calc, integrator="Langevin_BAOAB", steps=300, dt=1.0,
atoms_traj = md.md(inputs, outputs, calculator=calc, integrator="LangevinBAOAB", steps=300, dt=1.0,
temperature=500.0, temperature_tau=100/fs, pressure=0.0, hydrostatic=False,
rng=np.random.default_rng(1))

6 changes: 6 additions & 0 deletions tests/test_phonopy.py
@@ -7,6 +7,10 @@

from wfl.configset import ConfigSet, OutputSpec
from wfl.generate.phonopy import phonopy
try:
import phono3py
except ImportError:
phono3py = None


def test_phonopy(tmp_path):
@@ -33,6 +37,7 @@ def test_phonopy(tmp_path):
for v in at.positions[1:]:
assert min(np.linalg.norm(sc.positions[1:] - v, axis=1)) < 1.0e-7

@pytest.mark.skipif(phono3py is None, reason="No phono3py module")
def test_phono3py(tmp_path):
at0 = Atoms(numbers=[29], cell = [[0, 2, 2], [2, 0, 2], [2, 2, 0]], positions = [[0, 0, 0]], pbc = [True]*3)
at1 = Atoms(numbers=[29], cell = [[0, 1.9, 1.9], [1.9, 0, 1.9], [1.9, 1.9, 0]], positions = [[0, 0, 0]], pbc = [True]*3)
@@ -62,6 +67,7 @@ def test_phono3py(tmp_path):
assert sum([at.info["config_type"] == "phonon_cubic_1" for at in pert]) == 13*2


@pytest.mark.skipif(phono3py is None, reason="No phono3py module")
def test_phono3py_same_supercell(tmp_path):
at0 = Atoms(numbers=[29], cell = [[0, 2, 2], [2, 0, 2], [2, 2, 0]], positions = [[0, 0, 0]], pbc = [True]*3)
at1 = Atoms(numbers=[29], cell = [[0, 1.9, 1.9], [1.9, 0, 1.9], [1.9, 1.9, 0]], positions = [[0, 0, 0]], pbc = [True]*3)
2 changes: 1 addition & 1 deletion tests/test_remote_run.py
@@ -69,7 +69,7 @@ def test_vasp_fail(tmp_path, expyre_systems, monkeypatch, remoteinfo_env):

def do_vasp_fail(tmp_path, sys_name, monkeypatch, remoteinfo_env):
ri = {'sys_name': sys_name, 'job_name': 'pytest_vasp_'+sys_name,
'env_vars' : ['ASE_VASP_COMMAND=NONE', 'ASE_VASP_COMMAND_GAMMA=NONE'],
'env_vars' : ['ASE_VASP_COMMAND=NO_VASP_FAIL', 'ASE_VASP_COMMAND_GAMMA=NO_VASP_FAIL'],
'input_files' : ['POTCARs'],
'resources': {'max_time': '5m', 'num_nodes': 1},
'num_inputs_per_queued_job': 1, 'check_interval': 10}
2 changes: 1 addition & 1 deletion wfl/__version__.py
@@ -1 +1 @@
__version__ = "0.3.4"
__version__ = "0.3.5"