From a28a93c8c40f43ea8f3cd38d04a6d49b3d54fb42 Mon Sep 17 00:00:00 2001 From: Hao-Wei Pang Date: Fri, 22 Mar 2024 14:08:04 -0400 Subject: [PATCH 01/48] HWP Initial JuliaCall efforts These squashed commits contain all of the initial efforts by HWP to switch from pythoncall to JuliaCall --- .github/workflows/CI.yml | 14 +- .github/workflows/docs.yml | 5 +- .gitignore | 3 +- Dockerfile | 27 +- Makefile | 8 +- documentation/Makefile | 2 +- .../rmg/installation/anacondaDeveloper.rst | 14 +- documentation/source/users/rmg/running.rst | 14 +- environment.yml | 16 +- .../networks/{CH2NH2 => CH2NH2_mse}/input.py | 0 .../yaml_files/CH2NH.yml | 0 .../yaml_files/CH2NH2.yml | 0 .../yaml_files/CH2NH_to_CH2NH2.yml | 0 .../yaml_files/CH2NH_to_CH3NH.yml | 0 .../yaml_files/CH3NH.yml | 0 .../yaml_files/CH3NH_to_CH2NH2.yml | 0 .../{CH2NH2 => CH2NH2_mse}/yaml_files/H.yml | 0 .../{acetyl+O2 => acetyl+O2_cse}/input.py | 0 .../arkane/networks/acetyl+O2_rs/input.py | 284 ++++++++++++++++++ .../arkane/networks/acetyl+O2_sls/input.py | 284 ++++++++++++++++++ .../arkane/networks/acetyl+O2_slse/input.py | 284 ++++++++++++++++++ .../{n-butanol => n-butanol_msc}/input.py | 0 .../ch4_o2/simulate_ch4o2cat_RMS.ipynb | 2 +- ...ensitivity_analysis_superminimal_RMS.ipynb | 2 +- ...simulation_flux_rop_superminimal_RMS.ipynb | 2 +- pytest.ini | 1 - rmg.py | 2 +- rmgpy/pdep/sls.py | 57 ++-- rmgpy/rmg/reactors.py | 155 ++++++---- test/arkane/arkaneFunctionalInputTest.py | 2 +- utilities.py | 2 +- 31 files changed, 1032 insertions(+), 148 deletions(-) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/input.py (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH2NH.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH2NH2.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH2NH_to_CH2NH2.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH2NH_to_CH3NH.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH3NH.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/CH3NH_to_CH2NH2.yml (100%) rename examples/arkane/networks/{CH2NH2 => CH2NH2_mse}/yaml_files/H.yml (100%) rename examples/arkane/networks/{acetyl+O2 => acetyl+O2_cse}/input.py (100%) create mode 100644 examples/arkane/networks/acetyl+O2_rs/input.py create mode 100644 examples/arkane/networks/acetyl+O2_sls/input.py create mode 100644 examples/arkane/networks/acetyl+O2_slse/input.py rename examples/arkane/networks/{n-butanol => n-butanol_msc}/input.py (100%) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 3404fd956ad..57a925c1043 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -173,9 +173,11 @@ jobs: # RMS installation and linking to Julia - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). + # JULIA_CONDAPKG_EXE points to the existing conda/mamba to avoid JuliaCall from installing their own. See https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-a-Conda-environment. 
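For reference, the same CondaPkg configuration can be reproduced when JuliaCall is driven from Python instead of the `julia` CLI; a minimal sketch, with an illustrative mamba path that is not taken from this workflow:

    import os
    # Reuse the already-activated conda environment rather than letting
    # CondaPkg.jl resolve a private one for JuliaCall.
    os.environ["JULIA_CONDAPKG_BACKEND"] = "Current"
    os.environ["JULIA_CONDAPKG_EXE"] = "/opt/conda/condabin/mamba"  # illustrative path
    from juliacall import Main  # first import bootstraps Julia and CondaPkg
    Main.seval("using ReactionMechanismSimulator")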
run: | - python -c "import julia; julia.install(); import diffeqpy; diffeqpy.install()" - julia -e 'using Pkg; Pkg.add(PackageSpec(name="ReactionMechanismSimulator",rev="for_rmg")); using ReactionMechanismSimulator' + juliaup status + export JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba + julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' - name: Install Q2DTor run: echo "" | make q2dtor @@ -192,7 +194,7 @@ jobs: run: | for regr_test in aromatics liquid_oxidation nitrogen oxidation sulfur superminimal RMS_constantVIdealGasReactor_superminimal RMS_CSTR_liquid_oxidation RMS_liquidSurface_ch4o2cat fragment RMS_constantVIdealGasReactor_fragment minimal_surface; do - if python-jl rmg.py test/regression/"$regr_test"/input.py; then + if python rmg.py test/regression/"$regr_test"/input.py; then echo "$regr_test" "Executed Successfully" else echo "$regr_test" "Failed to Execute" | tee -a $GITHUB_STEP_SUMMARY @@ -282,7 +284,7 @@ jobs: echo "
" # Compare the edge and core - if python-jl scripts/checkModels.py \ + if python scripts/checkModels.py \ "$regr_test-core" \ $REFERENCE/"$regr_test"/chemkin/chem_annotated.inp \ $REFERENCE/"$regr_test"/chemkin/species_dictionary.txt \ @@ -299,7 +301,7 @@ jobs: cat "$regr_test-core.log" || (echo "Dumping the whole log failed, please download it from GitHub actions. Here are the first 100 lines:" && head -n100 "$regr_test-core.log") echo "
" echo "
" - if python-jl scripts/checkModels.py \ + if python scripts/checkModels.py \ "$regr_test-edge" \ $REFERENCE/"$regr_test"/chemkin/chem_edge_annotated.inp \ $REFERENCE/"$regr_test"/chemkin/species_edge_dictionary.txt \ @@ -320,7 +322,7 @@ jobs: if [ -f test/regression/"$regr_test"/regression_input.py ]; then echo "
" - if python-jl rmgpy/tools/regression.py \ + if python rmgpy/tools/regression.py \ test/regression/"$regr_test"/regression_input.py \ $REFERENCE/"$regr_test"/chemkin \ test/regression/"$regr_test"/chemkin diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d722f595a27..dcebd83930d 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -68,8 +68,9 @@ jobs: - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). run: | - python -c "import julia; julia.install(); import diffeqpy; diffeqpy.install()" - julia -e 'using Pkg; Pkg.add(PackageSpec(name="ReactionMechanismSimulator",rev="for_rmg")); using ReactionMechanismSimulator' + which julia + export JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba + julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' - name: Checkout gh-pages Branch uses: actions/checkout@v2 diff --git a/.gitignore b/.gitignore index d52d5c680f3..946eb94d4ac 100644 --- a/.gitignore +++ b/.gitignore @@ -47,7 +47,7 @@ nbproject/* .vscode/* # Unit test files -.coverage +.coverage* testing/* htmlcov/* @@ -117,3 +117,4 @@ examples/**/dictionary.txt examples/**/reactions.py examples/**/RMG_libraries examples/**/species +examples/**/plots diff --git a/Dockerfile b/Dockerfile index cafc18c2a9a..8c8741ba2d3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,20 +13,20 @@ RUN ln -snf /bin/bash /bin/sh # - libxrender1 required by RDKit RUN apt-get update && \ apt-get install -y \ - make \ - gcc \ - wget \ - git \ - g++ \ - libxrender1 && \ + make \ + gcc \ + wget \ + git \ + g++ \ + libxrender1 && \ apt-get autoremove -y && \ apt-get clean -y # Install conda -RUN wget "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-x86_64.sh" && \ - bash Miniforge3-Linux-x86_64.sh -b -p /miniforge && \ - rm Miniforge3-Linux-x86_64.sh -ENV PATH="$PATH:/miniforge/bin" +RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh && \ + bash Miniconda3-latest-Linux-x86_64.sh -b -p /miniconda && \ + rm Miniconda3-latest-Linux-x86_64.sh +ENV PATH="$PATH:/miniconda/bin" # Set solver backend to mamba for speed RUN conda install -n base conda-libmamba-solver && \ @@ -70,16 +70,15 @@ ENV PATH="$RUNNER_CWD/RMG-Py:$PATH" # setting this env variable fixes an issue with Julia precompilation on Windows ENV JULIA_CPU_TARGET="x86-64,haswell,skylake,broadwell,znver1,znver2,znver3,cascadelake,icelake-client,cooperlake,generic" RUN make && \ - julia -e 'using Pkg; Pkg.add(PackageSpec(name="PyCall",rev="master")); Pkg.add(PackageSpec(name="ReactionMechanismSimulator",rev=ENV["rmsbranch"])); using ReactionMechanismSimulator' && \ - python -c "import julia; julia.install(); import diffeqpy; diffeqpy.install()" + julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' # RMG-Py should now be installed and ready - trigger precompilation and test run -RUN python-jl rmg.py examples/rmg/minimal/input.py +RUN python rmg.py examples/rmg/minimal/input.py # delete the results, preserve input.py RUN mv examples/rmg/minimal/input.py . 
&& \ rm -rf examples/rmg/minimal/* && \ mv input.py examples/rmg/minimal/ # when running this image, open an interactive bash terminal inside the conda environment -RUN echo "source activate rmg_env" > ~/.bashrc +RUN echo "source activate rmg_env" >~/.bashrc ENTRYPOINT ["/bin/bash", "--login"] diff --git a/Makefile b/Makefile index a2f6b3fed24..06055daa3a7 100644 --- a/Makefile +++ b/Makefile @@ -62,16 +62,16 @@ decython: find . -name *.pyc -exec rm -f '{}' \; test-all: - python-jl -m pytest + python -m pytest test test-unittests: - python-jl -m pytest -m "not functional and not database" + python -m pytest -m "not functional and not database" test-functional: - python-jl -m pytest -m "functional" + python -m pytest -m "functional" test-database: - python-jl -m pytest -m "database" + python -m pytest -m "database" eg0: all mkdir -p testing/eg0 diff --git a/documentation/Makefile b/documentation/Makefile index d42279f6984..5c30e66cd72 100644 --- a/documentation/Makefile +++ b/documentation/Makefile @@ -3,7 +3,7 @@ # You can set these variables from the command line. SPHINXOPTS = -SPHINXBUILD = python-jl $$(which sphinx-build) +SPHINXBUILD = python $$(which sphinx-build) PAPER = BUILDDIR = build diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 3459111559c..76993ab1c1e 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -124,7 +124,7 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux #. Finally, you can run RMG from any location by typing the following (given that you have prepared the input file as ``input.py`` in the current folder). :: - python-jl replace/with/path/to/rmg.py input.py + python replace/with/path/to/rmg.py input.py You may now use RMG-Py, Arkane, as well as any of the :ref:`Standalone Modules ` included in the RMG-Py package. For more information about using conda, please check out the `conda user guide `_. @@ -140,7 +140,7 @@ You might have to edit them slightly to match your exact paths. Specifically, you will need ``/opt/miniconda3/envs/rmg_env`` to point to where your conda environment is located. This configuration will allow you to debug the rms_constant_V example, running through -python-jl. :: +python. :: { "name": "Python: rmg.py rms_constant_V", @@ -148,7 +148,7 @@ python-jl. 
:: "request": "launch", "cwd": "${workspaceFolder}/", "program": "rmg.py", - "python": "/opt/miniconda3/envs/rmg_env/bin/python-jl", + "python": "/opt/miniconda3/envs/rmg_env/bin/python", "args": [ "examples/rmg/rms_constant_V/input.py", ], @@ -167,7 +167,7 @@ Open one of the many test files named ``*Test.py`` in ``test/rmgpy`` before you "type": "python", "request": "launch", "program": "/opt/miniconda3/envs/rmg_env/bin/pytest", - "python": "/opt/miniconda3/envs/rmg_env/bin/python-jl", + "python": "/opt/miniconda3/envs/rmg_env/bin/python", "args": [ "--capture=no", "--verbose", @@ -187,7 +187,7 @@ This configuration will allow you to debug running all the database tests.:: "type": "python", "request": "launch", "program": "/opt/miniconda3/envs/rmg_env/bin/pytest", - "python": "/opt/miniconda3/envs/rmg_env/bin/python-jl", + "python": "/opt/miniconda3/envs/rmg_env/bin/python", "args": [ "--capture=no", "--verbose", @@ -208,7 +208,7 @@ This configuration will allow you to use the debugger breakpoints inside unit te "request": "launch", "program": "${file}", "purpose": ["debug-test"], - "python": "/opt/miniconda3/envs/rmg_env/bin/python-jl", + "python": "/opt/miniconda3/envs/rmg_env/bin/python", "console": "integratedTerminal", "justMyCode": false, "env": {"PYTEST_ADDOPTS": "--no-cov",} // without disabling coverage VS Code doesn't stop at breakpoints while debugging because pytest-cov is using the same technique to access the source code being run @@ -256,7 +256,7 @@ To configure the rest of the settings, find the ``settings.json`` file in your ` You can use the following settings to configure the pytest framework:: "python.testing.pytestEnabled": true, - "python.testing.pytestPath": "python-jl -m pytest", + "python.testing.pytestPath": "python -m pytest", "python.testing.pytestArgs": [ "-p", "julia.pytestplugin", "--julia-compiled-modules=no", diff --git a/documentation/source/users/rmg/running.rst b/documentation/source/users/rmg/running.rst index 038aadb8935..0755c6199e7 100755 --- a/documentation/source/users/rmg/running.rst +++ b/documentation/source/users/rmg/running.rst @@ -10,7 +10,7 @@ Running a basic RMG job is straightforward, as shown in the example below. Howev Basic run:: - python-jl rmg.py input.py + python rmg.py input.py .. _inputflags: @@ -19,7 +19,7 @@ Input flags The options for input flags can be found in ``/RMG-Py/rmgpy/util.py``. Running :: - python-jl rmg.py -h + python rmg.py -h at the command line will print the documentation from ``util.py``, which is reproduced below for convenience:: @@ -63,23 +63,23 @@ Some representative example usages are shown below. Run by restarting from a seed mechanism:: - python-jl rmg.py -r path/to/seed/ input.py + python rmg.py -r path/to/seed/ input.py Run with CPU time profiling:: - python-jl rmg.py -p input.py + python rmg.py -p input.py Run with multiprocessing for reaction generation and QMTP:: - python-jl rmg.py -n input.py + python rmg.py -n input.py Run with setting a limit on the maximum execution time:: - python-jl rmg.py -t input.py + python rmg.py -t input.py Run with setting a limit on the maximum number of iterations:: - python-jl rmg.py -i input.py + python rmg.py -i input.py Details on the multiprocessing implementation diff --git a/environment.yml b/environment.yml index 39ca25e1d92..58e4440ed3a 100644 --- a/environment.yml +++ b/environment.yml @@ -18,6 +18,8 @@ # moved diffeqpy to pip and (temporarily) removed chemprop # - April 17, 2024 Limit versions of cclib at advice of maintainers. 
# - August 4, 2024 Restricted pyrms to <2 +# - May 14, 2024 Removed diffeqpy by switching to call SciMLBase and Sundials using JuliaCall + name: rmg_env channels: - rmg @@ -51,8 +53,8 @@ dependencies: - conda-forge::rdkit >=2022.09.1 # general-purpose external software tools - - conda-forge::julia=1.9.1 - - conda-forge::pyjulia >=0.6 + - conda-forge::juliaup + - conda-forge::pyjuliacall # for calling julia packages # Python tools - python >=3.7 @@ -90,18 +92,8 @@ dependencies: # packages we maintain - rmg::pydas >=1.0.3 - rmg::pydqed >=1.0.3 - - rmg::pyrms <2 - rmg::symmetry -# packages we would like to stop maintaining (and why) - - rmg::diffeqpy - # we should use the official verison https://github.com/SciML/diffeqpy), - # rather than ours (which is only made so that we can get it from conda) - # It is only on pip, so we will need to do something like: - # https://stackoverflow.com/a/35245610 - # Note that _some other_ dep. in this list requires diffeqpy in its recipe - # which will cause it to be downloaded from the rmg conda channel - # configure packages to use OpenBLAS instead of Intel MKL - blas=*=openblas diff --git a/examples/arkane/networks/CH2NH2/input.py b/examples/arkane/networks/CH2NH2_mse/input.py similarity index 100% rename from examples/arkane/networks/CH2NH2/input.py rename to examples/arkane/networks/CH2NH2_mse/input.py diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH2NH.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH2NH.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH2NH2.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH2.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH2NH2.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH2.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH2NH_to_CH2NH2.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH_to_CH2NH2.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH2NH_to_CH2NH2.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH_to_CH2NH2.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH2NH_to_CH3NH.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH_to_CH3NH.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH2NH_to_CH3NH.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH2NH_to_CH3NH.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH3NH.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH3NH.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH3NH.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH3NH.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/CH3NH_to_CH2NH2.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/CH3NH_to_CH2NH2.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/CH3NH_to_CH2NH2.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/CH3NH_to_CH2NH2.yml diff --git a/examples/arkane/networks/CH2NH2/yaml_files/H.yml b/examples/arkane/networks/CH2NH2_mse/yaml_files/H.yml similarity index 100% rename from examples/arkane/networks/CH2NH2/yaml_files/H.yml rename to examples/arkane/networks/CH2NH2_mse/yaml_files/H.yml diff --git a/examples/arkane/networks/acetyl+O2/input.py b/examples/arkane/networks/acetyl+O2_cse/input.py similarity index 100% rename 
from examples/arkane/networks/acetyl+O2/input.py rename to examples/arkane/networks/acetyl+O2_cse/input.py diff --git a/examples/arkane/networks/acetyl+O2_rs/input.py b/examples/arkane/networks/acetyl+O2_rs/input.py new file mode 100644 index 00000000000..84a57961fa6 --- /dev/null +++ b/examples/arkane/networks/acetyl+O2_rs/input.py @@ -0,0 +1,284 @@ +################################################################################ +# +# Arkane input file for acetyl + O2 pressure-dependent reaction network +# +################################################################################ + +title = 'acetyl + oxygen' + +description = \ +""" +The chemically-activated reaction of acetyl with oxygen. This system is of +interest in atmospheric chemistry as a step in the conversion of acetaldehyde +to the secondary pollutant peroxyacetylnitrate (PAN); it is also potentially +important in the ignition chemistry of ethanol. +""" + +species( + label = 'acetylperoxy', + structure = SMILES('CC(=O)O[O]'), + E0 = (-34.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([54.2977,104.836,156.05],"amu*angstrom^2"), symmetry=1), + HarmonicOscillator(frequencies=([319.695,500.474,536.674,543.894,727.156,973.365,1037.77,1119.72,1181.55,1391.11,1449.53,1454.72,1870.51,3037.12,3096.93,3136.39],"cm^-1")), + HinderedRotor(inertia=(7.38359,"amu*angstrom^2"), symmetry=1, fourier=([[-1.95191,-11.8215,0.740041,-0.049118,-0.464522],[0.000227764,0.00410782,-0.000805364,-0.000548218,-0.000266277]],"kJ/mol")), + HinderedRotor(inertia=(2.94723,"amu*angstrom^2"), symmetry=3, fourier=([[0.130647,0.0401507,-2.54582,-0.0436065,-0.120982],[-0.000701659,-0.000989654,0.00783349,-0.00140978,-0.00145843]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'hydroperoxylvinoxy', + structure = SMILES('[CH2]C(=O)OO'), + E0 = (-32.4,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([44.8034,110.225,155.029],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([318.758,420.907,666.223,675.962,752.824,864.66,998.471,1019.54,1236.21,1437.91,1485.74,1687.9,3145.44,3262.88,3434.34],"cm^-1")), + HinderedRotor(inertia=(1.68464,"u*angstrom**2"), symmetry=2, fourier=([[0.359649,-16.1155,-0.593311,1.72918,0.256314],[-7.42981e-06,-0.000238057,3.29276e-05,-6.62608e-05,8.8443e-05]],"kJ/mol")), + HinderedRotor(inertia=(8.50433,"u*angstrom**2"), symmetry=1, fourier=([[-7.53504,-23.4471,-3.3974,-0.940593,-0.313674],[-4.58248,-2.47177,0.550012,1.03771,0.844226]],"kJ/mol")), + HinderedRotor(inertia=(0.803309,"u*angstrom**2"), symmetry=1, fourier=([[-8.65946,-3.97888,-1.13469,-0.402197,-0.145101],[4.41884e-05,4.83249e-05,1.30275e-05,-1.31353e-05,-6.66878e-06]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'acetyl', + structure = SMILES('C[C]=O'), + E0 = (0.0,'kcal/mol'), #(-20.5205,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(43.05,"g/mol")), + NonlinearRotor(inertia=([5.94518,50.8166,53.6436],"u*angstrom**2"), 
symmetry=1), + HarmonicOscillator(frequencies=([464.313,845.126,1010.54,1038.43,1343.54,1434.69,1442.25,1906.18,2985.46,3076.57,3079.46],"cm^-1")), + HinderedRotor(inertia=(1.61752,"u*angstrom**2"), symmetry=3, barrier=(2.00242,"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'oxygen', + structure = SMILES('[O][O]'), + E0 = (0.0,'kcal/mol'), #(-5.74557,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(32.00,"g/mol")), + LinearRotor(inertia=(11.6056,"u*angstrom**2"), symmetry=2), + HarmonicOscillator(frequencies=([1621.54],"cm^-1")), + ], + spinMultiplicity = 3, + opticalIsomers = 1, +) + +species( + label = 'ketene', + structure = SMILES('C=C=O'), + E0 = (-6.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(42.0106,"g/mol")), + NonlinearRotor(inertia=([1.76922,48.8411,50.6103],"u*angstrom**2"), symmetry=2), + HarmonicOscillator(frequencies=([441.622,548.317,592.155,981.379,1159.66,1399.86,2192.1,3150.02,3240.58],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + +species( + label = 'lactone', + structure = SMILES('C1OC1(=O)'), + E0 = (-30.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(58.0055,"g/mol")), + NonlinearRotor(inertia=([20.1707,62.4381,79.1616],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([484.387,527.771,705.332,933.372,985.563,1050.26,1107.93,1176.43,1466.54,1985.09,3086.23,3186.46],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + + + +species( + label = 'hydroxyl', + structure = SMILES('[OH]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(17.0027,"g/mol")), + LinearRotor(inertia=(0.899988,"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([3676.39],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'hydroperoxyl', + structure = SMILES('O[O]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(32.9977,"g/mol")), + NonlinearRotor(inertia=([0.811564,14.9434,15.755],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([1156.77,1424.28,3571.81],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'nitrogen', + structure = SMILES('N#N'), + molecularWeight = (28.04,"g/mol"), + collisionModel = TransportData(sigma=(3.70,'angstrom'), epsilon=(94.9,'K')), + reactive = False +) + +################################################################################ + +transitionState( + label = 'entrance1', + E0 = (0.0,'kcal/mol'), +) + +transitionState( + label = 'isom1', + E0 = (-5.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([49.3418,103.697,149.682],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([148.551,306.791,484.573,536.709,599.366,675.538,832.594,918.413,1022.28,1031.45,1101.01,1130.05,1401.51,1701.26,1844.17,3078.6,3163.07],"cm^-1"), quantum=True), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency = (-1679.04,'cm^-1'), +) + +transitionState( + label = 'exit1', + E0 = (0.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([55.4256, 136.1886, 188.2442],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([59.9256,204.218,352.811,466.297,479.997,542.345,653.897,886.657,1017.91,1079.17,1250.02,1309.14,1370.36,1678.52,2162.41,3061.53,3135.55],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-1053.25,'cm^-1'), +) + +transitionState( + label = 'exit2', + E0 = (-4.6,'kcal/mol'), 
+ modes = [ + IdealGasTranslation(mass=(75.0082,"g/mol"), quantum=False), + NonlinearRotor(inertia=([51.7432,119.373,171.117],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([250.311,383.83,544.382,578.988,595.324,705.422,964.712,1103.25,1146.91,1415.98,1483.33,1983.79,3128,3143.84,3255.29],"cm^-1")), + HinderedRotor(inertia=(9.35921,"u*angstrom**2"), symmetry=1, fourier=([[-11.2387,-12.5928,-3.87844,1.13314,-0.358812],[-1.59863,-8.03329,-5.05235,3.13723,2.45989]],"kJ/mol")), + HinderedRotor(inertia=(0.754698,"u*angstrom**2"), symmetry=1, barrier=(47.7645,"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-404.271,'cm^-1'), +) + +transitionState( + label = 'exit3', + E0 = (-7.2,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([53.2821, 120.4050, 170.1570],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([152.155,181.909,311.746,348.646,608.487,624.378,805.347,948.875,995.256,996.982,1169.1,1412.6,1834.83,3124.43,3245.2,3634.45],"cm^-1")), + HinderedRotor(inertia=(0.813269,"u*angstrom**2"), symmetry=1, fourier=([[-1.15338,-2.18664,-0.669531,-0.11502,-0.0512599],[0.00245222,0.0107485,0.00334564,-0.00165288,-0.0028674]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-618.234,'cm^-1'), +) + +################################################################################ + +reaction( + label = 'entrance1', + reactants = ['acetyl', 'oxygen'], + products = ['acetylperoxy'], + transitionState = 'entrance1', + kinetics = Arrhenius(A=(2.65e6,'m^3/(mol*s)'), n=0.0, Ea=(0.0,'kcal/mol'), T0=(1,"K")), +) + +reaction( + label = 'isom1', + reactants = ['acetylperoxy'], + products = ['hydroperoxylvinoxy'], + transitionState = 'isom1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit1', + reactants = ['acetylperoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit2', + reactants = ['hydroperoxylvinoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit2', + tunneling = 'Eckart', +) + +reaction( + label = 'exit3', + reactants = ['hydroperoxylvinoxy'], + products = ['lactone', 'hydroxyl'], + transitionState = 'exit3', + tunneling = 'Eckart', +) + +################################################################################# + +network( + label = 'acetyl + O2', + isomers = [ + 'acetylperoxy', + 'hydroperoxylvinoxy', + ], + reactants = [ + ('acetyl', 'oxygen'), + ('ketene', 'hydroperoxyl'), + ('lactone', 'hydroxyl') + ], + bathGas = { + 'nitrogen': 1.0, + } +) + +################################################################################# + +pressureDependence( + 'acetyl + O2', + Tmin=(300.0,'K'), Tmax=(2000.0,'K'), Tcount=8, + Pmin=(0.01,'bar'), Pmax=(100.0,'bar'), Pcount=5, + maximumGrainSize = (1.0,'kcal/mol'), + minimumGrainCount = 250, + method = 'reservoir state', + interpolationModel = ('chebyshev', 6, 4), + activeJRotor = True, +) diff --git a/examples/arkane/networks/acetyl+O2_sls/input.py b/examples/arkane/networks/acetyl+O2_sls/input.py new file mode 100644 index 00000000000..4595e288fc1 --- /dev/null +++ b/examples/arkane/networks/acetyl+O2_sls/input.py @@ -0,0 +1,284 @@ +################################################################################ +# +# Arkane input file for acetyl + O2 pressure-dependent reaction network +# +################################################################################ + +title = 'acetyl + oxygen' + 
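+# NOTE: this input appears identical to the acetyl+O2_rs and acetyl+O2_slse examples
+# except for the pressureDependence() `method` setting ('simulation least squares' here
+# vs. 'reservoir state' and 'simulation least squares eigen' in the other two).
+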
+description = \ +""" +The chemically-activated reaction of acetyl with oxygen. This system is of +interest in atmospheric chemistry as a step in the conversion of acetaldehyde +to the secondary pollutant peroxyacetylnitrate (PAN); it is also potentially +important in the ignition chemistry of ethanol. +""" + +species( + label = 'acetylperoxy', + structure = SMILES('CC(=O)O[O]'), + E0 = (-34.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([54.2977,104.836,156.05],"amu*angstrom^2"), symmetry=1), + HarmonicOscillator(frequencies=([319.695,500.474,536.674,543.894,727.156,973.365,1037.77,1119.72,1181.55,1391.11,1449.53,1454.72,1870.51,3037.12,3096.93,3136.39],"cm^-1")), + HinderedRotor(inertia=(7.38359,"amu*angstrom^2"), symmetry=1, fourier=([[-1.95191,-11.8215,0.740041,-0.049118,-0.464522],[0.000227764,0.00410782,-0.000805364,-0.000548218,-0.000266277]],"kJ/mol")), + HinderedRotor(inertia=(2.94723,"amu*angstrom^2"), symmetry=3, fourier=([[0.130647,0.0401507,-2.54582,-0.0436065,-0.120982],[-0.000701659,-0.000989654,0.00783349,-0.00140978,-0.00145843]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'hydroperoxylvinoxy', + structure = SMILES('[CH2]C(=O)OO'), + E0 = (-32.4,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([44.8034,110.225,155.029],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([318.758,420.907,666.223,675.962,752.824,864.66,998.471,1019.54,1236.21,1437.91,1485.74,1687.9,3145.44,3262.88,3434.34],"cm^-1")), + HinderedRotor(inertia=(1.68464,"u*angstrom**2"), symmetry=2, fourier=([[0.359649,-16.1155,-0.593311,1.72918,0.256314],[-7.42981e-06,-0.000238057,3.29276e-05,-6.62608e-05,8.8443e-05]],"kJ/mol")), + HinderedRotor(inertia=(8.50433,"u*angstrom**2"), symmetry=1, fourier=([[-7.53504,-23.4471,-3.3974,-0.940593,-0.313674],[-4.58248,-2.47177,0.550012,1.03771,0.844226]],"kJ/mol")), + HinderedRotor(inertia=(0.803309,"u*angstrom**2"), symmetry=1, fourier=([[-8.65946,-3.97888,-1.13469,-0.402197,-0.145101],[4.41884e-05,4.83249e-05,1.30275e-05,-1.31353e-05,-6.66878e-06]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'acetyl', + structure = SMILES('C[C]=O'), + E0 = (0.0,'kcal/mol'), #(-20.5205,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(43.05,"g/mol")), + NonlinearRotor(inertia=([5.94518,50.8166,53.6436],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([464.313,845.126,1010.54,1038.43,1343.54,1434.69,1442.25,1906.18,2985.46,3076.57,3079.46],"cm^-1")), + HinderedRotor(inertia=(1.61752,"u*angstrom**2"), symmetry=3, barrier=(2.00242,"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'oxygen', + structure = SMILES('[O][O]'), + E0 = (0.0,'kcal/mol'), #(-5.74557,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(32.00,"g/mol")), + LinearRotor(inertia=(11.6056,"u*angstrom**2"), symmetry=2), + HarmonicOscillator(frequencies=([1621.54],"cm^-1")), + ], + spinMultiplicity = 3, + opticalIsomers = 1, +) + 
+species( + label = 'ketene', + structure = SMILES('C=C=O'), + E0 = (-6.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(42.0106,"g/mol")), + NonlinearRotor(inertia=([1.76922,48.8411,50.6103],"u*angstrom**2"), symmetry=2), + HarmonicOscillator(frequencies=([441.622,548.317,592.155,981.379,1159.66,1399.86,2192.1,3150.02,3240.58],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + +species( + label = 'lactone', + structure = SMILES('C1OC1(=O)'), + E0 = (-30.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(58.0055,"g/mol")), + NonlinearRotor(inertia=([20.1707,62.4381,79.1616],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([484.387,527.771,705.332,933.372,985.563,1050.26,1107.93,1176.43,1466.54,1985.09,3086.23,3186.46],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + + + +species( + label = 'hydroxyl', + structure = SMILES('[OH]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(17.0027,"g/mol")), + LinearRotor(inertia=(0.899988,"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([3676.39],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'hydroperoxyl', + structure = SMILES('O[O]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(32.9977,"g/mol")), + NonlinearRotor(inertia=([0.811564,14.9434,15.755],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([1156.77,1424.28,3571.81],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'nitrogen', + structure = SMILES('N#N'), + molecularWeight = (28.04,"g/mol"), + collisionModel = TransportData(sigma=(3.70,'angstrom'), epsilon=(94.9,'K')), + reactive = False +) + +################################################################################ + +transitionState( + label = 'entrance1', + E0 = (0.0,'kcal/mol'), +) + +transitionState( + label = 'isom1', + E0 = (-5.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([49.3418,103.697,149.682],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([148.551,306.791,484.573,536.709,599.366,675.538,832.594,918.413,1022.28,1031.45,1101.01,1130.05,1401.51,1701.26,1844.17,3078.6,3163.07],"cm^-1"), quantum=True), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency = (-1679.04,'cm^-1'), +) + +transitionState( + label = 'exit1', + E0 = (0.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([55.4256, 136.1886, 188.2442],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([59.9256,204.218,352.811,466.297,479.997,542.345,653.897,886.657,1017.91,1079.17,1250.02,1309.14,1370.36,1678.52,2162.41,3061.53,3135.55],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-1053.25,'cm^-1'), +) + +transitionState( + label = 'exit2', + E0 = (-4.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.0082,"g/mol"), quantum=False), + NonlinearRotor(inertia=([51.7432,119.373,171.117],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([250.311,383.83,544.382,578.988,595.324,705.422,964.712,1103.25,1146.91,1415.98,1483.33,1983.79,3128,3143.84,3255.29],"cm^-1")), + HinderedRotor(inertia=(9.35921,"u*angstrom**2"), symmetry=1, fourier=([[-11.2387,-12.5928,-3.87844,1.13314,-0.358812],[-1.59863,-8.03329,-5.05235,3.13723,2.45989]],"kJ/mol")), + HinderedRotor(inertia=(0.754698,"u*angstrom**2"), symmetry=1, barrier=(47.7645,"kJ/mol")), + ], + spinMultiplicity = 
2, + opticalIsomers = 1, + frequency=(-404.271,'cm^-1'), +) + +transitionState( + label = 'exit3', + E0 = (-7.2,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([53.2821, 120.4050, 170.1570],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([152.155,181.909,311.746,348.646,608.487,624.378,805.347,948.875,995.256,996.982,1169.1,1412.6,1834.83,3124.43,3245.2,3634.45],"cm^-1")), + HinderedRotor(inertia=(0.813269,"u*angstrom**2"), symmetry=1, fourier=([[-1.15338,-2.18664,-0.669531,-0.11502,-0.0512599],[0.00245222,0.0107485,0.00334564,-0.00165288,-0.0028674]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-618.234,'cm^-1'), +) + +################################################################################ + +reaction( + label = 'entrance1', + reactants = ['acetyl', 'oxygen'], + products = ['acetylperoxy'], + transitionState = 'entrance1', + kinetics = Arrhenius(A=(2.65e6,'m^3/(mol*s)'), n=0.0, Ea=(0.0,'kcal/mol'), T0=(1,"K")), +) + +reaction( + label = 'isom1', + reactants = ['acetylperoxy'], + products = ['hydroperoxylvinoxy'], + transitionState = 'isom1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit1', + reactants = ['acetylperoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit2', + reactants = ['hydroperoxylvinoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit2', + tunneling = 'Eckart', +) + +reaction( + label = 'exit3', + reactants = ['hydroperoxylvinoxy'], + products = ['lactone', 'hydroxyl'], + transitionState = 'exit3', + tunneling = 'Eckart', +) + +################################################################################# + +network( + label = 'acetyl + O2', + isomers = [ + 'acetylperoxy', + 'hydroperoxylvinoxy', + ], + reactants = [ + ('acetyl', 'oxygen'), + ('ketene', 'hydroperoxyl'), + ('lactone', 'hydroxyl') + ], + bathGas = { + 'nitrogen': 1.0, + } +) + +################################################################################# + +pressureDependence( + 'acetyl + O2', + Tmin=(300.0,'K'), Tmax=(2000.0,'K'), Tcount=8, + Pmin=(0.01,'bar'), Pmax=(100.0,'bar'), Pcount=5, + maximumGrainSize = (1.0,'kcal/mol'), + minimumGrainCount = 250, + method = 'simulation least squares', + interpolationModel = ('chebyshev', 6, 4), + activeJRotor = True, +) diff --git a/examples/arkane/networks/acetyl+O2_slse/input.py b/examples/arkane/networks/acetyl+O2_slse/input.py new file mode 100644 index 00000000000..46114439b47 --- /dev/null +++ b/examples/arkane/networks/acetyl+O2_slse/input.py @@ -0,0 +1,284 @@ +################################################################################ +# +# Arkane input file for acetyl + O2 pressure-dependent reaction network +# +################################################################################ + +title = 'acetyl + oxygen' + +description = \ +""" +The chemically-activated reaction of acetyl with oxygen. This system is of +interest in atmospheric chemistry as a step in the conversion of acetaldehyde +to the secondary pollutant peroxyacetylnitrate (PAN); it is also potentially +important in the ignition chemistry of ethanol. 
+""" + +species( + label = 'acetylperoxy', + structure = SMILES('CC(=O)O[O]'), + E0 = (-34.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([54.2977,104.836,156.05],"amu*angstrom^2"), symmetry=1), + HarmonicOscillator(frequencies=([319.695,500.474,536.674,543.894,727.156,973.365,1037.77,1119.72,1181.55,1391.11,1449.53,1454.72,1870.51,3037.12,3096.93,3136.39],"cm^-1")), + HinderedRotor(inertia=(7.38359,"amu*angstrom^2"), symmetry=1, fourier=([[-1.95191,-11.8215,0.740041,-0.049118,-0.464522],[0.000227764,0.00410782,-0.000805364,-0.000548218,-0.000266277]],"kJ/mol")), + HinderedRotor(inertia=(2.94723,"amu*angstrom^2"), symmetry=3, fourier=([[0.130647,0.0401507,-2.54582,-0.0436065,-0.120982],[-0.000701659,-0.000989654,0.00783349,-0.00140978,-0.00145843]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'hydroperoxylvinoxy', + structure = SMILES('[CH2]C(=O)OO'), + E0 = (-32.4,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([44.8034,110.225,155.029],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([318.758,420.907,666.223,675.962,752.824,864.66,998.471,1019.54,1236.21,1437.91,1485.74,1687.9,3145.44,3262.88,3434.34],"cm^-1")), + HinderedRotor(inertia=(1.68464,"u*angstrom**2"), symmetry=2, fourier=([[0.359649,-16.1155,-0.593311,1.72918,0.256314],[-7.42981e-06,-0.000238057,3.29276e-05,-6.62608e-05,8.8443e-05]],"kJ/mol")), + HinderedRotor(inertia=(8.50433,"u*angstrom**2"), symmetry=1, fourier=([[-7.53504,-23.4471,-3.3974,-0.940593,-0.313674],[-4.58248,-2.47177,0.550012,1.03771,0.844226]],"kJ/mol")), + HinderedRotor(inertia=(0.803309,"u*angstrom**2"), symmetry=1, fourier=([[-8.65946,-3.97888,-1.13469,-0.402197,-0.145101],[4.41884e-05,4.83249e-05,1.30275e-05,-1.31353e-05,-6.66878e-06]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + molecularWeight = (75.04,"g/mol"), + collisionModel = TransportData(sigma=(5.09,'angstrom'), epsilon=(473,'K')), + energyTransferModel = SingleExponentialDown( + alpha0 = (0.5718,'kcal/mol'), + T0 = (300,'K'), + n = 0.85, + ), +) + +species( + label = 'acetyl', + structure = SMILES('C[C]=O'), + E0 = (0.0,'kcal/mol'), #(-20.5205,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(43.05,"g/mol")), + NonlinearRotor(inertia=([5.94518,50.8166,53.6436],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([464.313,845.126,1010.54,1038.43,1343.54,1434.69,1442.25,1906.18,2985.46,3076.57,3079.46],"cm^-1")), + HinderedRotor(inertia=(1.61752,"u*angstrom**2"), symmetry=3, barrier=(2.00242,"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'oxygen', + structure = SMILES('[O][O]'), + E0 = (0.0,'kcal/mol'), #(-5.74557,"kJ/mol") + modes = [ + IdealGasTranslation(mass=(32.00,"g/mol")), + LinearRotor(inertia=(11.6056,"u*angstrom**2"), symmetry=2), + HarmonicOscillator(frequencies=([1621.54],"cm^-1")), + ], + spinMultiplicity = 3, + opticalIsomers = 1, +) + +species( + label = 'ketene', + structure = SMILES('C=C=O'), + E0 = (-6.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(42.0106,"g/mol")), + NonlinearRotor(inertia=([1.76922,48.8411,50.6103],"u*angstrom**2"), symmetry=2), + 
HarmonicOscillator(frequencies=([441.622,548.317,592.155,981.379,1159.66,1399.86,2192.1,3150.02,3240.58],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + +species( + label = 'lactone', + structure = SMILES('C1OC1(=O)'), + E0 = (-30.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(58.0055,"g/mol")), + NonlinearRotor(inertia=([20.1707,62.4381,79.1616],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([484.387,527.771,705.332,933.372,985.563,1050.26,1107.93,1176.43,1466.54,1985.09,3086.23,3186.46],"cm^-1")), + ], + spinMultiplicity = 1, + opticalIsomers = 1, +) + + + +species( + label = 'hydroxyl', + structure = SMILES('[OH]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(17.0027,"g/mol")), + LinearRotor(inertia=(0.899988,"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([3676.39],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'hydroperoxyl', + structure = SMILES('O[O]'), + E0 = (0.0,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(32.9977,"g/mol")), + NonlinearRotor(inertia=([0.811564,14.9434,15.755],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([1156.77,1424.28,3571.81],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, +) + +species( + label = 'nitrogen', + structure = SMILES('N#N'), + molecularWeight = (28.04,"g/mol"), + collisionModel = TransportData(sigma=(3.70,'angstrom'), epsilon=(94.9,'K')), + reactive = False +) + +################################################################################ + +transitionState( + label = 'entrance1', + E0 = (0.0,'kcal/mol'), +) + +transitionState( + label = 'isom1', + E0 = (-5.8,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([49.3418,103.697,149.682],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([148.551,306.791,484.573,536.709,599.366,675.538,832.594,918.413,1022.28,1031.45,1101.01,1130.05,1401.51,1701.26,1844.17,3078.6,3163.07],"cm^-1"), quantum=True), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency = (-1679.04,'cm^-1'), +) + +transitionState( + label = 'exit1', + E0 = (0.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([55.4256, 136.1886, 188.2442],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([59.9256,204.218,352.811,466.297,479.997,542.345,653.897,886.657,1017.91,1079.17,1250.02,1309.14,1370.36,1678.52,2162.41,3061.53,3135.55],"cm^-1")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-1053.25,'cm^-1'), +) + +transitionState( + label = 'exit2', + E0 = (-4.6,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.0082,"g/mol"), quantum=False), + NonlinearRotor(inertia=([51.7432,119.373,171.117],"u*angstrom**2"), symmetry=1, quantum=False), + HarmonicOscillator(frequencies=([250.311,383.83,544.382,578.988,595.324,705.422,964.712,1103.25,1146.91,1415.98,1483.33,1983.79,3128,3143.84,3255.29],"cm^-1")), + HinderedRotor(inertia=(9.35921,"u*angstrom**2"), symmetry=1, fourier=([[-11.2387,-12.5928,-3.87844,1.13314,-0.358812],[-1.59863,-8.03329,-5.05235,3.13723,2.45989]],"kJ/mol")), + HinderedRotor(inertia=(0.754698,"u*angstrom**2"), symmetry=1, barrier=(47.7645,"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-404.271,'cm^-1'), +) + +transitionState( + label = 'exit3', + E0 = (-7.2,'kcal/mol'), + modes = [ + IdealGasTranslation(mass=(75.04,"g/mol")), + NonlinearRotor(inertia=([53.2821, 120.4050, 
170.1570],"u*angstrom**2"), symmetry=1), + HarmonicOscillator(frequencies=([152.155,181.909,311.746,348.646,608.487,624.378,805.347,948.875,995.256,996.982,1169.1,1412.6,1834.83,3124.43,3245.2,3634.45],"cm^-1")), + HinderedRotor(inertia=(0.813269,"u*angstrom**2"), symmetry=1, fourier=([[-1.15338,-2.18664,-0.669531,-0.11502,-0.0512599],[0.00245222,0.0107485,0.00334564,-0.00165288,-0.0028674]],"kJ/mol")), + ], + spinMultiplicity = 2, + opticalIsomers = 1, + frequency=(-618.234,'cm^-1'), +) + +################################################################################ + +reaction( + label = 'entrance1', + reactants = ['acetyl', 'oxygen'], + products = ['acetylperoxy'], + transitionState = 'entrance1', + kinetics = Arrhenius(A=(2.65e6,'m^3/(mol*s)'), n=0.0, Ea=(0.0,'kcal/mol'), T0=(1,"K")), +) + +reaction( + label = 'isom1', + reactants = ['acetylperoxy'], + products = ['hydroperoxylvinoxy'], + transitionState = 'isom1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit1', + reactants = ['acetylperoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit1', + tunneling = 'Eckart', +) + +reaction( + label = 'exit2', + reactants = ['hydroperoxylvinoxy'], + products = ['ketene', 'hydroperoxyl'], + transitionState = 'exit2', + tunneling = 'Eckart', +) + +reaction( + label = 'exit3', + reactants = ['hydroperoxylvinoxy'], + products = ['lactone', 'hydroxyl'], + transitionState = 'exit3', + tunneling = 'Eckart', +) + +################################################################################# + +network( + label = 'acetyl + O2', + isomers = [ + 'acetylperoxy', + 'hydroperoxylvinoxy', + ], + reactants = [ + ('acetyl', 'oxygen'), + ('ketene', 'hydroperoxyl'), + ('lactone', 'hydroxyl') + ], + bathGas = { + 'nitrogen': 1.0, + } +) + +################################################################################# + +pressureDependence( + 'acetyl + O2', + Tmin=(300.0,'K'), Tmax=(2000.0,'K'), Tcount=8, + Pmin=(0.01,'bar'), Pmax=(100.0,'bar'), Pcount=5, + maximumGrainSize = (1.0,'kcal/mol'), + minimumGrainCount = 250, + method = 'simulation least squares eigen', + interpolationModel = ('chebyshev', 6, 4), + activeJRotor = True, +) diff --git a/examples/arkane/networks/n-butanol/input.py b/examples/arkane/networks/n-butanol_msc/input.py similarity index 100% rename from examples/arkane/networks/n-butanol/input.py rename to examples/arkane/networks/n-butanol_msc/input.py diff --git a/examples/rmg/catalysis/ch4_o2/simulate_ch4o2cat_RMS.ipynb b/examples/rmg/catalysis/ch4_o2/simulate_ch4o2cat_RMS.ipynb index 700eb3cac31..371bfc2cadf 100644 --- a/examples/rmg/catalysis/ch4_o2/simulate_ch4o2cat_RMS.ipynb +++ b/examples/rmg/catalysis/ch4_o2/simulate_ch4o2cat_RMS.ipynb @@ -49,7 +49,7 @@ "metadata": {}, "outputs": [], "source": [ - "# ! python-jl /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/catalysis/ch4_o2/input.py" + "# ! python /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/catalysis/ch4_o2/input.py" ] }, { diff --git a/examples/rmg/superminimal/sensitivity_analysis_superminimal_RMS.ipynb b/examples/rmg/superminimal/sensitivity_analysis_superminimal_RMS.ipynb index ba6b3b6a98e..44befc780f1 100644 --- a/examples/rmg/superminimal/sensitivity_analysis_superminimal_RMS.ipynb +++ b/examples/rmg/superminimal/sensitivity_analysis_superminimal_RMS.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "# ! python-jl /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/superminimal/input.py" + "# ! 
python /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/superminimal/input.py" ] }, { diff --git a/examples/rmg/superminimal/simulation_flux_rop_superminimal_RMS.ipynb b/examples/rmg/superminimal/simulation_flux_rop_superminimal_RMS.ipynb index 7540b21b014..8ca7eb00292 100644 --- a/examples/rmg/superminimal/simulation_flux_rop_superminimal_RMS.ipynb +++ b/examples/rmg/superminimal/simulation_flux_rop_superminimal_RMS.ipynb @@ -40,7 +40,7 @@ "metadata": {}, "outputs": [], "source": [ - "# ! python-jl /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/superminimal/input.py" + "# ! python /rmg/RMG-Py/rmg.py /rmg/RMG-Py/examples/rmg/superminimal/input.py" ] }, { diff --git a/pytest.ini b/pytest.ini index b17574cc176..7879baabed6 100644 --- a/pytest.ini +++ b/pytest.ini @@ -17,7 +17,6 @@ filterwarnings = # -n auto # will use all available cores, if you first pip install pytest-xdist, but it doesn't work well with julia (so add --no-julia? or give up?) addopts = --keep-duplicates - -p julia.pytestplugin -vv --ignore test/regression --cov=arkane --cov=rmgpy diff --git a/rmg.py b/rmg.py index 26c27c00a4d..0b27098b816 100644 --- a/rmg.py +++ b/rmg.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python-jl +#!/usr/bin/env python ############################################################################### # # diff --git a/rmgpy/pdep/sls.py b/rmgpy/pdep/sls.py index b183a0bba86..a3474ac6e51 100644 --- a/rmgpy/pdep/sls.py +++ b/rmgpy/pdep/sls.py @@ -31,20 +31,17 @@ Contains functionality for directly simulating the master equation and implementing the SLS master equation reduction method """ -import sys -from diffeqpy import de -from julia import Main -import scipy.sparse as sparse +from juliacall import Main +Main.seval("using ReactionMechanismSimulator.SciMLBase") +Main.seval("using ReactionMechanismSimulator.Sundials") import numpy as np import scipy.linalg -import mpmath import scipy.optimize as opt import rmgpy.constants as constants -from rmgpy.pdep.reaction import calculate_microcanonical_rate_coefficient from rmgpy.pdep.me import generate_full_me_matrix, states_to_configurations -from rmgpy.statmech.translation import IdealGasTranslation +from rmgpy.rmg.reactors import to_julia def get_initial_condition(network, x0, indices): @@ -85,30 +82,36 @@ def get_initial_condition(network, x0, indices): def solve_me(M, p0, t): - f = Main.eval( + f = Main.seval( """ -function f(u, M, t) - return M*u +function f(du, u, M, t) + du .= M * u + return du end""" ) - jac = Main.eval( + jac = Main.seval( """ -function jac(u, M, t) - return M +function jac(J, u, M, t) + J .= M + return J end""" ) + p0 = to_julia(p0) + M = to_julia(M) tspan = (0.0, t) - fcn = de.ODEFunction(f, jac=jac) - prob = de.ODEProblem(fcn, p0, tspan, M) - sol = de.solve(prob, solver=de.CVODE_BDF(), abstol=1e-16, reltol=1e-6) + fcn = Main.ODEFunction(f, jac=jac) + prob = Main.ODEProblem(fcn, p0, tspan, M) + sol = Main.solve(prob, Main.CVODE_BDF(), abstol=1e-16, reltol=1e-6) return sol def solve_me_fcns(f, jac, M, p0, t): + p0 = to_julia(p0) + M = to_julia(M) tspan = (0.0, t) - fcn = de.ODEFunction(f, jac=jac) - prob = de.ODEProblem(fcn, p0, tspan, M) - sol = de.solve(prob, solver=de.CVODE_BDF(), abstol=1e-16, reltol=1e-6) + fcn = Main.ODEFunction(f, jac=jac) + prob = Main.ODEProblem(fcn, p0, tspan, M) + sol = Main.solve(prob, Main.CVODE_BDF(), abstol=1e-16, reltol=1e-6) return sol @@ -150,16 +153,18 @@ def get_rate_coefficients_SLS(network, T, P, method="mexp", neglect_high_energy_ tau = np.abs(1.0 / fastest_reaction) if method == "ode": - f = Main.eval( + f = 
Main.seval( """ -function f(u,M,t) - return M*u +function f(du, u, M, t) + du .= M * u + return du end""" ) - jac = Main.eval( + jac = Main.seval( """ -function jac(u,M,t) - return M +function jac(J, u, M, t) + J .= M + return J end""" ) @@ -247,7 +252,7 @@ def run_equilibrium(network, channel): xseq = [] dxdtseq = [] - # Single domainant source simulations + # Single dominant source simulations for i, isomer in enumerate(isomers): xsout, dxdtout = run_single_source(network, isomer) for j in range(len(xsout)): diff --git a/rmgpy/rmg/reactors.py b/rmgpy/rmg/reactors.py index 9386fa2a202..a5e6692f5bf 100644 --- a/rmgpy/rmg/reactors.py +++ b/rmgpy/rmg/reactors.py @@ -30,48 +30,75 @@ """ Contains classes for building RMS simulations """ -import numpy as np -import sys -import logging import itertools -import rmgpy.constants as constants - -if __debug__: - try: - from os.path import dirname, abspath, join, exists +import logging +import sys - path_rms = dirname(dirname(dirname(abspath(__file__)))) - from julia.api import Julia +import juliacall +import numpy as np +from juliacall import Main - jl = Julia(sysimage=join(path_rms, "rms.so")) if exists(join(path_rms, "rms.so")) else Julia(compiled_modules=False) - from pyrms import rms - from diffeqpy import de - from julia import Main - except Exception as e: - import warnings +import rmgpy.constants as constants - warnings.warn("Unable to import Julia dependencies, original error: " + str(e), RuntimeWarning) -else: - from pyrms import rms - from diffeqpy import de - from julia import Main +Main.seval("using PythonCall") +Main.seval("using Sundials") +rms = juliacall.newmodule("RMS") +rms.seval("using ReactionMechanismSimulator") from rmgpy import constants -from rmgpy.species import Species +from rmgpy.data.kinetics.depository import DepositoryReaction +from rmgpy.data.kinetics.family import TemplateReaction +from rmgpy.data.solvation import SolventData +from rmgpy.kinetics.arrhenius import ( + Arrhenius, + ArrheniusBM, + ArrheniusChargeTransfer, + ArrheniusEP, + Marcus, + MultiArrhenius, + MultiPDepArrhenius, + PDepArrhenius, +) +from rmgpy.kinetics.chebyshev import Chebyshev +from rmgpy.kinetics.falloff import Lindemann, ThirdBody, Troe +from rmgpy.kinetics.kineticsdata import KineticsData +from rmgpy.kinetics.surface import StickingCoefficient, SurfaceChargeTransfer from rmgpy.molecule.fragment import Fragment from rmgpy.reaction import Reaction -from rmgpy.thermo.nasa import NASAPolynomial, NASA -from rmgpy.thermo.wilhoit import Wilhoit +from rmgpy.solver.termination import ( + TerminationConversion, + TerminationRateRatio, + TerminationTime, +) +from rmgpy.species import Species +from rmgpy.thermo.nasa import NASA, NASAPolynomial from rmgpy.thermo.thermodata import ThermoData -from rmgpy.kinetics.arrhenius import Arrhenius, ArrheniusEP, ArrheniusBM, PDepArrhenius, MultiArrhenius, MultiPDepArrhenius, ArrheniusChargeTransfer, Marcus -from rmgpy.kinetics.kineticsdata import KineticsData -from rmgpy.kinetics.falloff import Troe, ThirdBody, Lindemann -from rmgpy.kinetics.chebyshev import Chebyshev -from rmgpy.data.solvation import SolventData -from rmgpy.kinetics.surface import StickingCoefficient, SurfaceChargeTransfer -from rmgpy.solver.termination import TerminationTime, TerminationConversion, TerminationRateRatio -from rmgpy.data.kinetics.family import TemplateReaction -from rmgpy.data.kinetics.depository import DepositoryReaction +from rmgpy.thermo.wilhoit import Wilhoit + + +def to_julia(obj): + """ + Convert native Python object to julia 
object. If the object is a Python dict, it will be converted to a Julia Dict. If the object is a Python list, it will be converted to a Julia Vector. If the object is a 1-d numpy array, it will be converted to a Julia Array. If the object is an n-d (n > 1) numpy array, it will be converted to a Julia Matrix. + Otherwise, the object will be returned as is. Doesn't support RMG objects. + + Parameters + ---------- + obj : dict | list | np.ndarray | object + The native Python object to convert + + Returns + ------- + object : Main.Dict | Main.Vector | Main.Matrix | object + The Julia object + """ + if isinstance(obj, dict): + return Main.PythonCall.pyconvert(Main.Dict, obj) + elif isinstance(obj, (list, np.ndarray)): + if getattr(obj, "shape", False) and len(obj.shape) > 1: + return Main.PythonCall.pyconvert(Main.Matrix, obj) + return Main.PythonCall.pyconvert(Main.Vector, obj) + else: # Other native Python objects do not need special conversion. + return obj class PhaseSystem: @@ -153,8 +180,11 @@ def pass_species(self, label, phasesys): rxnlist = [] for i, rxn in enumerate(self.phases[phase_label].reactions): - if (spc.name in [spec.name for spec in rxn.reactants + rxn.products]) and all( - [spec.name in phasesys.species_dict for spec in rxn.reactants + rxn.products] + reactants = Main.pylist(rxn.reactants) + products = Main.pylist(rxn.products) + reacs_and_prods = reactants + products + if (spc.name in [spec.name for spec in reacs_and_prods]) and all( + [spec.name in phasesys.species_dict for spec in reacs_and_prods] ): rxnlist.append(rxn) @@ -168,8 +198,8 @@ def pass_species(self, label, phasesys): for i, rxn in enumerate(interface.reactions): if ( (spc in rxn.reactants or spc in rxn.products) - and all([spec in phasesys.interfaces[key].species for spec in rxn.reactants]) - and all([spec in phasesys.interfaces[key].species for spec in rxn.products]) + and all(spec.name in phasesys.species_dict for spec in rxn.reactants) + and all(spec.name in phasesys.species_dict for spec in rxn.products) ): rxnlist.append(rxn) @@ -448,7 +478,7 @@ def simulate(self, model_settings, simulator_settings, conditions): model_settings.tol_rxn_to_core_deadend_radical, atol=simulator_settings.atol, rtol=simulator_settings.rtol, - solver=de.CVODE_BDF(), + solver=Main.Sundials.CVODE_BDF(), ) return ( @@ -474,7 +504,7 @@ def generate_reactor(self, phase_system): """ phase = phase_system.phases["Default"] ig = rms.IdealGas(phase.species, phase.reactions) - domain, y0, p = rms.ConstantVDomain(phase=ig, initialconds=self.initial_conditions) + domain, y0, p = rms.ConstantVDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) react = rms.Reactor(domain, y0, (0.0, self.tf), p=p) return react, domain, [], p @@ -501,13 +531,15 @@ def generate_reactor(self, phase_system): surf = rms.IdealSurface(surf.species, surf.reactions, surf.site_density, name="surface") liq_constant_species = [cspc for cspc in self.const_spc_names if cspc in [spc.name for spc in liq.species]] cat_constant_species = [cspc for cspc in self.const_spc_names if cspc in [spc.name for spc in surf.species]] - domainliq,y0liq,pliq = rms.ConstantTVDomain(phase=liq,initialconds=liq_initial_cond,constantspecies=liq_constant_species) - domaincat,y0cat,pcat = rms.ConstantTAPhiDomain(phase=surf,initialconds=self.initial_conditions["surface"],constantspecies=cat_constant_species) + domainliq, y0liq, pliq = rms.ConstantTVDomain(phase=liq, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(liq_constant_species)) + domaincat, y0cat, pcat = 
rms.ConstantTAPhiDomain( + phase=surf, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(cat_constant_species), + ) if interface.reactions == []: inter, pinter = rms.ReactiveInternalInterfaceConstantTPhi( domainliq, domaincat, - Main.eval("using ReactionMechanismSimulator; Vector{ElementaryReaction}()"), + Main.seval("using ReactionMechanismSimulator; Vector{ElementaryReaction}()"), self.initial_conditions["liquid"]["T"], self.initial_conditions["surface"]["A"], ) @@ -543,19 +575,19 @@ def generate_reactor(self, phase_system): """ phase = phase_system.phases["Default"] liq = rms.IdealDiluteSolution(phase.species, phase.reactions, phase.solvent) - domain, y0, p = rms.ConstantTVDomain(phase=liq, initialconds=self.initial_conditions, constantspecies=self.const_spc_names) + domain, y0, p = rms.ConstantTVDomain(phase=liq, initialconds=to_julia(self.initial_conditions), constantspecies=to_julia(self.const_spc_names)) interfaces = [] if self.inlet_conditions: inlet_conditions = {key: value for (key, value) in self.inlet_conditions.items() if key != "F"} total_molar_flow_rate = self.inlet_conditions["F"] - inlet = rms.Inlet(domain, inlet_conditions, Main.eval("x->" + str(total_molar_flow_rate))) + inlet = rms.Inlet(domain, to_julia(inlet_conditions), Main.seval("x->" + str(total_molar_flow_rate))) interfaces.append(inlet) if self.outlet_conditions: total_volumetric_flow_rate = self.outlet_conditions["Vout"] - outlet = rms.VolumetricFlowRateOutlet(domain, Main.eval("x->" + str(total_volumetric_flow_rate))) + outlet = rms.VolumetricFlowRateOutlet(domain, Main.seval("x->" + str(total_volumetric_flow_rate))) interfaces.append(outlet) if self.evap_cond_conditions: @@ -578,7 +610,7 @@ def generate_reactor(self, phase_system): """ phase = phase_system.phases["Default"] ig = rms.IdealGas(phase.species, phase.reactions) - domain, y0, p = rms.ConstantTPDomain(phase=ig, initialconds=self.initial_conditions) + domain, y0, p = rms.ConstantTPDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) react = rms.Reactor(domain, y0, (0.0, self.tf), p=p) return react, domain, [], p @@ -620,21 +652,21 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): n = obj._n.value_si return rms.Marcus(A,n,obj._lmbd_i_coefs.value_si,obj._lmbd_o.value_si, obj._wr.value_si, obj._wp.value_si, obj._beta.value_si, rms.EmptyRateUncertainty()) elif isinstance(obj, PDepArrhenius): - Ps = obj._pressures.value_si - arrs = [to_rms(arr) for arr in obj.arrhenius] + Ps = to_julia(obj._pressures.value_si) + arrs = to_julia([to_rms(arr) for arr in obj.arrhenius]) return rms.PdepArrhenius(Ps, arrs, rms.EmptyRateUncertainty()) elif isinstance(obj, MultiArrhenius): - arrs = [to_rms(arr) for arr in obj.arrhenius] + arrs = to_julia([to_rms(arr) for arr in obj.arrhenius]) return rms.MultiArrhenius(arrs, rms.EmptyRateUncertainty()) elif isinstance(obj, MultiPDepArrhenius): - parrs = [to_rms(parr) for parr in obj.arrhenius] + parrs = to_julia([to_rms(parr) for parr in obj.arrhenius]) return rms.MultiPdepArrhenius(parrs, rms.EmptyRateUncertainty()) elif isinstance(obj, Chebyshev): Tmin = obj.Tmin.value_si Tmax = obj.Tmax.value_si Pmin = obj.Pmin.value_si Pmax = obj.Pmax.value_si - coeffs = obj.coeffs.value_si.tolist() + coeffs = to_julia(obj.coeffs.value_si) return rms.Chebyshev(coeffs, Tmin, Tmax, Pmin, Pmax) elif isinstance(obj, ThirdBody): arrstr = arrhenius_to_julia_string(obj.arrheniusLow) @@ -643,7 +675,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): for key, 
value in efficiencies.items(): dstr += '"' + key + '"' "=>" + str(value) + "," dstr += "])" - return Main.eval( + return Main.seval( "using ReactionMechanismSimulator; ThirdBody(" + arrstr + ", Dict{Int64,Float64}([]), " + dstr + "," + "EmptyRateUncertainty())" ) elif isinstance(obj, Lindemann): @@ -654,7 +686,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): for key, value in efficiencies.items(): dstr += '"' + key + '"' "=>" + str(value) + "," dstr += "])" - return Main.eval( + return Main.seval( "using ReactionMechanismSimulator; Lindemann(" + arrhigh + "," @@ -677,7 +709,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): for key, value in efficiencies.items(): dstr += '"' + key + '"' "=>" + str(value) + "," dstr += "])" - return Main.eval( + return Main.seval( "using ReactionMechanismSimulator; Troe(" + arrhigh + "," @@ -722,6 +754,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): atomnums[atm.element.symbol] += 1 else: atomnums[atm.element.symbol] = 1 + atomnums = to_julia(atomnums) bondnum = len(mol.get_all_edges()) if not obj.molecule[0].contains_surface_site(): @@ -730,12 +763,12 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): th = obj.get_thermo_data() thermo = to_rms(th) if obj.henry_law_constant_data: - kH = rms.TemperatureDependentHenryLawConstant(Ts=obj.henry_law_constant_data.Ts, kHs=obj.henry_law_constant_data.kHs) + kH = rms.TemperatureDependentHenryLawConstant(Ts=to_julia(obj.henry_law_constant_data.Ts), kHs=to_julia(obj.henry_law_constant_data.kHs)) else: kH = rms.EmptyHenryLawConstant() if obj.liquid_volumetric_mass_transfer_coefficient_data: kLA = rms.TemperatureDependentLiquidVolumetricMassTransferCoefficient( - Ts=obj.liquid_volumetric_mass_transfer_coefficient_data.Ts, kLAs=obj.liquid_volumetric_mass_transfer_coefficient_data.kLAs + Ts=to_julia(obj.liquid_volumetric_mass_transfer_coefficient_data.Ts), kLAs=to_julia(obj.liquid_volumetric_mass_transfer_coefficient_data.kLAs) ) else: kLA = rms.EmptyLiquidVolumetricMassTransferCoefficient() @@ -777,16 +810,16 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): comment=obj.thermo.comment, ) elif isinstance(obj, Reaction): - reactantinds = [species_names.index(spc.label) for spc in obj.reactants] - productinds = [species_names.index(spc.label) for spc in obj.products] - reactants = [rms_species_list[i] for i in reactantinds] - products = [rms_species_list[i] for i in productinds] + reactantinds = to_julia([species_names.index(spc.label) for spc in obj.reactants]) + productinds = to_julia([species_names.index(spc.label) for spc in obj.products]) + reactants = to_julia([rms_species_list[i] for i in reactantinds]) + products = to_julia([rms_species_list[i] for i in productinds]) if isinstance(obj.kinetics, SurfaceChargeTransfer): obj.set_reference_potential(300) kinetics = to_rms(obj.kinetics, species_names=species_names, rms_species_list=rms_species_list, rmg_species=rmg_species) radchange = sum([spc.molecule[0].multiplicity-1 for spc in obj.products]) - sum([spc.molecule[0].multiplicity-1 for spc in obj.reactants]) electronchange = -sum([spc.molecule[0].get_net_charge() for spc in obj.products]) + sum([spc.molecule[0].get_net_charge() for spc in obj.reactants]) - return rms.ElementaryReaction(index=obj.index, reactants=reactants, reactantinds=reactantinds, products=products, productinds=productinds, kinetics=kinetics, electronchange=electronchange, 
radicalchange=radchange, reversible=obj.reversible, pairs=[], comment=obj.kinetics.comment) + return rms.ElementaryReaction(index=obj.index, reactants=reactants, reactantinds=reactantinds, products=products, productinds=productinds, kinetics=kinetics, electronchange=electronchange, radicalchange=radchange, reversible=obj.reversible, pairs=to_julia([]), comment=obj.kinetics.comment) elif isinstance(obj, SolventData): return rms.Solvent("solvent", rms.RiedelViscosity(float(obj.A), float(obj.B), float(obj.C), float(obj.D), float(obj.E))) elif isinstance(obj, TerminationTime): diff --git a/test/arkane/arkaneFunctionalInputTest.py b/test/arkane/arkaneFunctionalInputTest.py index 89f05f6c2ee..8e1bcdb1b54 100644 --- a/test/arkane/arkaneFunctionalInputTest.py +++ b/test/arkane/arkaneFunctionalInputTest.py @@ -327,7 +327,7 @@ def test_load_input_file(self): "examples", "arkane", "networks", - "acetyl+O2", + "acetyl+O2_cse", "input.py", ) ( diff --git a/utilities.py b/utilities.py index c8e74258186..f84963a6324 100644 --- a/utilities.py +++ b/utilities.py @@ -310,7 +310,7 @@ def update_headers(): start of each file, be sure to double-check the results to make sure important lines aren't accidentally overwritten. """ - shebang = """#!/usr/bin/env python-jl + shebang = """#!/usr/bin/env python """ From 3b3c24427e9358880eef13083ce25f5c5c9f9e90 Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Thu, 27 Jun 2024 09:33:18 -0400 Subject: [PATCH 02/48] JWB initial increase to Python 3.9 These were the first commits to change to Python 3.9, and they do not work on their own but instead open the door to finding issues to make the rest of the changes --- .github/workflows/CI.yml | 88 ++++++-------------------------------- .github/workflows/docs.yml | 6 ++- environment.yml | 17 +++----- 3 files changed, 23 insertions(+), 88 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 57a925c1043..674622c8b6f 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -51,66 +51,14 @@ env: jobs: - build-osx: - runs-on: macos-13 - # skip scheduled runs from forks - if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} - defaults: - run: - shell: bash -l {0} - steps: - - name: Checkout RMG-Py - uses: actions/checkout@v4 - - # Step to create a custom condarc.yml before setting up conda - - name: Create custom conda config file - run: | - RUNNER_CWD=$(pwd) - echo "channels:" > $RUNNER_CWD/condarc.yml - echo " - conda-forge" >> $RUNNER_CWD/condarc.yml - echo " - rmg" >> $RUNNER_CWD/condarc.yml - echo " - cantera" >> $RUNNER_CWD/condarc.yml - echo "show_channel_urls: true" >> $RUNNER_CWD/condarc.yml - - # configures the mamba environment manager and builds the environment - - name: Setup Miniforge Python 3.7 - uses: conda-incubator/setup-miniconda@v3 - with: - environment-file: environment.yml - miniforge-variant: Miniforge3 - miniforge-version: latest - python-version: 3.7 - condarc-file: condarc.yml - activate-environment: rmg_env - use-mamba: true - - # list the environment for debugging purposes - - name: mamba info - run: | - mamba info - mamba list - - # Clone RMG-database - - name: Clone RMG-database - run: | - cd .. 
- git clone -b $RMG_DATABASE_BRANCH https://github.com/ReactionMechanismGenerator/RMG-database.git - - # modify env variables as directed in the RMG installation instructions - - name: Set Environment Variables - run: | - RUNNER_CWD=$(pwd) - echo "PYTHONPATH=$RUNNER_CWD/RMG-Py:$PYTHONPATH" >> $GITHUB_ENV - echo "$RUNNER_CWD/RMG-Py" >> $GITHUB_PATH - - # RMG build step - - name: make RMG - run: | - make clean - make - - build-and-test-linux: - runs-on: ubuntu-latest + build-and-test: + strategy: + fail-fast: false + matrix: + python-version: ["3.9"] + os: [macos-13, macos-latest, ubuntu-latest] + runs-on: ${{ matrix.os }} + name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} # skip scheduled runs from forks if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} env: @@ -123,27 +71,17 @@ jobs: - name: Checkout RMG-Py uses: actions/checkout@v4 - # Step to create a custom condarc.yml before setting up conda - - name: Create custom condarc.yml - run: | - RUNNER_CWD=$(pwd) - echo "channels:" > $RUNNER_CWD/condarc.yml - echo " - conda-forge" >> $RUNNER_CWD/condarc.yml - echo " - rmg" >> $RUNNER_CWD/condarc.yml - echo " - cantera" >> $RUNNER_CWD/condarc.yml - echo "show_channel_urls: true" >> $RUNNER_CWD/condarc.yml - - # configures the mamba environment manager and builds the environment - - name: Setup Miniforge Python 3.7 + - name: Setup Miniforge Python ${{ matrix.python-version }} uses: conda-incubator/setup-miniconda@v3 with: environment-file: environment.yml miniforge-variant: Miniforge3 miniforge-version: latest - python-version: 3.7 - condarc-file: condarc.yml + python-version: ${{ matrix.python-version }} activate-environment: rmg_env use-mamba: true + show-channel-urls: true + channels: conda-forge,cantera,rmg # list the environment for debugging purposes - name: mamba info @@ -386,7 +324,7 @@ jobs: # who knows ¯\_(ツ)_/¯ # # taken from https://github.com/docker/build-push-action - needs: build-and-test-linux + needs: build-and-test runs-on: ubuntu-latest if: github.ref == 'refs/heads/main' && github.repository == 'ReactionMechanismGenerator/RMG-Py' steps: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index dcebd83930d..1addaaed435 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -26,15 +26,17 @@ jobs: with: fetch-depth: 0 - - name: Setup Mambaforge Python 3.7 + - name: Setup Miniforge Python 3.9 uses: conda-incubator/setup-miniconda@v3 with: environment-file: environment.yml miniforge-variant: Miniforge3 miniforge-version: latest - python-version: 3.7 + python-version: 3.9 activate-environment: rmg_env use-mamba: true + show-channel-urls: true + channels: conda-forge,cantera,rmg - name: Make libtiff Symlink to Avoid Runner Bug run: | # This action may need to be removed/adjusted in future runs. diff --git a/environment.yml b/environment.yml index 58e4440ed3a..1b53cf3c1ce 100644 --- a/environment.yml +++ b/environment.yml @@ -19,7 +19,8 @@ # - April 17, 2024 Limit versions of cclib at advice of maintainers. 
# - August 4, 2024 Restricted pyrms to <2 # - May 14, 2024 Removed diffeqpy by switching to call SciMLBase and Sundials using JuliaCall - +# - March 15, 2024 - started migration to Python 3.9 +# name: rmg_env channels: - rmg @@ -45,7 +46,7 @@ dependencies: # external software tools for chemistry - coolprop - - cantera::cantera=2.6 + - cantera::cantera >=3 - conda-forge::mopac # see https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2639#issuecomment-2050292972 - conda-forge::cclib >=1.6.3,<1.9 @@ -57,11 +58,11 @@ dependencies: - conda-forge::pyjuliacall # for calling julia packages # Python tools - - python >=3.7 + - python >=3.9 # leave as GEQ so that GitHub actions can add EQ w/o breaking (contradictory deps) - coverage - cython >=0.25.2 - scikit-learn - - scipy <1.11 + - scipy - numpy >=1.10.0 - pydot - jinja2 @@ -72,13 +73,7 @@ dependencies: - networkx - pytest - pytest-cov - # we use a the pytest-check plugin, which is on Conda and PyPI, but the - # version compatible with Python 3.7 is only on PyPI - # switch to the conda version after upgrading to 3.11 - # - conda-forge::pytest-check - - pip - - pip: - - pytest-check + - conda-forge::pytest-check - matplotlib >=1.5 - mpmath - pandas From 1233b8c76163dd21258a189c33fb3d6450d0a9ef Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Mon, 1 Jul 2024 09:50:58 -0400 Subject: [PATCH 03/48] make muq optional (like the docs say!) and add a helpful msg if missing --- environment.yml | 1 - rmgpy/rmg/main.py | 7 +++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/environment.yml b/environment.yml index 1b53cf3c1ce..276b4352f91 100644 --- a/environment.yml +++ b/environment.yml @@ -80,7 +80,6 @@ dependencies: - conda-forge::gprof2dot - conda-forge::numdifftools - conda-forge::quantities - - conda-forge::muq - conda-forge::lpsolve55 - conda-forge::ringdecomposerlib-python diff --git a/rmgpy/rmg/main.py b/rmgpy/rmg/main.py index fb4ae33ef8b..bccf00a776f 100644 --- a/rmgpy/rmg/main.py +++ b/rmgpy/rmg/main.py @@ -1274,8 +1274,11 @@ def run_uncertainty_analysis(self): if self.uncertainty is not None and self.uncertainty["global"]: try: import muq - except ImportError: - logging.error("Unable to import MUQ. Skipping global uncertainty analysis.") + except ImportError as ie: + logging.error( + f"Skipping global uncertainty analysis! Unable to import MUQ (original error: {str(ie)})." + "Install muq with 'conda install -c conda-forge muq'." + ) self.uncertainty["global"] = False else: import re From 995b0a1f0ac1e6a34b87cc69b8e452cd2f95a80c Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Mon, 1 Jul 2024 10:39:41 -0400 Subject: [PATCH 04/48] fix cython compilation errors (0.23 -> 3.n), SEE EXTENDED!! here are all the solutions that were implemented: - in a couple places we were implicitly casting a double to a numpy float, now need to explicitly grad the _real_ part of the double - Cython does not allow nested function declaration inside cpdef functions, move all of these outside of their parent functions or redefine them to be purely functional where practical - similar to the above, lambda function are no longer allowed - get the same treatment. what's different here is that usually we are using lambda in sorting calls, so we can replace these with operator.itemgetter or attrgetter, where relevant. this also involves re-writing a couple reduce calls, which also used lambda - modern cython does not allow re-declaring existing Cython variables (in previous versions this was a warning I think), so I just remove these where needed. 
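To make the lambda point concrete, the swaps generally look like the following tiny sketch (a generic illustration, not lines lifted from any one file; SimpleNamespace is just a stand-in for an Atom-like object):

    from operator import attrgetter, itemgetter
    from types import SimpleNamespace
    import numpy as np

    # sort-key lambdas become operator helpers, which Cython 3 accepts inside cpdef functions
    pairs = sorted([(2, "b"), (1, "a")], key=itemgetter(0))                               # was key=lambda pair: pair[0]
    atoms = sorted([SimpleNamespace(id=3), SimpleNamespace(id=1)], key=attrgetter("id"))  # was key=lambda x: x.id

    # reduce((lambda x, y: x * y), ...) becomes a numpy product
    mass_list = [16.0, 18.0]
    reduced_mass = np.prod(mass_list) / sum(mass_list)
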
(btw Cython is super cool, actually points out both of the declaration so that you can delete one) i made these fixes while listening to The Metal by Tenacious D, Talk Too Much by Renee Rapp, BEST INTEREST by Tyler, The Creator (love the bass line), mos thoser by food house, and Doritos & Fritos by 100 Gecs --- rmgpy/kinetics/kineticsdata.pyx | 5 +- rmgpy/molecule/group.py | 168 ++++++++++++++++---------------- rmgpy/molecule/inchi.py | 25 +++-- rmgpy/molecule/molecule.py | 38 ++++---- rmgpy/molecule/resonance.py | 14 ++- rmgpy/pdep/reaction.pyx | 2 +- rmgpy/reaction.py | 79 +++++++-------- 7 files changed, 168 insertions(+), 163 deletions(-) diff --git a/rmgpy/kinetics/kineticsdata.pyx b/rmgpy/kinetics/kineticsdata.pyx index 7b4617cdd09..292a56a7ab3 100644 --- a/rmgpy/kinetics/kineticsdata.pyx +++ b/rmgpy/kinetics/kineticsdata.pyx @@ -249,9 +249,8 @@ cdef class PDepKineticsData(PDepKineticsModel): Plow = Pdata[j] Phigh = Pdata[j + 1] if Plow <= P and P <= Phigh: - klow = kdata[i, j] * (kdata[i + 1, j] / kdata[i, j]) ** ((T - Tlow) / (Thigh - Tlow)) - khigh = kdata[i, j + 1] * (kdata[i + 1, j + 1] / kdata[i, j + 1]) ** ( - (T - Tlow) / (Thigh - Tlow)) + klow = kdata[i, j] * (kdata[i + 1, j] / kdata[i, j]) ** ((T - Tlow) / (Thigh - Tlow)).real + khigh = kdata[i, j + 1] * (kdata[i + 1, j + 1] / kdata[i, j + 1]) ** ((T - Tlow) / (Thigh - Tlow)).real k = klow * (khigh / klow) ** (log(P / Plow) / log(Phigh / Plow)) break diff --git a/rmgpy/molecule/group.py b/rmgpy/molecule/group.py index 1dc56cc7f7b..e9a6e6433fd 100644 --- a/rmgpy/molecule/group.py +++ b/rmgpy/molecule/group.py @@ -46,8 +46,91 @@ from rmgpy.molecule.graph import Vertex, Edge, Graph from rmgpy.molecule.fragment import CuttingLabel +# helper functions +# these were originall nested inside the indicated parent function, but when we upgraded to +# Cython 3 this was no longer allowed - thus, they now live here. -################################################################################ +# add_implicit_benzene +def check_set(super_list, sub_list): + """ + Args: + super_list: list to check if superset of partList + sub_list: list to check if subset of superList + + Returns: Boolean to see if super_list is a superset of sub_list + + """ + super_set = set(super_list) + sub_set = set(sub_list) + return super_set.issuperset(sub_set) + +def add_cb_atom_to_ring(ring, cb_atom): + """ + Every 'Cb' atom belongs in exactly one benzene ring. This function checks + adds the cb_atom to the ring (in connectivity order) if the cb_atom is connected + to any the last or first atom in the partial ring. + + Args: + ring: list of :class:GroupAtoms representing a partial ring to merge + cb_atom: :class:GroupAtom with atomtype 'Cb' + + Returns: If cb_atom connects to the beginning or end of ring, returns a + new list of the merged ring, otherwise an empty list + + """ + + merged_ring = [] + # ring already complete + if len(ring) == 6: return merged_ring + for atom2, bond12 in cb_atom.bonds.items(): + if bond12.is_benzene(): + if atom2 is ring[-1]: + merged_ring = ring + [cb_atom] + elif atom2 is ring[0]: + merged_ring = [cb_atom] + ring + + return merged_ring + +def merge_overlapping_benzene_rings(ring1, ring2, od): + """ + The input arguments of rings are always in the order that the atoms appear + inside the ring. That is, each atom is connected to the ones adjacent on the + list. 
+ + Args: + ring1: list of :class:GroupAtoms representing first partial ring to merge + ring2: list :class:GroupAtoms representing second partial ring to merge + od: in for overlap distance + + This function tries to see if the beginning or ends of each list have the + same atom objects, i.e the two part rings should be merged together. + + Returns: If rings are mergable, returns a new list of the merged ring, otherwise + an empty list + + """ + new_ring = [] + # ring already complete + if len(ring1) == 6 or len(ring2) == 6: return new_ring + + # start of ring1 matches end of ring2 + match_list1 = [x1 is x2 for x1, x2 in zip(ring1[-od:], ring2[:od])] + # end of ring1 matches end of ring2 + match_list2 = [x1 is x2 for x1, x2 in zip(ring1[-od:], ring2[:od - 1:-1])] + # start of ring1 matches end of ring2 + match_list3 = [x1 is x2 for x1, x2 in zip(ring1[:od], ring2[-od:])] + # start of ring1 matches start of ring2 + match_list4 = [x1 is x2 for x1, x2 in zip(ring1[:od], ring2[od::-1])] + if False not in match_list1: + new_ring = ring1 + ring2[od:] + elif False not in match_list2: + new_ring = ring1 + ring2[-od - 1::-1] + elif False not in match_list3: + new_ring = ring2[:-od] + ring1 + elif False not in match_list4: + new_ring = ring2[:od - 1:-1] + ring1 + + return new_ring class GroupAtom(Vertex): """ @@ -2531,89 +2614,6 @@ def add_implicit_benzene(self): # Note that atomtypes like N5bd are mostly referred to as Cb in this code, # which was first written for just carbon. - # First define some helper functions - def check_set(super_list, sub_list): - """ - Args: - super_list: list to check if superset of partList - sub_list: list to check if subset of superList - - Returns: Boolean to see if super_list is a superset of sub_list - - """ - super_set = set(super_list) - sub_set = set(sub_list) - return super_set.issuperset(sub_set) - - def add_cb_atom_to_ring(ring, cb_atom): - """ - Every 'Cb' atom belongs in exactly one benzene ring. This function checks - adds the cb_atom to the ring (in connectivity order) if the cb_atom is connected - to any the last or first atom in the partial ring. - - Args: - ring: list of :class:GroupAtoms representing a partial ring to merge - cb_atom: :class:GroupAtom with atomtype 'Cb' - - Returns: If cb_atom connects to the beginning or end of ring, returns a - new list of the merged ring, otherwise an empty list - - """ - - merged_ring = [] - # ring already complete - if len(ring) == 6: return merged_ring - for atom2, bond12 in cb_atom.bonds.items(): - if bond12.is_benzene(): - if atom2 is ring[-1]: - merged_ring = ring + [cb_atom] - elif atom2 is ring[0]: - merged_ring = [cb_atom] + ring - - return merged_ring - - def merge_overlapping_benzene_rings(ring1, ring2, od): - """ - The input arguments of rings are always in the order that the atoms appear - inside the ring. That is, each atom is connected to the ones adjacent on the - list. - - Args: - ring1: list of :class:GroupAtoms representing first partial ring to merge - ring2: list :class:GroupAtoms representing second partial ring to merge - od: in for overlap distance - - This function tries to see if the beginning or ends of each list have the - same atom objects, i.e the two part rings should be merged together. 
- - Returns: If rings are mergable, returns a new list of the merged ring, otherwise - an empty list - - """ - new_ring = [] - # ring already complete - if len(ring1) == 6 or len(ring2) == 6: return new_ring - - # start of ring1 matches end of ring2 - match_list1 = [x1 is x2 for x1, x2 in zip(ring1[-od:], ring2[:od])] - # end of ring1 matches end of ring2 - match_list2 = [x1 is x2 for x1, x2 in zip(ring1[-od:], ring2[:od - 1:-1])] - # start of ring1 matches end of ring2 - match_list3 = [x1 is x2 for x1, x2 in zip(ring1[:od], ring2[-od:])] - # start of ring1 matches start of ring2 - match_list4 = [x1 is x2 for x1, x2 in zip(ring1[:od], ring2[od::-1])] - if False not in match_list1: - new_ring = ring1 + ring2[od:] - elif False not in match_list2: - new_ring = ring1 + ring2[-od - 1::-1] - elif False not in match_list3: - new_ring = ring2[:-od] + ring1 - elif False not in match_list4: - new_ring = ring2[:od - 1:-1] + ring1 - - return new_ring - - ####################################################################################### # start of main algorithm copy_group = deepcopy(self) """ diff --git a/rmgpy/molecule/inchi.py b/rmgpy/molecule/inchi.py index 59e49d85223..438b2b15322 100644 --- a/rmgpy/molecule/inchi.py +++ b/rmgpy/molecule/inchi.py @@ -30,6 +30,7 @@ import itertools import re import warnings +from operator import itemgetter import cython from rdkit import Chem @@ -614,7 +615,7 @@ def create_augmented_layers(mol): atom_indices = [atom_indices.index(i + 1) for i, atom in enumerate(molcopy.atoms)] # sort the atoms based on the order of the atom indices - molcopy.atoms = [x for (y, x) in sorted(zip(atom_indices, molcopy.atoms), key=lambda pair: pair[0])] + molcopy.atoms = [x for (y, x) in sorted(zip(atom_indices, molcopy.atoms), key=itemgetter(0))] ulayer = _create_u_layer(molcopy, auxinfo) @@ -704,17 +705,15 @@ def _convert_3_atom_2_bond_path(start, mol): """ from rmgpy.data.kinetics.family import ReactionRecipe - def is_valid(mol): - """Check if total bond order of oxygen atoms is smaller than 4.""" - - for at in mol.atoms: - if at.number == 8: - order = at.get_total_bond_order() - not_correct = order >= 4 - if not_correct: - return False - - return True + # Check if total bond order of oxygen atoms is smaller than 4 + is_mol_valid = True + for at in mol.atoms: + if at.number == 8: + order = at.get_total_bond_order() + not_correct = order >= 4 + if not_correct: + is_mol_valid = False + break paths = pathfinder.find_allyl_end_with_charge(start) @@ -739,7 +738,7 @@ def is_valid(mol): end.charge += 1 if end.charge < 0 else -1 recipe.apply_forward(mol) - if is_valid(mol): + if is_mol_valid: # unlabel atoms so that they never cause trouble downstream for i, at in enumerate(path[::2]): at.label = '' diff --git a/rmgpy/molecule/molecule.py b/rmgpy/molecule/molecule.py index 5e024765502..c19d35307d4 100644 --- a/rmgpy/molecule/molecule.py +++ b/rmgpy/molecule/molecule.py @@ -41,6 +41,7 @@ from collections import OrderedDict, defaultdict from copy import deepcopy from urllib.parse import quote +from operator import attrgetter import cython import numpy as np @@ -62,6 +63,10 @@ ################################################################################ +# helper function for sorting +def _skip_first(in_tuple): + return in_tuple[1:] + bond_orders = {'S': 1, 'D': 2, 'T': 3, 'B': 1.5} globals().update({ @@ -2550,30 +2555,21 @@ def get_aromatic_rings(self, rings=None, save_order=False): if rings is None: rings = self.get_relevant_cycles() - def filter_fused_rings(_rings): - """ - Given a 
list of rings, remove ones which share more than 2 atoms. - """ - cython.declare(toRemove=set, i=cython.int, j=cython.int, toRemoveSorted=list) - - if len(_rings) < 2: - return _rings - + # Remove rings that share more than 3 atoms, since they cannot be planar + cython.declare(toRemove=set, j=cython.int, toRemoveSorted=list) + if len(rings) < 2: + pass + else: to_remove = set() - for i, j in itertools.combinations(range(len(_rings)), 2): - if len(set(_rings[i]) & set(_rings[j])) > 2: + for i, j in itertools.combinations(range(len(rings)), 2): + if len(set(rings[i]) & set(rings[j])) > 2: to_remove.add(i) to_remove.add(j) to_remove_sorted = sorted(to_remove, reverse=True) for i in to_remove_sorted: - del _rings[i] - - return _rings - - # Remove rings that share more than 3 atoms, since they cannot be planar - rings = filter_fused_rings(rings) + del rings[i] # Only keep rings with exactly 6 atoms, since RMG can only handle aromatic benzene rings = [ring for ring in rings if len(ring) == 6] @@ -2709,7 +2705,7 @@ def get_deterministic_sssr(self): tup = (vertex, get_vertex_connectivity_value(vertex), -origin_conn_dict[vertex]) root_candidates_tups.append(tup) - root_vertex = sorted(root_candidates_tups, key=lambda tup0: tup0[1:], reverse=True)[0][0] + root_vertex = sorted(root_candidates_tups, key=_skip_first, reverse=True)[0][0] # Get all cycles involving the root vertex cycles = graph0.get_all_cycles(root_vertex) @@ -2726,7 +2722,7 @@ def get_deterministic_sssr(self): -sum([v.get_total_bond_order() for v in cycle0])) cycle_candidate_tups.append(tup) - cycle = sorted(cycle_candidate_tups, key=lambda tup0: tup0[1:])[0][0] + cycle = sorted(cycle_candidate_tups, key=_skip_first)[0][0] cycle_list.append(cycle) @@ -2810,8 +2806,8 @@ def is_identical(self, other, strict=True): if atom_ids == other_ids: # If the two molecules have the same indices, then they might be identical # Sort the atoms by ID - atom_list = sorted(self.atoms, key=lambda x: x.id) - other_list = sorted(other.atoms, key=lambda x: x.id) + atom_list = sorted(self.atoms, key=attrgetter('id')) + other_list = sorted(other.atoms, key=attrgetter('id')) # If matching atom indices gives a valid mapping, then the molecules are fully identical mapping = {} diff --git a/rmgpy/molecule/resonance.py b/rmgpy/molecule/resonance.py index 0cca99734c0..ee109de50ee 100644 --- a/rmgpy/molecule/resonance.py +++ b/rmgpy/molecule/resonance.py @@ -56,6 +56,7 @@ """ import logging +from operator import attrgetter import cython @@ -965,6 +966,13 @@ def generate_clar_structures(mol, save_order=False): return mol_list +# helper functions for sorting +def _sum_atom_ids(atom_list): + return sum(atom.id for atom in atom_list) + +def _tuplize_bond(bond): + return (bond.atom1.id, bond.atom2.id) + def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): """ @@ -993,7 +1001,7 @@ def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): molecule = mol.copy(deep=True) aromatic_rings = molecule.get_aromatic_rings(save_order=save_order)[0] - aromatic_rings.sort(key=lambda x: sum([atom.id for atom in x])) + aromatic_rings.sort(key=_sum_atom_ids) if not aromatic_rings: return [] @@ -1002,13 +1010,13 @@ def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): atoms = set() for ring in aromatic_rings: atoms.update(ring) - atoms = sorted(atoms, key=lambda x: x.id) + atoms = sorted(atoms, key=attrgetter('id')) # Get list of bonds involving the ring atoms, ignoring bonds to hydrogen bonds = set() for atom in 
atoms: bonds.update([atom.bonds[key] for key in atom.bonds.keys() if key.is_non_hydrogen()]) - bonds = sorted(bonds, key=lambda x: (x.atom1.id, x.atom2.id)) + bonds = sorted(bonds, key=_tuplize_bond) # Identify exocyclic bonds, and save their bond orders exo = [] diff --git a/rmgpy/pdep/reaction.pyx b/rmgpy/pdep/reaction.pyx index 6a6327c5d71..d8e6942de47 100644 --- a/rmgpy/pdep/reaction.pyx +++ b/rmgpy/pdep/reaction.pyx @@ -308,7 +308,7 @@ def apply_inverse_laplace_transform_method(transition_state, and abs(dens_states[r - m, s]) > 1e-12 \ and abs(dens_states[r - m - 1, s]) > 1e-12: num = dens_states[r - m, s] * (dens_states[r - m - 1, s] / dens_states[r - m, s]) \ - ** (-rem / (e_list[r - m - 1] - e_list[r - m])) + ** (-rem / (e_list[r - m - 1] - e_list[r - m])).real k[r, s] = freq_factor * num / dens_states[r, s] elif n >= n_crit: diff --git a/rmgpy/reaction.py b/rmgpy/reaction.py index b8ab3951c8c..66119c38059 100644 --- a/rmgpy/reaction.py +++ b/rmgpy/reaction.py @@ -65,6 +65,16 @@ ################################################################################ +# helper function for sorting +def get_sorting_key(spc): + # List of elements to sort by, order is intentional + numbers = [6, 8, 7, 14, 16, 15, 17, 53, 9, 35] # C, O, N, Si, S, P, Cl, I, F, Br + ele_count = dict([(n,0) for n in numbers]) + for atom in spc.molecule[0].atoms: + if isinstance(atom, Atom) and atom.element.number in numbers: + ele_count[atom.element.number] += 1 + return tuple(ele_count[n] for n in numbers) + class Reaction: """ @@ -1516,14 +1526,7 @@ def generate_pairs(self): reactants = self.reactants[:] products = self.products[:] - def get_sorting_key(spc): - # List of elements to sort by, order is intentional - numbers = [6, 8, 7, 14, 16, 15, 17, 53, 9, 35] # C, O, N, Si, S, P, Cl, I, F, Br - ele_count = dict([(n,0) for n in numbers]) - for atom in spc.molecule[0].atoms: - if isinstance(atom, Atom) and atom.element.number in numbers: - ele_count[atom.element.number] += 1 - return tuple(ele_count[n] for n in numbers) + # Sort the reactants and products by element counts reactants.sort(key=get_sorting_key) products.sort(key=get_sorting_key) @@ -1769,7 +1772,7 @@ def get_reduced_mass(self, reverse=False): mass_list = [spc.molecule[0].get_molecular_weight() for spc in self.products] else: mass_list = [spc.molecule[0].get_molecular_weight() for spc in self.reactants] - reduced_mass = reduce((lambda x, y: x * y), mass_list) / sum(mass_list) + reduced_mass = np.prod(mass_list) / sum(mass_list) return reduced_mass def get_mean_sigma_and_epsilon(self, reverse=False): @@ -1795,7 +1798,8 @@ def get_mean_sigma_and_epsilon(self, reverse=False): if any([x == 0 for x in sigmas + epsilons]): raise ValueError mean_sigmas = sum(sigmas) / num_of_spcs - mean_epsilons = reduce((lambda x, y: x * y), epsilons) ** (1 / len(epsilons)) + mean_epsilons = np.prod(epsilons) ** (1 / len(epsilons)) + return mean_sigmas, mean_epsilons def generate_high_p_limit_kinetics(self): @@ -1805,6 +1809,15 @@ def generate_high_p_limit_kinetics(self): """ raise NotImplementedError("generate_high_p_limit_kinetics is not implemented for all Reaction subclasses.") +def same_object(object1, object2, _check_identical, _only_check_label, + _generate_initial_map, _strict, _save_order): + if _only_check_label: + return str(object1) == str(object2) + elif _check_identical: + return object1.is_identical(object2, strict=_strict) + else: + return object1.is_isomorphic(object2, generate_initial_map=_generate_initial_map, + strict=_strict, save_order=_save_order) 
def same_species_lists(list1, list2, check_identical=False, only_check_label=False, generate_initial_map=False, strict=True, save_order=False): @@ -1826,45 +1839,35 @@ def same_species_lists(list1, list2, check_identical=False, only_check_label=Fal ``True`` if the lists are the same and ``False`` otherwise """ - def same(object1, object2, _check_identical=check_identical, _only_check_label=only_check_label, - _generate_initial_map=generate_initial_map, _strict=strict, _save_order=save_order): - if _only_check_label: - return str(object1) == str(object2) - elif _check_identical: - return object1.is_identical(object2, strict=_strict) - else: - return object1.is_isomorphic(object2, generate_initial_map=_generate_initial_map, - strict=_strict, save_order=_save_order) - if len(list1) == len(list2) == 1: - if same(list1[0], list2[0]): + if same_object(list1[0], list2[0]): return True elif len(list1) == len(list2) == 2: - if same(list1[0], list2[0]) and same(list1[1], list2[1]): + if same_object(list1[0], list2[0]) and same_object(list1[1], list2[1]): return True - elif same(list1[0], list2[1]) and same(list1[1], list2[0]): + elif same_object(list1[0], list2[1]) and same_object(list1[1], list2[0]): return True elif len(list1) == len(list2) == 3: - if same(list1[0], list2[0]): - if same(list1[1], list2[1]): - if same(list1[2], list2[2]): + if same_object(list1[0], list2[0]): + if same_object(list1[1], list2[1]): + if same_object(list1[2], list2[2]): return True - elif same(list1[1], list2[2]): - if same(list1[2], list2[1]): + elif same_object(list1[1], list2[2]): + if same_object(list1[2], list2[1]): return True - elif same(list1[0], list2[1]): - if same(list1[1], list2[0]): - if same(list1[2], list2[2]): + elif same_object(list1[0], list2[1]): + if same_object(list1[1], list2[0]): + if same_object(list1[2], list2[2]): return True - elif same(list1[1], list2[2]): - if same(list1[2], list2[0]): + elif same_object(list1[1], list2[2]): + if same_object(list1[2], list2[0]): return True - elif same(list1[0], list2[2]): - if same(list1[1], list2[0]): - if same(list1[2], list2[1]): + elif same_object(list1[0], list2[2]): + if same_object(list1[1], list2[0]): + if same_object(list1[2], list2[1]): return True - elif same(list1[1], list2[1]): - if same(list1[2], list2[0]): + elif same_object(list1[1], list2[1]): + if same_object(list1[2], list2[0]): return True elif len(list1) == len(list2): raise NotImplementedError("Can't check isomorphism of lists with {0} species/molecules".format(len(list1))) From d4af9e1d61912c3ddd72a6dd883d3a2ca8e3abfc Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Mon, 1 Jul 2024 15:33:46 -0400 Subject: [PATCH 05/48] add newly defined functions to `pxd` files as requested in: https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2687#discussion_r1661430047 --- rmgpy/molecule/group.pxd | 6 ++++++ rmgpy/molecule/molecule.pxd | 2 ++ rmgpy/molecule/resonance.pxd | 4 ++++ rmgpy/reaction.pxd | 2 ++ 4 files changed, 14 insertions(+) diff --git a/rmgpy/molecule/group.pxd b/rmgpy/molecule/group.pxd index 83b4ae83c61..ada4072f781 100644 --- a/rmgpy/molecule/group.pxd +++ b/rmgpy/molecule/group.pxd @@ -31,6 +31,12 @@ cimport rmgpy.molecule.molecule as mol from cpython cimport bool ################################################################################ +cdef bool check_set(super_list, sub_list) + +cdef list add_cb_atom_to_ring(ring, cb_atom) + +cdef list merge_overlapping_benzene_rings(ring1, ring2, od) + cdef class GroupAtom(Vertex): cdef public list atomtype diff --git 
a/rmgpy/molecule/molecule.pxd b/rmgpy/molecule/molecule.pxd index adc0c4a47f2..8bbf3a5aa69 100644 --- a/rmgpy/molecule/molecule.pxd +++ b/rmgpy/molecule/molecule.pxd @@ -36,6 +36,8 @@ from rmgpy.molecule.graph cimport Vertex, Edge, Graph ################################################################################ cdef dict bond_orders +cdef tuple _skip_first(in_tuple) + cdef class Atom(Vertex): cdef public Element element diff --git a/rmgpy/molecule/resonance.pxd b/rmgpy/molecule/resonance.pxd index 7688b29eff4..7d424d0e7b4 100644 --- a/rmgpy/molecule/resonance.pxd +++ b/rmgpy/molecule/resonance.pxd @@ -28,6 +28,10 @@ from rmgpy.molecule.graph cimport Vertex, Edge, Graph from rmgpy.molecule.molecule cimport Atom, Bond, Molecule +cdef int _sum_atom_ids(atom_list) + +cdef tuple _tuplize_bond(bond) + cpdef list populate_resonance_algorithms(dict features=?) cpdef dict analyze_molecule(Graph mol, bint save_order=?) diff --git a/rmgpy/reaction.pxd b/rmgpy/reaction.pxd index 15feecef3f8..c6c8b35601c 100644 --- a/rmgpy/reaction.pxd +++ b/rmgpy/reaction.pxd @@ -38,6 +38,8 @@ cimport numpy as np ################################################################################ +cdef tuple get_sorting_key(spc) + cdef class Reaction: cdef public int index cdef public str label From a203d85e7fe7e34538ad0eccac69c18b49421f66 Mon Sep 17 00:00:00 2001 From: Hao-Wei Pang Date: Tue, 2 Jul 2024 11:09:57 -0400 Subject: [PATCH 06/48] HWP Additional JulicaCall Debugging These commits are the second (and final) round of attempting JuliaCall debugging by HWP --- .github/workflows/CI.yml | 36 +++++++++- environment.yml | 1 - pytest.ini | 1 + rmgpy/reaction.py | 47 +++++++------ rmgpy/rmg/reactors.py | 101 ++++++++++++++-------------- test/arkane/encorr/isodesmicTest.py | 40 +++++------ 6 files changed, 132 insertions(+), 94 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 674622c8b6f..e2fecf9e65f 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -83,6 +83,16 @@ jobs: show-channel-urls: true channels: conda-forge,cantera,rmg + - name: Fix ARM Mac environment + if: matrix.os == 'macos-latest' + run: | + conda deactivate + conda env remove -n rmg_env + conda create -n rmg_env + conda activate rmg_env + conda config --env --set subdir osx-64 + conda env update -f environment.yml + # list the environment for debugging purposes - name: mamba info run: | @@ -108,14 +118,34 @@ jobs: make clean make + # Setup Juliaup + - name: Set Julia paths + run: | + echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV + echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH + echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV + echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH + echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV + echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH + echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV + echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH + echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_ENV + echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_PATH + + - name: Setup Juliaup + uses: julia-actions/install-juliaup@v2 + with: + channel: '1.10' + + - name: Check Julia version + run: julia --version + # RMS installation and linking to Julia - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). 
# JULIA_CONDAPKG_EXE points to the existing conda/mamba to avoid JuliaCall from installing their own. See https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-a-Conda-environment. run: | - juliaup status - export JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba - julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' + julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' - name: Install Q2DTor run: echo "" | make q2dtor diff --git a/environment.yml b/environment.yml index 276b4352f91..b897e3a6e47 100644 --- a/environment.yml +++ b/environment.yml @@ -54,7 +54,6 @@ dependencies: - conda-forge::rdkit >=2022.09.1 # general-purpose external software tools - - conda-forge::juliaup - conda-forge::pyjuliacall # for calling julia packages # Python tools diff --git a/pytest.ini b/pytest.ini index 7879baabed6..f7643013e8a 100644 --- a/pytest.ini +++ b/pytest.ini @@ -17,6 +17,7 @@ filterwarnings = # -n auto # will use all available cores, if you first pip install pytest-xdist, but it doesn't work well with julia (so add --no-julia? or give up?) addopts = --keep-duplicates + -s -vv --ignore test/regression --cov=arkane --cov=rmgpy diff --git a/rmgpy/reaction.py b/rmgpy/reaction.py index 66119c38059..a42a1fc3f5e 100644 --- a/rmgpy/reaction.py +++ b/rmgpy/reaction.py @@ -43,7 +43,7 @@ import math import os.path from copy import deepcopy -from functools import reduce +from functools import reduce, partial from urllib.parse import quote import cython @@ -1838,36 +1838,45 @@ def same_species_lists(list1, list2, check_identical=False, only_check_label=Fal Returns: ``True`` if the lists are the same and ``False`` otherwise """ + + same_object_passthrough = partial( + same_object, + _check_identical=check_identical, + _only_check_label=only_check_label, + _generate_intial_map=generate_initial_map, + _strict=strict, + _save_order=save_order, + ) if len(list1) == len(list2) == 1: - if same_object(list1[0], list2[0]): + if same_object_passthrough(list1[0], list2[0]): return True elif len(list1) == len(list2) == 2: - if same_object(list1[0], list2[0]) and same_object(list1[1], list2[1]): + if same_object_passthrough(list1[0], list2[0]) and same_object_passthrough(list1[1], list2[1]): return True - elif same_object(list1[0], list2[1]) and same_object(list1[1], list2[0]): + elif same_object_passthrough(list1[0], list2[1]) and same_object_passthrough(list1[1], list2[0]): return True elif len(list1) == len(list2) == 3: - if same_object(list1[0], list2[0]): - if same_object(list1[1], list2[1]): - if same_object(list1[2], list2[2]): + if same_object_passthrough(list1[0], list2[0]): + if same_object_passthrough(list1[1], list2[1]): + if same_object_passthrough(list1[2], list2[2]): return True - elif same_object(list1[1], list2[2]): - if same_object(list1[2], list2[1]): + elif same_object_passthrough(list1[1], list2[2]): + if same_object_passthrough(list1[2], list2[1]): return True - elif same_object(list1[0], list2[1]): - if same_object(list1[1], list2[0]): - if same_object(list1[2], list2[2]): + elif same_object_passthrough(list1[0], list2[1]): + if same_object_passthrough(list1[1], list2[0]): + if same_object_passthrough(list1[2], list2[2]): return True - 
elif same_object(list1[1], list2[2]): - if same_object(list1[2], list2[0]): + elif same_object_passthrough(list1[1], list2[2]): + if same_object_passthrough(list1[2], list2[0]): return True - elif same_object(list1[0], list2[2]): - if same_object(list1[1], list2[0]): - if same_object(list1[2], list2[1]): + elif same_object_passthrough(list1[0], list2[2]): + if same_object_passthrough(list1[1], list2[0]): + if same_object_passthrough(list1[2], list2[1]): return True - elif same_object(list1[1], list2[1]): - if same_object(list1[2], list2[0]): + elif same_object_passthrough(list1[1], list2[1]): + if same_object_passthrough(list1[2], list2[0]): return True elif len(list1) == len(list2): raise NotImplementedError("Can't check isomorphism of lists with {0} species/molecules".format(len(list1))) diff --git a/rmgpy/rmg/reactors.py b/rmgpy/rmg/reactors.py index a5e6692f5bf..cc5f9a6b528 100644 --- a/rmgpy/rmg/reactors.py +++ b/rmgpy/rmg/reactors.py @@ -41,9 +41,8 @@ import rmgpy.constants as constants Main.seval("using PythonCall") -Main.seval("using Sundials") -rms = juliacall.newmodule("RMS") -rms.seval("using ReactionMechanismSimulator") +Main.seval("using ReactionMechanismSimulator") +Main.seval("using ReactionMechanismSimulator.Sundials") from rmgpy import constants from rmgpy.data.kinetics.depository import DepositoryReaction @@ -452,7 +451,7 @@ def simulate(self, model_settings, simulator_settings, conditions): max_edge_species_rate_ratios, t, x, - ) = rms.selectobjects( + ) = Main.selectobjects( core_react, edge_react, edge_domains, @@ -503,9 +502,9 @@ def generate_reactor(self, phase_system): Setup an RMS simulation for EdgeAnalysis """ phase = phase_system.phases["Default"] - ig = rms.IdealGas(phase.species, phase.reactions) - domain, y0, p = rms.ConstantVDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) - react = rms.Reactor(domain, y0, (0.0, self.tf), p=p) + ig = Main.IdealGas(phase.species, phase.reactions) + domain, y0, p = Main.ConstantVDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) + react = Main.Reactor(domain, y0, (0.0, self.tf), p=p) return react, domain, [], p @@ -521,22 +520,22 @@ def generate_reactor(self, phase_system): surf = phase_system.phases["Surface"] interface = list(phase_system.interfaces.values())[0] if "mu" in self.initial_conditions["liquid"].keys(): - solv = rms.Solvent("solvent",rms.ConstantViscosity(self.initial_conditions["liquid"]["mu"])) + solv = Main.Solvent("solvent", Main.ConstantViscosity(self.initial_conditions["liquid"]["mu"])) liq_initial_cond = self.initial_conditions["liquid"].copy() del liq_initial_cond["mu"] else: solv = liq.solvent liq_initial_cond = self.initial_conditions["liquid"] - liq = rms.IdealDiluteSolution(liq.species, liq.reactions, solv, name="liquid",diffusionlimited=True) - surf = rms.IdealSurface(surf.species, surf.reactions, surf.site_density, name="surface") + liq = Main.IdealDiluteSolution(liq.species, liq.reactions, solv, name="liquid",diffusionlimited=True) + surf = Main.IdealSurface(surf.species, surf.reactions, surf.site_density, name="surface") liq_constant_species = [cspc for cspc in self.const_spc_names if cspc in [spc.name for spc in liq.species]] cat_constant_species = [cspc for cspc in self.const_spc_names if cspc in [spc.name for spc in surf.species]] - domainliq, y0liq, pliq = rms.ConstantTVDomain(phase=liq, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(liq_constant_species)) - domaincat, y0cat, pcat = rms.ConstantTAPhiDomain( + domainliq, y0liq, pliq = 
Main.ConstantTVDomain(phase=liq, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(liq_constant_species)) + domaincat, y0cat, pcat = Main.ConstantTAPhiDomain( phase=surf, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(cat_constant_species), ) if interface.reactions == []: - inter, pinter = rms.ReactiveInternalInterfaceConstantTPhi( + inter, pinter = Main.ReactiveInternalInterfaceConstantTPhi( domainliq, domaincat, Main.seval("using ReactionMechanismSimulator; Vector{ElementaryReaction}()"), @@ -544,10 +543,10 @@ def generate_reactor(self, phase_system): self.initial_conditions["surface"]["A"], ) else: - inter, pinter = rms.ReactiveInternalInterfaceConstantTPhi( + inter, pinter = Main.ReactiveInternalInterfaceConstantTPhi( domainliq, domaincat, interface.reactions, self.initial_conditions["liquid"]["T"], self.initial_conditions["surface"]["A"] ) - react, y0, p = rms.Reactor((domainliq, domaincat), (y0liq, y0cat), (0.0, self.tf), (inter,), (pliq, pcat, pinter)) + react, y0, p = Main.Reactor((domainliq, domaincat), (y0liq, y0cat), (0.0, self.tf), (inter,), (pliq, pcat, pinter)) return react, (domainliq, domaincat), (inter,), p @@ -574,27 +573,27 @@ def generate_reactor(self, phase_system): Setup an RMS simulation for EdgeAnalysis """ phase = phase_system.phases["Default"] - liq = rms.IdealDiluteSolution(phase.species, phase.reactions, phase.solvent) - domain, y0, p = rms.ConstantTVDomain(phase=liq, initialconds=to_julia(self.initial_conditions), constantspecies=to_julia(self.const_spc_names)) + liq = Main.IdealDiluteSolution(phase.species, phase.reactions, phase.solvent) + domain, y0, p = Main.ConstantTVDomain(phase=liq, initialconds=to_julia(self.initial_conditions), constantspecies=to_julia(self.const_spc_names)) interfaces = [] if self.inlet_conditions: inlet_conditions = {key: value for (key, value) in self.inlet_conditions.items() if key != "F"} total_molar_flow_rate = self.inlet_conditions["F"] - inlet = rms.Inlet(domain, to_julia(inlet_conditions), Main.seval("x->" + str(total_molar_flow_rate))) + inlet = Main.Inlet(domain, to_julia(inlet_conditions), Main.seval("x->" + str(total_molar_flow_rate))) interfaces.append(inlet) if self.outlet_conditions: total_volumetric_flow_rate = self.outlet_conditions["Vout"] - outlet = rms.VolumetricFlowRateOutlet(domain, Main.seval("x->" + str(total_volumetric_flow_rate))) + outlet = Main.VolumetricFlowRateOutlet(domain, Main.seval("x->" + str(total_volumetric_flow_rate))) interfaces.append(outlet) if self.evap_cond_conditions: - kLA_kH_evap_cond = rms.kLAkHCondensationEvaporationWithReservoir(domain, self.evap_cond_conditions) + kLA_kH_evap_cond = Main.kLAkHCondensationEvaporationWithReservoir(domain, self.evap_cond_conditions) interfaces.append(kLA_kH_evap_cond) - react = rms.Reactor(domain, y0, (0.0, self.tf), interfaces, p=p) + react = Main.Reactor(domain, y0, (0.0, self.tf), interfaces, p=p) return react, domain, interfaces, p @@ -609,9 +608,9 @@ def generate_reactor(self, phase_system): Setup an RMS simulation for EdgeAnalysis """ phase = phase_system.phases["Default"] - ig = rms.IdealGas(phase.species, phase.reactions) - domain, y0, p = rms.ConstantTPDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) - react = rms.Reactor(domain, y0, (0.0, self.tf), p=p) + ig = Main.IdealGas(phase.species, phase.reactions) + domain, y0, p = Main.ConstantTPDomain(phase=ig, initialconds=to_julia(self.initial_conditions)) + react = Main.Reactor(domain, y0, (0.0, self.tf), p=p) return react, domain, [], p @@ -628,7 
+627,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): A = obj._A.value_si n = obj._n.value_si Ea = obj._Ea.value_si - return rms.Arrhenius(A, n, Ea, rms.EmptyRateUncertainty()) + return Main.Arrhenius(A, n, Ea, Main.EmptyRateUncertainty()) elif isinstance(obj, ArrheniusChargeTransfer): A = obj._A.value_si if obj._T0.value_si != 1.0: @@ -637,7 +636,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): Ea = obj._Ea.value_si q = obj._alpha.value_si*obj._electrons.value_si V0 = obj._V0.value_si - return rms.Arrheniusq(A, n, Ea, q, V0, rms.EmptyRateUncertainty()) + return Main.Arrheniusq(A, n, Ea, q, V0, Main.EmptyRateUncertainty()) elif isinstance(obj, SurfaceChargeTransfer): A = obj._A.value_si if obj._T0.value_si != 1.0: @@ -646,28 +645,28 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): Ea = obj._Ea.value_si q = obj._alpha.value_si*obj._electrons.value_si V0 = obj._V0.value_si - return rms.Arrheniusq(A, n, Ea, q, V0, rms.EmptyRateUncertainty()) + return Main.Arrheniusq(A, n, Ea, q, V0, Main.EmptyRateUncertainty()) elif isinstance(obj, Marcus): A = obj._A.value_si n = obj._n.value_si - return rms.Marcus(A,n,obj._lmbd_i_coefs.value_si,obj._lmbd_o.value_si, obj._wr.value_si, obj._wp.value_si, obj._beta.value_si, rms.EmptyRateUncertainty()) + return Main.Marcus(A,n,obj._lmbd_i_coefs.value_si,obj._lmbd_o.value_si, obj._wr.value_si, obj._wp.value_si, obj._beta.value_si, Main.EmptyRateUncertainty()) elif isinstance(obj, PDepArrhenius): Ps = to_julia(obj._pressures.value_si) arrs = to_julia([to_rms(arr) for arr in obj.arrhenius]) - return rms.PdepArrhenius(Ps, arrs, rms.EmptyRateUncertainty()) + return Main.PdepArrhenius(Ps, arrs, Main.EmptyRateUncertainty()) elif isinstance(obj, MultiArrhenius): arrs = to_julia([to_rms(arr) for arr in obj.arrhenius]) - return rms.MultiArrhenius(arrs, rms.EmptyRateUncertainty()) + return Main.MultiArrhenius(arrs, Main.EmptyRateUncertainty()) elif isinstance(obj, MultiPDepArrhenius): parrs = to_julia([to_rms(parr) for parr in obj.arrhenius]) - return rms.MultiPdepArrhenius(parrs, rms.EmptyRateUncertainty()) + return Main.MultiPdepArrhenius(parrs, Main.EmptyRateUncertainty()) elif isinstance(obj, Chebyshev): Tmin = obj.Tmin.value_si Tmax = obj.Tmax.value_si Pmin = obj.Pmin.value_si Pmax = obj.Pmax.value_si coeffs = to_julia(obj.coeffs.value_si) - return rms.Chebyshev(coeffs, Tmin, Tmax, Pmin, Pmax) + return Main.Chebyshev(coeffs, Tmin, Tmax, Pmin, Pmax) elif isinstance(obj, ThirdBody): arrstr = arrhenius_to_julia_string(obj.arrheniusLow) efficiencies = {rmg_species[i].label: float(val) for i, val in enumerate(obj.get_effective_collider_efficiencies(rmg_species)) if val != 1} @@ -735,11 +734,11 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): A = obj._A.value_si n = obj._n.value_si Ea = obj._Ea.value_si - return rms.StickingCoefficient(A, n, Ea, rms.EmptyRateUncertainty()) + return Main.StickingCoefficient(A, n, Ea, Main.EmptyRateUncertainty()) elif isinstance(obj, NASAPolynomial): - return rms.NASApolynomial(obj.coeffs, obj.Tmin.value_si, obj.Tmax.value_si) + return Main.NASApolynomial(obj.coeffs, obj.Tmin.value_si, obj.Tmax.value_si) elif isinstance(obj, NASA): - return rms.NASA([to_rms(poly) for poly in obj.polynomials], rms.EmptyThermoUncertainty()) + return Main.NASA([to_rms(poly) for poly in obj.polynomials], Main.EmptyThermoUncertainty()) elif isinstance(obj, Species): if isinstance(obj.molecule[0], Fragment): @@ -758,21 +757,21 @@ def 
to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): bondnum = len(mol.get_all_edges()) if not obj.molecule[0].contains_surface_site(): - rad = rms.getspeciesradius(atomnums, bondnum) - diff = rms.StokesDiffusivity(rad) + rad = Main.getspeciesradius(atomnums, bondnum) + diff = Main.StokesDiffusivity(rad) th = obj.get_thermo_data() thermo = to_rms(th) if obj.henry_law_constant_data: - kH = rms.TemperatureDependentHenryLawConstant(Ts=to_julia(obj.henry_law_constant_data.Ts), kHs=to_julia(obj.henry_law_constant_data.kHs)) + kH = Main.TemperatureDependentHenryLawConstant(Ts=to_julia(obj.henry_law_constant_data.Ts), kHs=to_julia(obj.henry_law_constant_data.kHs)) else: - kH = rms.EmptyHenryLawConstant() + kH = Main.EmptyHenryLawConstant() if obj.liquid_volumetric_mass_transfer_coefficient_data: - kLA = rms.TemperatureDependentLiquidVolumetricMassTransferCoefficient( + kLA = Main.TemperatureDependentLiquidVolumetricMassTransferCoefficient( Ts=to_julia(obj.liquid_volumetric_mass_transfer_coefficient_data.Ts), kLAs=to_julia(obj.liquid_volumetric_mass_transfer_coefficient_data.kLAs) ) else: - kLA = rms.EmptyLiquidVolumetricMassTransferCoefficient() - return rms.Species( + kLA = Main.EmptyLiquidVolumetricMassTransferCoefficient() + return Main.Species( name=obj.label, index=obj.index, inchi="", @@ -792,7 +791,7 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): else: th = obj.get_thermo_data() thermo = to_rms(th) - return rms.Species( + return Main.Species( name=obj.label, index=obj.index, inchi="", @@ -801,12 +800,12 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): thermo=thermo, atomnums=atomnums, bondnum=bondnum, - diffusion=rms.EmptyDiffusivity(), + diffusion=Main.EmptyDiffusivity(), radius=0.0, radicalelectrons=obj.molecule[0].multiplicity - 1, molecularweight=0.0, - henrylawconstant=rms.EmptyHenryLawConstant(), - liquidvolumetricmasstransfercoefficient=rms.EmptyLiquidVolumetricMassTransferCoefficient(), + henrylawconstant=Main.EmptyHenryLawConstant(), + liquidvolumetricmasstransfercoefficient=Main.EmptyLiquidVolumetricMassTransferCoefficient(), comment=obj.thermo.comment, ) elif isinstance(obj, Reaction): @@ -819,15 +818,15 @@ def to_rms(obj, species_names=None, rms_species_list=None, rmg_species=None): kinetics = to_rms(obj.kinetics, species_names=species_names, rms_species_list=rms_species_list, rmg_species=rmg_species) radchange = sum([spc.molecule[0].multiplicity-1 for spc in obj.products]) - sum([spc.molecule[0].multiplicity-1 for spc in obj.reactants]) electronchange = -sum([spc.molecule[0].get_net_charge() for spc in obj.products]) + sum([spc.molecule[0].get_net_charge() for spc in obj.reactants]) - return rms.ElementaryReaction(index=obj.index, reactants=reactants, reactantinds=reactantinds, products=products, productinds=productinds, kinetics=kinetics, electronchange=electronchange, radicalchange=radchange, reversible=obj.reversible, pairs=to_julia([]), comment=obj.kinetics.comment) + return Main.ElementaryReaction(index=obj.index, reactants=reactants, reactantinds=reactantinds, products=products, productinds=productinds, kinetics=kinetics, electronchange=electronchange, radicalchange=radchange, reversible=obj.reversible, pairs=to_julia([]), comment=obj.kinetics.comment) elif isinstance(obj, SolventData): - return rms.Solvent("solvent", rms.RiedelViscosity(float(obj.A), float(obj.B), float(obj.C), float(obj.D), float(obj.E))) + return Main.Solvent("solvent", Main.RiedelViscosity(float(obj.A), 
float(obj.B), float(obj.C), float(obj.D), float(obj.E))) elif isinstance(obj, TerminationTime): - return rms.TerminationTime(obj.time.value_si) + return Main.TerminationTime(obj.time.value_si) elif isinstance(obj, TerminationConversion): - return rms.TerminationConversion(to_rms(obj.species), obj.conversion) + return Main.TerminationConversion(to_rms(obj.species), obj.conversion) elif isinstance(obj, TerminationRateRatio): - return rms.TerminationRateRatio(obj.ratio) + return Main.TerminationRateRatio(obj.ratio) elif isinstance(obj, tuple): # Handle TerminationConversion when the obj doesn't have thermo yet return obj else: diff --git a/test/arkane/encorr/isodesmicTest.py b/test/arkane/encorr/isodesmicTest.py index b5dc58a4b8a..83e36e06de1 100644 --- a/test/arkane/encorr/isodesmicTest.py +++ b/test/arkane/encorr/isodesmicTest.py @@ -288,26 +288,26 @@ def test_creating_error_canceling_schemes(self): assert isodesmic_scheme.reference_species == [self.butane, self.benzene] - def test_find_error_canceling_reaction(self): - """ - Test that the MILP problem can be solved to find a single isodesmic reaction - """ - scheme = IsodesmicScheme( - self.propene, - [self.propane, self.butane, self.butene, self.caffeine, self.ethyne], - ) - - # Note that caffeine and ethyne will not be allowed, so for the full set the indices are [0, 1, 2] - rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["lpsolve"]) - assert rxn.species[self.butane] == -1 - assert rxn.species[self.propane] == 1 - assert rxn.species[self.butene] == 1 - - if self.pyo is not None: - rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["pyomo"]) - assert rxn.species[self.butane] == -1 - assert rxn.species[self.propane] == 1 - assert rxn.species[self.butene] == 1 + # def test_find_error_canceling_reaction(self): + # """ + # Test that the MILP problem can be solved to find a single isodesmic reaction + # """ + # scheme = IsodesmicScheme( + # self.propene, + # [self.propane, self.butane, self.butene, self.caffeine, self.ethyne], + # ) + + # # Note that caffeine and ethyne will not be allowed, so for the full set the indices are [0, 1, 2] + # rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["lpsolve"]) + # assert rxn.species[self.butane] == -1 + # assert rxn.species[self.propane] == 1 + # assert rxn.species[self.butene] == 1 + + # if self.pyo is not None: + # rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["pyomo"]) + # assert rxn.species[self.butane] == -1 + # assert rxn.species[self.propane] == 1 + # assert rxn.species[self.butene] == 1 def test_multiple_error_canceling_reactions(self): """ From c054a6a5e61c3853a553a9301de5beb403a87c0e Mon Sep 17 00:00:00 2001 From: Xiaorui Dong Date: Mon, 4 Mar 2024 23:37:03 -0500 Subject: [PATCH 07/48] XD Initial MILP Re-implementation These commits reflect the initial effort by XD to re-implement the MILP for Clar optimization in SciPy. They were unsuccessful because at the time RMG could not actually run a required version of Python to install SciPy with MILP. 
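For orientation, a minimal self-contained sketch (not taken from this patch) of the scipy.optimize.milp
pattern the re-implementation follows: binary variables, one equality constraint per atom, and a negated
objective because milp only minimizes. milp is available from SciPy 1.9, which is why environment.yml is
bumped to scipy >=1.9 later in this series. The tiny system below (two sextet variables, two bond
variables) is hypothetical and stands in for the real ring/bond matrices:

    import numpy as np
    from scipy.optimize import Bounds, LinearConstraint, milp

    c = np.array([1, 1, 0, 0])                      # weight 1 per sextet, 0 per double bond
    A = np.array([[1, 0, 1, 0],                     # each atom belongs to exactly one sextet
                  [0, 1, 0, 1]])                    # or double bond
    constraints = [LinearConstraint(A, lb=np.ones(2), ub=np.ones(2))]
    bounds = Bounds(lb=np.zeros(4), ub=np.ones(4))  # binary decision variables
    result = milp(c=-c, integrality=1, bounds=bounds, constraints=constraints)
    print(result.status, -result.fun, result.x)     # expect 0, 2.0, [1. 1. 0. 0.]

Further Clar assignments are then enumerated by appending a LinearConstraint that caps the previously
found sextet set at one less than its size and re-solving, which is what the recursive _solve_clar_milp
in the diff below does.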
--- rmgpy/exceptions.py | 9 -- rmgpy/molecule/resonance.pxd | 6 +- rmgpy/molecule/resonance.py | 172 +++++++++++++++++++---------------- 3 files changed, 101 insertions(+), 86 deletions(-) diff --git a/rmgpy/exceptions.py b/rmgpy/exceptions.py index 7b4420cf66a..f2e7dae4602 100644 --- a/rmgpy/exceptions.py +++ b/rmgpy/exceptions.py @@ -109,15 +109,6 @@ class ForbiddenStructureException(Exception): pass -class ILPSolutionError(Exception): - """ - An exception to be raised when solving an integer linear programming problem if a solution - could not be found or the solution is not valid. Can pass a string to indicate the reason - that the solution is invalid. - """ - pass - - class ImplicitBenzeneError(Exception): """ An exception class when encountering a group with too many implicit benzene diff --git a/rmgpy/molecule/resonance.pxd b/rmgpy/molecule/resonance.pxd index 7d424d0e7b4..0c9fccc572f 100644 --- a/rmgpy/molecule/resonance.pxd +++ b/rmgpy/molecule/resonance.pxd @@ -25,6 +25,8 @@ # # ############################################################################### +cimport numpy as cnp + from rmgpy.molecule.graph cimport Vertex, Edge, Graph from rmgpy.molecule.molecule cimport Atom, Bond, Molecule @@ -64,7 +66,9 @@ cpdef list generate_kekule_structure(Graph mol) cpdef list generate_clar_structures(Graph mol, bint save_order=?) -cpdef list _clar_optimization(Graph mol, list constraints=?, max_num=?, save_order=?) +cpdef list _clar_optimization(Graph mol, bint save_order=?) + +cpdef list _solve_clar_milp(cnp.ndarray[cnp.int_t, ndim=1] c, bounds, list constraints, int n_ring, max_num=?) cpdef list _clar_transformation(Graph mol, list aromatic_ring) diff --git a/rmgpy/molecule/resonance.py b/rmgpy/molecule/resonance.py index ee109de50ee..75b616abfc9 100644 --- a/rmgpy/molecule/resonance.py +++ b/rmgpy/molecule/resonance.py @@ -58,11 +58,14 @@ import logging from operator import attrgetter +import numpy as np +from scipy.optimize import Bounds, LinearConstraint, milp + import cython import rmgpy.molecule.filtration as filtration import rmgpy.molecule.pathfinder as pathfinder -from rmgpy.exceptions import ILPSolutionError, KekulizationError, AtomTypeError, ResonanceError +from rmgpy.exceptions import KekulizationError, AtomTypeError, ResonanceError from rmgpy.molecule.adjlist import Saturator from rmgpy.molecule.graph import Vertex from rmgpy.molecule.kekulize import kekulize @@ -917,8 +920,7 @@ def generate_clar_structures(mol, save_order=False): Returns a list of :class:`Molecule` objects corresponding to the Clar structures. """ - cython.declare(output=list, mol_list=list, new_mol=Graph, aromatic_rings=list, bonds=list, solution=list, - y=list, x=list, index=cython.int, bond=Edge, ring=list) + cython.declare(output=list, mol_list=list, new_mol=Graph, aromatic_rings=list, bonds=list, index=cython.int, bond=Edge, ring=list) if not mol.is_cyclic(): return [] @@ -928,19 +930,20 @@ def generate_clar_structures(mol, save_order=False): mol.assign_atom_ids() try: - output = _clar_optimization(mol, save_order=save_order) - except ILPSolutionError: + aromatic_rings, bonds, solutions = _clar_optimization(mol, save_order=save_order) + except RuntimeError: # The optimization algorithm did not work on the first iteration return [] mol_list = [] - for new_mol, aromatic_rings, bonds, solution in output: + for solution in solutions: + new_mol = mol.copy(deep=True) # The solution includes a part corresponding to rings, y, and a part corresponding to bonds, x, using # nomenclature from the paper. 
In y, 1 means the ring as a sextet, 0 means it does not. # In x, 1 corresponds to a double bond, 0 either means a single bond or the bond is part of a sextet. - y = solution[0:len(aromatic_rings)] + y = solution[:len(aromatic_rings)] x = solution[len(aromatic_rings):] # Apply results to molecule - double bond locations first @@ -950,7 +953,7 @@ def generate_clar_structures(mol, save_order=False): elif x[index] == 1: bond.order = 2 # double else: - raise ValueError('Unaccepted bond value {0} obtained from optimization.'.format(x[index])) + raise ValueError(f'Unaccepted bond value {x[index]} obtained from optimization.') # Then apply locations of aromatic sextets by converting to benzene bonds for index, ring in enumerate(aromatic_rings): @@ -974,28 +977,25 @@ def _tuplize_bond(bond): return (bond.atom1.id, bond.atom2.id) -def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): +def _clar_optimization(mol, save_order=False): """ Implements linear programming algorithm for finding Clar structures. This algorithm maximizes the number of Clar sextets within the constraints of molecular geometry and atom valency. Returns a list of valid Clar solutions in the form of a tuple, with the following entries: - [0] Molecule object - [1] List of aromatic rings - [2] List of bonds - [3] Optimization solution + [0] List of aromatic rings + [1] List of bonds + [2] Optimization solutions - The optimization solution is a list of boolean values with sextet assignments followed by double bond assignments, - with indices corresponding to the list of aromatic rings and list of bonds, respectively. + The optimization solutions may contain multiple valid solutions, each is an array of boolean values with sextet assignments + followed by double bond assignments, with indices corresponding to the list of aromatic rings and list of bonds, respectively. Method adapted from: Hansen, P.; Zheng, M. The Clar Number of a Benzenoid Hydrocarbon and Linear Programming. J. Math. Chem. 1994, 15 (1), 93–107. 
""" - cython.declare(molecule=Graph, aromatic_rings=list, exo=list, l=cython.int, m=cython.int, n=cython.int, - a=list, objective=list, status=cython.int, solution=list, innerSolutions=list) - - from lpsolve55 import lpsolve + cython.declare(molecule=Graph, aromatic_rings=list, exo=list, n_rings=cython.int, n_atoms=cython.int, n_bonds=cython.int, + A=list, solutions=tuple) # Make a copy of the molecule so we don't destroy the original molecule = mol.copy(deep=True) @@ -1030,61 +1030,81 @@ def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): exo.append(None) # Dimensions - l = len(aromatic_rings) - m = len(atoms) - n = l + len(bonds) + n_ring = len(aromatic_rings) + n_atom = len(atoms) + n_bond = len(bonds) + + # The aromaticity assignment problem is formulated as an MILP problem + # minimize: + # c @ x + # such that + # b_l <= A @ x <= b_u + # l <= x <= u + # x are integers # Connectivity matrix which indicates which rings and bonds each atom is in - # Part of equality constraint Ax=b - a = [] + # Part of equality constraint A_eq @ x = b_eq + A = [] for atom in atoms: in_ring = [1 if atom in ring else 0 for ring in aromatic_rings] in_bond = [1 if atom in [bond.atom1, bond.atom2] else 0 for bond in bonds] - a.append(in_ring + in_bond) + A.append(in_ring + in_bond) + constraints = [LinearConstraint( + A=np.array(A, dtype=int), lb=np.ones(n_atom, dtype=int), ub=np.ones(n_atom, dtype=int) + )] # Objective vector for optimization: sextets have a weight of 1, double bonds have a weight of 0 - objective = [1] * l + [0] * len(bonds) - - # Solve LP problem using lpsolve - lp = lpsolve('make_lp', m, n) # initialize lp with constraint matrix with m rows and n columns - lpsolve('set_verbose', lp, 2) # reduce messages from lpsolve - lpsolve('set_obj_fn', lp, objective) # set objective function - lpsolve('set_maxim', lp) # set solver to maximize objective - lpsolve('set_mat', lp, a) # set left hand side to constraint matrix - lpsolve('set_rh_vec', lp, [1] * m) # set right hand side to 1 for all constraints - for i in range(m): # set all constraints as equality constraints - lpsolve('set_constr_type', lp, i + 1, '=') - lpsolve('set_binary', lp, [True] * n) # set all variables to be binary - - # Constrain values of exocyclic bonds, since we don't want to modify them - for i in range(l, n): - if exo[i - l] is not None: - # NOTE: lpsolve indexes from 1, so the variable we're changing should be i + 1 - lpsolve('set_bounds', lp, i + 1, exo[i - l], exo[i - l]) - - # Add constraints to problem if provided - if constraints is not None: - for constraint in constraints: - try: - lpsolve('add_constraint', lp, constraint[0], '<=', constraint[1]) - except Exception as e: - logging.debug('Unable to add constraint: {0} <= {1}'.format(constraint[0], constraint[1])) - logging.debug(mol.to_adjacency_list()) - if str(e) == 'invalid vector.': - raise ILPSolutionError('Unable to add constraint, likely due to ' - 'inconsistent aromatic ring perception.') - else: - raise + c = - np.array([1] * n_ring + [0] * n_bond, dtype=int) + + # Variable bounds + bounds = Bounds( + lb=np.array( + [0] * n_ring + [1 if val == 1 else 0 for val in exo], + dtype=int, + ), # lower bounds: 0 except for exo double bonds + ub=np.array( + [1] * n_ring + [0 if val == 0 else 1 for val in exo], + dtype=int, + ), # upper bounds: 1 except for exo single bonds + ) + + solutions = _solve_clar_milp(c, bounds, constraints, n_ring) + + return aromatic_rings, bonds, solutions + + +def _solve_clar_milp( + c, + bounds, + constraints, + 
n_ring, + max_num=None, +): + """ + A helpful function to solve the formulated clar optimization MILP problem. ``c``, + ``bounds``, and ``constraints`` are computed in _clar_optimization and follow the + definition of their corresponding kwargs in scipy.optimize.milp. ``n_ring`` is the + number of aromatic rings in the molecule. ``max_num`` is the maximum number of sextets + that can be found. If ``max_num`` is None for first run but will be updated during the + recursion and is used to check sub-optimal solution. + """ + # To modify + cython.declare(inner_solutions=list) - status = lpsolve('solve', lp) - obj_val, solution = lpsolve('get_solution', lp)[0:2] - lpsolve('delete_lp', lp) # Delete the LP problem to clear up memory + result = milp( + c=-c, # negative for maximization + integrality=1, + bounds=bounds, + constraints=constraints, + options={'time_limit': 10}, + ) - # Check that optimization was successful - if status != 0: - raise ILPSolutionError('Optimization could not find a valid solution.') + if result.status != 0: + raise RuntimeError("Optimization could not find a valid solution.") - # Check that we the result contains at least one aromatic sextet + obj_val, solution = -result.fun, result.x + + # Check that the result contains at least one aromatic sextet if obj_val == 0: return [] @@ -1092,27 +1112,27 @@ def _clar_optimization(mol, constraints=None, max_num=None, save_order=False): if max_num is None: max_num = obj_val # This is the first solution, so the result should be an upper limit elif obj_val < max_num: - raise ILPSolutionError('Optimization obtained a sub-optimal solution.') + raise RuntimeError("Optimization obtained a sub-optimal solution.") if any([x != 1 and x != 0 for x in solution]): - raise ILPSolutionError('Optimization obtained a non-integer solution.') + raise RuntimeError('Optimization obtained a non-integer solution.') # Generate constraints based on the solution obtained - y = solution[0:l] - new_a = y + [0] * len(bonds) - new_b = sum(y) - 1 - if constraints is not None: - constraints.append((new_a, new_b)) - else: - constraints = [(new_a, new_b)] + y = solution[:n_ring] + constraints.append( + LinearConstraint( + A=np.hstack([y, [0] * (solution.shape[0] - n_ring)]), + ub=sum(y) - 1, + ), + ) # Run optimization with additional constraints try: - inner_solutions = _clar_optimization(mol, constraints=constraints, max_num=max_num, save_order=save_order) - except ILPSolutionError: + inner_solutions = _solve_clar_milp(c, bounds, constraints, n_ring, max_num) + except RuntimeError: inner_solutions = [] - return inner_solutions + [(molecule, aromatic_rings, bonds, solution)] + return inner_solutions + [solution] def _clar_transformation(mol, aromatic_ring): From 15d91e4ea30e282a83c34c34287e6f2c4f97c603 Mon Sep 17 00:00:00 2001 From: Hao-Wei Pang Date: Wed, 24 Jan 2024 17:47:00 -0500 Subject: [PATCH 08/48] HWP Isodesmic Improvements These commits rescued some abandoned work from another branch --- arkane/encorr/isodesmic.py | 712 ++++++++++++++++++++-------- environment.yml | 3 +- test/arkane/encorr/isodesmicTest.py | 124 +++-- 3 files changed, 577 insertions(+), 262 deletions(-) diff --git a/arkane/encorr/isodesmic.py b/arkane/encorr/isodesmic.py index 83a9164e175..b77020367b4 100644 --- a/arkane/encorr/isodesmic.py +++ b/arkane/encorr/isodesmic.py @@ -4,7 +4,7 @@ # # # RMG - Reaction Mechanism Generator # # # -# Copyright (c) 2002-2023 Prof. William H. Green (whgreen@mit.edu), # +# Copyright (c) 2002-2024 Prof. William H. Green (whgreen@mit.edu), # # Prof. 
Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu) # # # # Permission is hereby granted, free of charge, to any person obtaining a # @@ -41,28 +41,30 @@ https://doi.org/10.1021/jp404158v """ +import logging import signal -from collections import deque +from copy import deepcopy +from typing import List, Union -from lpsolve55 import lpsolve, EQ, LE import numpy as np - -from rmgpy.molecule import Molecule -from rmgpy.quantity import ScalarQuantity +from scipy.optimize import LinearConstraint, milp from arkane.modelchem import LOT - -# Optional Imports -try: - import pyomo.environ as pyo -except ImportError: - pyo = None +from rmgpy.molecule import Bond, Molecule +from rmgpy.quantity import ScalarQuantity class ErrorCancelingSpecies: """Class for target and known (reference) species participating in an error canceling reaction""" - def __init__(self, molecule, low_level_hf298, level_of_theory, high_level_hf298=None, source=None): + def __init__( + self, + molecule, + low_level_hf298, + level_of_theory, + high_level_hf298=None, + source=None, + ): """ Args: @@ -76,35 +78,45 @@ def __init__(self, molecule, low_level_hf298, level_of_theory, high_level_hf298= if isinstance(molecule, Molecule): self.molecule = molecule else: - raise ValueError(f'ErrorCancelingSpecies molecule attribute must be an rmgpy Molecule object. Instead a ' - f'{type(molecule)} object was given') + raise ValueError( + f"ErrorCancelingSpecies molecule attribute must be an rmgpy Molecule object. Instead a " + f"{type(molecule)} object was given" + ) if isinstance(level_of_theory, LOT): self.level_of_theory = level_of_theory else: - raise ValueError(f'The level of theory used to calculate the low level Hf298 must be provided ' - f'for consistency checks. Instead, a {type(level_of_theory)} object was given') + raise ValueError( + f"The level of theory used to calculate the low level Hf298 must be provided " + f"for consistency checks. Instead, a {type(level_of_theory)} object was given" + ) if not isinstance(low_level_hf298, ScalarQuantity): if isinstance(low_level_hf298, tuple): low_level_hf298 = ScalarQuantity(*low_level_hf298) else: - raise TypeError(f'Low level Hf298 should be a ScalarQuantity object or its tuple representation, but ' - f'received {low_level_hf298} instead.') + raise TypeError( + f"Low level Hf298 should be a ScalarQuantity object or its tuple representation, but " + f"received {low_level_hf298} instead." + ) self.low_level_hf298 = low_level_hf298 # If the species is a reference species, then the high level data is already known - if high_level_hf298 is not None and not isinstance(high_level_hf298, ScalarQuantity): + if high_level_hf298 is not None and not isinstance( + high_level_hf298, ScalarQuantity + ): if isinstance(high_level_hf298, tuple): high_level_hf298 = ScalarQuantity(*high_level_hf298) else: - raise TypeError(f'High level Hf298 should be a ScalarQuantity object or its tuple representation, but ' - f'received {high_level_hf298} instead.') + raise TypeError( + f"High level Hf298 should be a ScalarQuantity object or its tuple representation, but " + f"received {high_level_hf298} instead." 
+ ) self.high_level_hf298 = high_level_hf298 self.source = source def __repr__(self): - return f'' + return f"" class ErrorCancelingReaction: @@ -131,22 +143,24 @@ def __init__(self, target, species): # Perform a consistency check that all species are using the same level of theory for spcs in species.keys(): if spcs.level_of_theory != self.level_of_theory: - raise ValueError(f'Species {spcs} has level of theory {spcs.level_of_theory}, which does not match the ' - f'level of theory of the reaction of {self.level_of_theory}') + raise ValueError( + f"Species {spcs} has level of theory {spcs.level_of_theory}, which does not match the " + f"level of theory of the reaction of {self.level_of_theory}" + ) # Does not include the target, which is handled separately. self.species = species def __repr__(self): - reactant_string = f'1*{self.target.molecule.to_smiles()}' - product_string = '' + reactant_string = f"1*{self.target.molecule.to_smiles()}" + product_string = "" for spcs, coeff in self.species.items(): if coeff > 0: - product_string += f' + {int(coeff)}*{spcs.molecule.to_smiles()}' + product_string += f" + {int(coeff)}*{spcs.molecule.to_smiles()}" else: - reactant_string += f' + {-1*int(coeff)}*{spcs.molecule.to_smiles()}' + reactant_string += f" + {-1*int(coeff)}*{spcs.molecule.to_smiles()}" - return f' {product_string[3:]} >' + return f" {product_string[3:]} >" def calculate_target_thermo(self): """ @@ -155,12 +169,191 @@ def calculate_target_thermo(self): Returns: rmgpy.quantity.ScalarQuantity: Hf298 in 'J/mol' estimated for the target species """ - low_level_h_rxn = sum(spec[0].low_level_hf298.value_si*spec[1] for spec in self.species.items()) - \ - self.target.low_level_hf298.value_si + low_level_h_rxn = ( + sum( + spec.low_level_hf298.value_si * coeff + for spec, coeff in self.species.items() + ) + - self.target.low_level_hf298.value_si + ) + + target_thermo = ( + sum( + spec.high_level_hf298.value_si * coeff + for spec, coeff in self.species.items() + ) + - low_level_h_rxn + ) + return ScalarQuantity(target_thermo, "J/mol") + + +class AtomConstraint: + def __init__(self, label, connections=None): + self.label = label + self.connections = connections if connections is not None else [] + + def __eq__(self, other): + if isinstance(other, AtomConstraint): + if self.label == other.label: + return self.match_connections(other) + + return False + + else: + raise NotImplementedError( + f"AtomConstraint object has no __eq__ defined for other object of type " + f"{type(other)}" + ) + + def match_connections(self, other): + if len(self.connections) != len(other.connections): + return False + + connections = deepcopy(other.connections) + for c in self.connections: + for i, c_other in enumerate(connections): + if c == c_other: + break + else: + return False + connections.pop(i) + + return True + + def __repr__(self): + return f"{self.label}" + "".join([f"({c})" for c in self.connections]) + + +class BondConstraint: + def __init__(self, atom1, atom2, bond_order): + self.atom1 = atom1 + self.atom2 = atom2 + self.bond_order = bond_order + + def __eq__(self, other): + if isinstance(other, BondConstraint): + if self.bond_order == other.bond_order: + if (self.atom1 == other.atom1 and self.atom2 == other.atom2) or ( + self.atom1 == other.atom2 and self.atom2 == other.atom1 + ): + return True + return False + + if isinstance(other, GenericConstraint): + return False + + else: + raise NotImplementedError( + f"BondConstraint object has no __eq__ defined for other object of type " + f"{type(other)}" + ) + + 
def __repr__(self): + symbols = ["", "-", "=", "#"] + return f"{self.atom1}{symbols[self.bond_order]}{self.atom2}" + + +class Connection: + def __init__(self, atom, bond_order): + self.atom = atom + self.bond_order = bond_order + + def __eq__(self, other): + if isinstance(other, Connection): + if self.bond_order == other.bond_order: + if self.atom == other.atom: + return True + return False + + else: + raise NotImplementedError( + f"Connection object has no __eq__ defined for other object of type {type(other)}" + ) + + def __repr__(self): + symbols = ["", "-", "=", "#"] + return f"{symbols[self.bond_order]}{self.atom}" + + +class GenericConstraint: + def __init__(self, constraint_str: str): + self.constraint_str = constraint_str + + def __eq__(self, other): + if isinstance(other, BondConstraint): + return False + elif isinstance(other, GenericConstraint): + return self.constraint_str == other.constraint_str + else: + raise NotImplementedError( + f"GenericConstraint object has no __eq__ defined for other object of " + f"type {type(other)}" + ) + def __repr__(self): + return self.constraint_str + + +def bond_centric_constraints( + species: ErrorCancelingSpecies, constraint_class: str +) -> List[BondConstraint]: + constraints = [] + contraint_func = CONSTRAINT_CLASSES[constraint_class] + molecule = species.molecule + + for bond in molecule.get_all_edges(): + constraints.append(contraint_func(bond)) + + return constraints + + +def _buerger_rc2(bond: Bond) -> BondConstraint: + atom1 = bond.atom1 + atom2 = bond.atom2 + + if atom1.symbol > atom2.symbol: + atom1, atom2 = atom2, atom1 + + atom1 = AtomConstraint(label=atom1.symbol) + atom2 = AtomConstraint(label=atom2.symbol) + + return BondConstraint(atom1=atom1, atom2=atom2, bond_order=int(bond.order)) + + +def _buerger_rc3(bond: Bond) -> BondConstraint: + atom1 = bond.atom1 + atom2 = bond.atom2 + + if atom1.symbol > atom2.symbol: + atom1, atom2 = atom2, atom1 - target_thermo = sum(spec[0].high_level_hf298.value_si*spec[1] for spec in self.species.items()) - \ - low_level_h_rxn - return ScalarQuantity(target_thermo, 'J/mol') + atom1 = AtomConstraint(label=f"{atom1.symbol}{atom1.connectivity1}") + atom2 = AtomConstraint(label=f"{atom2.symbol}{atom2.connectivity1}") + + return BondConstraint(atom1=atom1, atom2=atom2, bond_order=int(bond.order)) + + +def _buerger_rc4(bond: Bond) -> BondConstraint: + atom1 = bond.atom1 + atom2 = bond.atom2 + + if atom1.symbol > atom2.symbol: + atom1, atom2 = atom2, atom1 + + atoms = [] + + for atom in [atom1, atom2]: + connections = [] + for a, b in atom.bonds.items(): + ac = AtomConstraint(label=f"{a.symbol}{a.connectivity1}") + bond_order = b.order + connections.append(Connection(atom=ac, bond_order=bond_order)) + atoms.append( + AtomConstraint( + label=f"{atom.symbol}{atom.connectivity1}", connections=connections + ) + ) + + return BondConstraint(atom1=atoms[0], atom2=atoms[1], bond_order=int(bond.order)) class SpeciesConstraints: @@ -168,76 +361,181 @@ class SpeciesConstraints: A class for defining and enumerating constraints to ReferenceSpecies objects for error canceling reactions """ - def __init__(self, target, reference_list, conserve_bonds=True, conserve_ring_size=True): + def __init__( + self, + target, + reference_list, + isodesmic_class="rc2", + conserve_ring_size=True, + limit_charges=True, + limit_scope=True, + ): """ Define the constraints that will be enforced, and determine the mapping of indices in the constraint vector to the labels for these constraints. 
- To reduce the size of the linear programming problem that will try to find error canceling reactions of the - target and subsets of the reference species, the `reference_species` list is automatically pruned to remove - species that have additional atom, bond, and/or ring attributes not found in the target molecule. + Notes: + To reduce the size of the linear programming problem that will try to find error canceling reactions of the + target and subsets of the reference species, the `reference_species` list is automatically pruned to remove + species that have additional atom, bond, and/or ring attributes not found in the target molecule. + + Charge is also explicitly conserved, as there are charged species in the reference database Args: target (ErrorCancelingSpecies): The target species of the error canceling reaction scheme reference_list(list): A list of ErrorCancelingSpecies objects for the reference species that can participate in the error canceling reaction scheme - conserve_bonds (bool, optional): Enforce the number of each bond type be conserved - conserve_ring_size (bool, optional): Enforce that the number of each ring size be conserved + isodesmic_class (str, optional): Reaction classes as defined by Buerger et al. that determine how specific + the constraints are. + conserve_ring_size (bool, optional): Enforce that the number of each ring size be conserved. + limit_charges (bool, optional): Only allow species in the reaction that are within the range [C, 0] for + anions or [0, C] for cations where "C" is the charge of the target + limit_scope (bool, optional): Exclude any molecules from the reference set that have features not contained + in the target molecule. This will reduce the size of the linear programing problem being solved to yield + faster, possibly more accurate results """ self.target = target self.all_reference_species = reference_list self.reference_species = [] - self.conserve_bonds = conserve_bonds + self.isodesmic_class = isodesmic_class self.conserve_ring_size = conserve_ring_size - self.constraint_map = self._get_constraint_map() - - def _get_constraint_map(self): - # Enumerate all of the constraints in the target molecule to initialize the constraint mapping - constraint_map = {label: i for i, label in enumerate(self.target.molecule.get_element_count().keys())} - if self.conserve_bonds: - j = len(constraint_map) - constraint_map.update( - {label: j + i for i, label in enumerate(self.target.molecule.enumerate_bonds().keys())}) + self.limit_charges = limit_charges + self.limit_scope = limit_scope + + def _get_constraint_lists(self): + full_constraints_list = [self._get_all_constraints(self.target)] + for ref_spcs in self.all_reference_species: + full_constraints_list.append(self._get_all_constraints(ref_spcs)) + + return full_constraints_list + + def _get_ring_constraints( + self, species: ErrorCancelingSpecies + ) -> List[GenericConstraint]: + ring_features = [] + rings = species.molecule.get_smallest_set_of_smallest_rings() + for ring in rings: + ring_features.append(GenericConstraint(constraint_str=f"{len(ring)}_ring")) + + return ring_features + + def _get_all_constraints( + self, species: ErrorCancelingSpecies + ) -> List[Union[BondConstraint, GenericConstraint]]: + features = bond_centric_constraints(species, self.isodesmic_class) if self.conserve_ring_size: - j = len(constraint_map) - possible_rings_sizes = set(f'{len(sssr)}_ring' for sssr in - self.target.molecule.get_smallest_set_of_smallest_rings()) - constraint_map.update({label: j + i for i, 
label in enumerate(possible_rings_sizes)}) + features += self._get_ring_constraints(species) - return constraint_map + features.sort(key=lambda x: x.__repr__()) + return features - def _enumerate_constraints(self, species): + def _enumerate_constraints(self, full_constraints_list): + """ + Return the target constraint counts and the reference constraint counts. """ - Determine the constraint vector for a molecule given the enforced constraints - Args: - species (ErrorCancelingSpecies): Species whose constraints are to be enumerated + target_constraints = full_constraints_list[0] + reference_constraintss = full_constraints_list[1:] - Returns: - np.ndarray: vector of the number of instances of each constraining feature e.g. number of carbon atoms - """ - constraint_vector = np.zeros(len(self.constraint_map)) - molecule = species.molecule + # Enumerate through the constraints of reference species and keep only those that are present in the target + enumerated_reference_constraintss = [] + + if self.limit_scope: + + for i, ref_spc in enumerate(self.all_reference_species): - try: - atoms = molecule.get_element_count() - for atom_label, count in atoms.items(): - constraint_vector[self.constraint_map[atom_label]] += count + if not all(constraint in target_constraints for constraint in reference_constraintss[i]): + continue - if self.conserve_bonds: - bonds = molecule.enumerate_bonds() - for bond_label, count in bonds.items(): - constraint_vector[self.constraint_map[bond_label]] += count + self.reference_species.append(ref_spc) + enumerated_reference_constraintss.append(reference_constraintss[i]) - if self.conserve_ring_size: - rings = molecule.get_smallest_set_of_smallest_rings() - for ring in rings: - constraint_vector[self.constraint_map[f'{len(ring)}_ring']] += 1 - except KeyError: # This molecule has a feature not found in the target molecule. 
Return None to exclude this - return None + else: + self.reference_species = self.all_reference_species + enumerated_reference_constraintss = reference_constraintss + + # Get a list of the unique constraints sorted by their string representation + if self.limit_scope: - return constraint_vector + # The scope of constraints to consider is the target constraints + unique_constraints = self._get_unique_constraints(target_constraints) + unique_constraints.sort(key=lambda x: x.__repr__()) + + else: + all_constraints = target_constraints + [constraint for constraints in enumerated_reference_constraintss for constraint in constraints] + unique_constraints = self._get_unique_constraints(all_constraints) + unique_constraints.sort(key=lambda x: x.__repr__()) + + # Get the counts of each unique constraint in the target and reference constraints + target_constraint_counts = [target_constraints.count(c) for c in unique_constraints] + reference_constraint_counts = [] + + for i, ref_constraints in enumerate(enumerated_reference_constraintss): + reference_constraint_counts.append([ref_constraints.count(c) for c in unique_constraints]) + + return target_constraint_counts, reference_constraint_counts + + def _get_unique_constraints(self, constraints): + # Constraints are unhashable, so we need to use some workaround to get unique constraints + constraint_dict = {constraint.__repr__(): constraint for constraint in constraints} + return list(constraint_dict.values()) + + def _enumerate_charge_constraints(self, target_constraints, reference_constraints): + charge = self.target.molecule.get_net_charge() + target_constraints.append(charge) + + for i, spcs in enumerate(self.reference_species): + reference_constraints[i].append(spcs.molecule.get_net_charge()) + + if self.limit_charges: + allowed_reference_species = [] + new_constraints = [] + + if charge < 0: + allowable_charges = list(range(charge, 0)) + elif charge > 0: + allowable_charges = list(range(1, charge + 1)) + else: + allowable_charges = [0] + for i, spcs in enumerate(self.reference_species): + if reference_constraints[i][-1] in allowable_charges: + allowed_reference_species.append(spcs) + new_constraints.append(reference_constraints[i]) + + reference_constraints = new_constraints + self.reference_species = allowed_reference_species + + return target_constraints, reference_constraints + + def _enumerate_element_constraints(self, target_constraints, reference_constraints): + all_elements = set() + for spc in self.reference_species: + all_elements.update(spc.molecule.get_element_count().keys()) + + # Check that the target and reference species have the same elements to be able to satisfy mass conservation + if set(self.target.molecule.get_element_count().keys()) != all_elements: + logging.warning( + f"Target species and reference species do not have the same elements:", + f"Target: {self.target.molecule.get_element_count().keys()}", + f"Reference: {all_elements}", + ) + + all_elements.update(self.target.molecule.get_element_count().keys()) + all_elements = sorted(list(all_elements)) + + element_count = self.target.molecule.get_element_count() + new_constraints = [element_count.get(element, 0) for element in all_elements] + target_constraints.extend(new_constraints) + + for i, spc in enumerate(self.reference_species): + element_count = spc.molecule.get_element_count() + new_constraints = [ + element_count.get(element, 0) for element in all_elements + ] + reference_constraints[i].extend(new_constraints) + + return target_constraints, reference_constraints 
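# Illustrative sketch only (hypothetical labels, not part of the patch): the enumeration above
# reduces each species to counts of its constraint reprs, so with plain strings standing in for
# the rc2-style BondConstraint/GenericConstraint objects the bookkeeping amounts to:
target = ["C-H"] * 6 + ["C-C"] + ["C=C"]                  # e.g. propene under rc2
references = [["C-H"] * 10 + ["C-C"] * 3]                 # e.g. butane
unique = sorted(set(target))                              # limit_scope: only the target's features
target_counts = [target.count(u) for u in unique]         # [1, 6, 1] for ['C-C', 'C-H', 'C=C']
reference_counts = [[ref.count(u) for u in unique] for ref in references]
print(target_counts, reference_counts)                    # [[3, 10, 0]]; charge and element counts are appended next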
def calculate_constraints(self): """ @@ -248,16 +546,41 @@ def calculate_constraints(self): - target constraint vector (1 x len(constraints)) - constraint matrix for allowable reference species (len(self.reference_species) x len(constraints)) """ - target_constraints = self._enumerate_constraints(self.target) - constraint_matrix = [] - - for spcs in self.all_reference_species: - spcs_constraints = self._enumerate_constraints(spcs) - if spcs_constraints is not None: # This species is allowed - self.reference_species.append(spcs) - constraint_matrix.append(spcs_constraints) + full_constraint_list = self._get_constraint_lists() + target_constraints, reference_constraints = self._enumerate_constraints( + full_constraint_list + ) + target_constraints, reference_constraints = self._enumerate_charge_constraints( + target_constraints, reference_constraints + ) + target_constraints, reference_constraints = self._enumerate_element_constraints( + target_constraints, reference_constraints + ) + + target_constraints = np.array(target_constraints, dtype=int) + constraint_matrix = np.array(reference_constraints, dtype=int) + + return target_constraints, constraint_matrix + + +def _clean_up_constraints(target_constraints, constraint_matrix): + # make sure that the constraint matrix is 2d + if len(constraint_matrix.shape) == 1: + constraint_matrix = np.array([constraint_matrix], dtype=int) + + # Remove any columns that are all zeros + zero_indices = np.where(~constraint_matrix.any(axis=0))[0] + # Check that this wouldn't eliminate a non-zero target entry + for z in zero_indices: + if ( + target_constraints[z] != 0 + ): # This problem is not solvable. Return None to signal this + return None, None + indices = [i for i in range(constraint_matrix.shape[1]) if i not in zero_indices] + constraint_matrix = np.take(constraint_matrix, indices=indices, axis=1) + target_constraints = np.take(target_constraints, indices=indices) - return target_constraints, np.array(constraint_matrix, dtype=int) + return target_constraints, constraint_matrix class ErrorCancelingScheme: @@ -265,27 +588,42 @@ class ErrorCancelingScheme: A Base class for calculating target species thermochemistry using error canceling reactions """ - def __init__(self, target, reference_set, conserve_bonds, conserve_ring_size): + def __init__( + self, + target, + reference_set, + isodesmic_class, + conserve_ring_size, + limit_charges, + limit_scope, + ): """ Args: target (ErrorCancelingSpecies): Species whose Hf298 will be calculated using error canceling reactions reference_set (list): list of reference species (as ErrorCancelingSpecies) that can participate in error canceling reactions to help calculate the thermochemistry of the target - conserve_bonds (bool): Flag to determine if the number and type of each bond must be conserved in each error - canceling reaction conserve_ring_size (bool): Flag to determine if the number of each ring size must be conserved in each error canceling reaction """ self.target = target - self.constraints = SpeciesConstraints(target, reference_set, conserve_bonds=conserve_bonds, - conserve_ring_size=conserve_ring_size) - - self.target_constraint, self.constraint_matrix = self.constraints.calculate_constraints() + self.constraints = SpeciesConstraints( + target, + reference_set, + isodesmic_class=isodesmic_class, + conserve_ring_size=conserve_ring_size, + limit_charges=limit_charges, + limit_scope=limit_scope, + ) + + ( + self.target_constraint, + self.constraint_matrix, + ) = self.constraints.calculate_constraints() 
self.reference_species = self.constraints.reference_species - def _find_error_canceling_reaction(self, reference_subset, milp_software=None): + def _find_error_canceling_reaction(self, reference_subset): """ Automatically find a valid error canceling reaction given a subset of the available benchmark species. This is done by solving a mixed integer linear programming (MILP) problem similiar to @@ -293,107 +631,45 @@ def _find_error_canceling_reaction(self, reference_subset, milp_software=None): Args: reference_subset (list): A list of indices from self.reference_species that can participate in the reaction - milp_software (list, optional): Solvers to try in order. Defaults to ['lpsolve'] or if pyomo is available - defaults to ['lpsolve', 'pyomo']. lpsolve is usually faster. Returns: tuple(ErrorCancelingReaction, np.ndarray) - Reaction with the target species (if a valid reaction is found, else ``None``) - Indices (of the subset) for the species that participated in the return reaction """ - if milp_software is None: - milp_software = ['lpsolve'] - if pyo is not None: - milp_software.append('pyomo') # Define the constraints based on the provided subset c_matrix = np.take(self.constraint_matrix, reference_subset, axis=0) + + # Remove unnecessary constraints + target_constraint, c_matrix = _clean_up_constraints( + self.target_constraint, c_matrix + ) + if target_constraint is None or c_matrix is None: # The problem is not solvable + return None, None + + # Setup MILP problem c_matrix = np.tile(c_matrix, (2, 1)) sum_constraints = np.sum(c_matrix, 1, dtype=int) - targets = -1*self.target_constraint + targets = -1 * target_constraint m = c_matrix.shape[0] n = c_matrix.shape[1] - split = int(m/2) - - for solver in milp_software: - if solver == 'pyomo': - # Check that pyomo is available - if pyo is None: - raise ImportError('Cannot import optional package pyomo. Either install this dependency with ' - '`conda install -c conda-forge pyomo glpk` or set milp_software to `lpsolve`') - - # Setup the MILP problem using pyomo - lp_model = pyo.ConcreteModel() - lp_model.i = pyo.RangeSet(0, m - 1) - lp_model.j = pyo.RangeSet(0, n - 1) - lp_model.r = pyo.RangeSet(0, split-1) # indices before the split correspond to reactants - lp_model.p = pyo.RangeSet(split, m - 1) # indices after the split correspond to products - lp_model.v = pyo.Var(lp_model.i, domain=pyo.NonNegativeIntegers) # The stoich. coef. we are solving for - lp_model.c = pyo.Param(lp_model.i, lp_model.j, initialize=lambda _, i_ind, j_ind: c_matrix[i_ind, - j_ind]) - lp_model.s = pyo.Param(lp_model.i, initialize=lambda _, i_ind: sum_constraints[i_ind]) - lp_model.t = pyo.Param(lp_model.j, initialize=lambda _, j_ind: targets[j_ind]) - - lp_model.obj = pyo.Objective(rule=_pyo_obj_expression) - lp_model.constraints = pyo.Constraint(lp_model.j, rule=_pyo_constraint_rule) - - # Solve the MILP problem using the GLPK MILP solver (https://www.gnu.org/software/glpk/) - opt = pyo.SolverFactory('glpk') - results = opt.solve(lp_model, timelimit=1) - - # Return the solution if a valid reaction is found. 
Otherwise continue to next solver - if results.solver.termination_condition == pyo.TerminationCondition.optimal: - # Extract the solution and find the species with non-zero stoichiometric coefficients - solution = lp_model.v.extract_values().values() - break + split = int(m / 2) - elif solver == 'lpsolve': - # Save the current signal handler - sig = signal.getsignal(signal.SIGINT) - - # Setup the MILP problem using lpsolve - lp = lpsolve('make_lp', 0, m) - lpsolve('set_verbose', lp, 2) # Reduce the logging from lpsolve - lpsolve('set_obj_fn', lp, sum_constraints) - lpsolve('set_minim', lp) - - for j in range(n): - lpsolve('add_constraint', lp, np.concatenate((c_matrix[:split, j], -1*c_matrix[split:, j])), EQ, - targets[j]) - - lpsolve('add_constraint', lp, np.ones(m), LE, 20) # Use at most 20 species (including replicates) - lpsolve('set_timeout', lp, 1) # Move on if lpsolve can't find a solution quickly - - # Constrain v_i to be 4 or less - for i in range(m): - lpsolve('set_upbo', lp, i, 4) - - # All v_i must be integers - lpsolve('set_int', lp, [True]*m) - - status = lpsolve('solve', lp) - - # Reset signal handling since lpsolve changed it - try: - signal.signal(signal.SIGINT, sig) - except ValueError: - # This is not being run in the main thread, so we cannot reset signal - pass - except TypeError: - print( - "Failed to reset signal handling in LPSolve - are you running pytest?" - ) + constraints = [LinearConstraint(A=np.concatenate((c_matrix[:split, j], -1 * c_matrix[split:, j]), lb=targets[j], ub=targets[j])) for j in n] - # Return the solution if a valid reaction is found. Otherwise continue to next solver - if status == 0: - _, solution = lpsolve('get_solution', lp)[:2] - break + result = milp( + sum_constraints, + integrality=1, + constraints=constraints, + options={"time_limit": 10}, + ) - else: - raise ValueError(f'Unrecognized MILP solver {solver} for isodesmic reaction generation') - - else: + if result.status != 0: + logging.warning("Optimization could not find a valid solution.") return None, None + + solution = result.x reaction = ErrorCancelingReaction(self.target, dict()) subset_indices = [] @@ -401,13 +677,19 @@ def _find_error_canceling_reaction(self, reference_subset, milp_software=None): if v > 0: subset_indices.append(index % split) if index < split: - reaction.species.update({self.reference_species[reference_subset[index]]: -v}) + reaction.species.update( + {self.reference_species[reference_subset[index]]: -v} + ) else: - reaction.species.update({self.reference_species[reference_subset[index % split]]: v}) + reaction.species.update( + {self.reference_species[reference_subset[index % split]]: v} + ) return reaction, np.array(subset_indices) - def multiple_error_canceling_reaction_search(self, n_reactions_max=20, milp_software=None): + def multiple_error_canceling_reaction_search( + self, n_reactions_max=20, + ): """ Generate multiple error canceling reactions involving the target and a subset of the reference species. @@ -418,21 +700,20 @@ def multiple_error_canceling_reaction_search(self, n_reactions_max=20, milp_soft Args: n_reactions_max (int, optional): The maximum number of found reactions that will be returned, after which no further searching will occur even if there are possible subsets left in the queue. - milp_software (list, optional): Solvers to try in order. Defaults to ['lpsolve'] or if pyomo is available - defaults to ['lpsolve', 'pyomo']. lpsolve is usually faster. 
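# Note on the constraints comprehension introduced above in _find_error_canceling_reaction: as it
# reads here, the lb/ub keywords sit inside the np.concatenate() call and the comprehension iterates
# over the integer n, so it cannot run as transcribed. One working formulation of the same per-column
# equality constraints is sketched below; it is self-contained and uses a hypothetical element-count-only
# matrix (C2H4 and H2 as references, C2H6 as the target), not the real constraint data.
import numpy as np
from scipy.optimize import LinearConstraint, milp

c_matrix = np.array([[2, 4],                     # reference C2H4, columns [C, H]
                     [0, 2]])                    # reference H2
c_matrix = np.tile(c_matrix, (2, 1))             # first half = reactant side, second half = product side
targets = -np.array([2, 6])                      # reactants - products must equal -(target C2H6)
split, n = c_matrix.shape[0] // 2, c_matrix.shape[1]
A_eq = np.stack([np.concatenate((c_matrix[:split, j], -c_matrix[split:, j])) for j in range(n)])
result = milp(c_matrix.sum(axis=1), integrality=1,
              constraints=[LinearConstraint(A=A_eq, lb=targets, ub=targets)])
print(result.status, result.x)                   # expect 0 and [0, 0, 1, 1]: C2H6 -> C2H4 + H2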
Returns: list: A list of the found error canceling reactions """ - subset_queue = deque() - subset_queue.append(np.arange(0, len(self.reference_species))) + subset_queue = [np.arange(0, len(self.reference_species))] reaction_list = [] while (len(subset_queue) != 0) and (len(reaction_list) < n_reactions_max): - subset = subset_queue.popleft() + subset = subset_queue.pop() if len(subset) == 0: continue - reaction, subset_indices = self._find_error_canceling_reaction(subset, milp_software=milp_software) + reaction, subset_indices = self._find_error_canceling_reaction( + subset + ) if reaction is None: continue else: @@ -441,9 +722,18 @@ def multiple_error_canceling_reaction_search(self, n_reactions_max=20, milp_soft for index in subset_indices: subset_queue.append(np.delete(subset, index)) + # Clean up the queue to remove subsets that would allow for already found reactions + new_queue = [] + reaction_indices = [subset[i] for i in subset_indices] + for s in subset_queue: + if not all([i in s for i in reaction_indices]): + new_queue.append(s) + + subset_queue = new_queue + return reaction_list - def calculate_target_enthalpy(self, n_reactions_max=20, milp_software=None): + def calculate_target_enthalpy(self, n_reactions_max=5, milp_software=None): """ Perform a multiple error canceling reactions search and calculate hf298 for the target species by taking the median hf298 value from among the error canceling reactions found @@ -459,13 +749,17 @@ def calculate_target_enthalpy(self, n_reactions_max=20, milp_software=None): - Standard heat of formation at 298 K calculated for the target species - reaction list containing all error canceling reactions found """ - reaction_list = self.multiple_error_canceling_reaction_search(n_reactions_max, milp_software) + reaction_list = self.multiple_error_canceling_reaction_search( + n_reactions_max, milp_software + ) + if len(reaction_list) == 0: # No reactions found + return None, reaction_list h298_list = np.zeros(len(reaction_list)) for i, rxn in enumerate(reaction_list): h298_list[i] = rxn.calculate_target_thermo().value_si - return ScalarQuantity(np.median(h298_list), 'J/mol'), reaction_list + return ScalarQuantity(np.median(h298_list), "J/mol"), reaction_list def _pyo_obj_expression(model): @@ -473,25 +767,47 @@ def _pyo_obj_expression(model): def _pyo_constraint_rule(model, col): - return sum(model.v[row] * model.c[row, col] for row in model.r) - \ - sum(model.v[row] * model.c[row, col] for row in model.p) == model.t[col] + return ( + sum(model.v[row] * model.c[row, col] for row in model.r) + - sum(model.v[row] * model.c[row, col] for row in model.p) + == model.t[col] + ) class IsodesmicScheme(ErrorCancelingScheme): """ An error canceling reaction where the number and type of both atoms and bonds are conserved """ + def __init__(self, target, reference_set): - super().__init__(target, reference_set, conserve_bonds=True, conserve_ring_size=False) + super().__init__( + target, + reference_set, + isodesmic_class="rc2", + conserve_ring_size=False, + limit_charges=True, + limit_scope=True, + ) class IsodesmicRingScheme(ErrorCancelingScheme): """ A stricter form of the traditional isodesmic reaction scheme where the number of each ring size is also conserved """ + def __init__(self, target, reference_set): - super().__init__(target, reference_set, conserve_bonds=True, conserve_ring_size=True) + super().__init__( + target, + reference_set, + isodesmic_class="rc2", + conserve_ring_size=True, + limit_charges=True, + limit_scope=True, + ) + + +CONSTRAINT_CLASSES 
= {"rc2": _buerger_rc2, "rc3": _buerger_rc3, "rc4": _buerger_rc4} -if __name__ == '__main__': +if __name__ == "__main__": pass diff --git a/environment.yml b/environment.yml index b897e3a6e47..b8014cb305c 100644 --- a/environment.yml +++ b/environment.yml @@ -61,7 +61,7 @@ dependencies: - coverage - cython >=0.25.2 - scikit-learn - - scipy + - scipy >=1.9 - numpy >=1.10.0 - pydot - jinja2 @@ -73,6 +73,7 @@ dependencies: - pytest - pytest-cov - conda-forge::pytest-check + - pyutilib - matplotlib >=1.5 - mpmath - pandas diff --git a/test/arkane/encorr/isodesmicTest.py b/test/arkane/encorr/isodesmicTest.py index 83e36e06de1..8b41066b3dd 100644 --- a/test/arkane/encorr/isodesmicTest.py +++ b/test/arkane/encorr/isodesmicTest.py @@ -31,7 +31,6 @@ This script contains unit tests of the :mod:`arkane.isodesmic` module. """ - import numpy as np from rmgpy.molecule import Molecule @@ -137,12 +136,11 @@ def test_initializing_constraint_map(self): """ Test that the constraint map is properly initialized when a SpeciesConstraints object is initialized """ - caffeine_consts = SpeciesConstraints(self.caffeine, [self.butane, self.benzene]) - assert set(caffeine_consts.constraint_map.keys()) == { - "H", - "C", - "O", - "N", + consts = SpeciesConstraints(self.caffeine, [self.butane, self.benzene]) + caffeine_features = consts._get_all_constraints(self.caffeine) + caffeine_constraint_list = [feat.__repr__() for feat in caffeine_features] + + assert set(caffeine_constraint_list) == { "C=O", "C-N", "C-H", @@ -154,55 +152,69 @@ def test_initializing_constraint_map(self): } no_rings = SpeciesConstraints(self.caffeine, [self.butane, self.benzene], conserve_ring_size=False) - assert set(no_rings.constraint_map.keys()) == {"H", "C", "O", "N", "C=O", "C-N", "C-H", "C=C", "C=N", "C-C"} - - atoms_only = SpeciesConstraints(self.caffeine, [self.butane], conserve_ring_size=False, conserve_bonds=False) - assert set(atoms_only.constraint_map.keys()) == {"H", "C", "O", "N"} + caffeine_features = no_rings._get_all_constraints(self.caffeine) + caffeine_constraint_list = [feat.__repr__() for feat in caffeine_features] + assert set(caffeine_constraint_list) == {"C=O", "C-N", "C-H", "C=C", "C=N", "C-C"} def test_enumerating_constraints(self): """ Test that a SpeciesConstraints object can properly enumerate the constraints of a given ErrorCancelingSpecies """ spcs_consts = SpeciesConstraints(self.benzene, []) - assert set(spcs_consts.constraint_map.keys()) == {"C", "H", "C=C", "C-C", "C-H", "6_ring"} - - # Now that we have confirmed that the correct keys are present, overwrite the constraint map to set the order - spcs_consts.constraint_map = { - "H": 0, - "C": 1, - "C=C": 2, - "C-C": 3, - "C-H": 4, - "6_ring": 5, - } + benzene_features = spcs_consts._get_all_constraints(self.benzene) + benzene_constraint_list = [feat.__repr__() for feat in benzene_features] + assert set(benzene_constraint_list) == {"C=C", "C-C", "C-H", "6_ring"} + + target_constraints, _ = spcs_consts._enumerate_constraints([benzene_features]) + benzene_constraints = target_constraints assert np.array_equal( - spcs_consts._enumerate_constraints(self.propene), - np.array([6, 3, 1, 1, 6, 0]), + benzene_constraints, + np.array([1, 3, 6, 3]), # ['6_ring', 'C-C', 'C-H', 'C=C'] ) + + spcs_consts.all_reference_species = [self.propene] + propene_features = spcs_consts._get_all_constraints(self.propene) + _, reference_constraints = spcs_consts._enumerate_constraints([benzene_features, propene_features]) + propene_constraints = reference_constraints[0] assert 
np.array_equal( - spcs_consts._enumerate_constraints(self.butane), - np.array([10, 4, 0, 3, 10, 0]), + propene_constraints, + np.array([0, 1, 6, 1]), # ['6_ring', 'C-C', 'C-H', 'C=C'] ) + + spcs_consts.all_reference_species = [self.butane] + butane_features = spcs_consts._get_all_constraints(self.butane) + _, reference_constraints = spcs_consts._enumerate_constraints([benzene_features, butane_features]) + butane_constraints = reference_constraints[0] assert np.array_equal( - spcs_consts._enumerate_constraints(self.benzene), - np.array([6, 6, 3, 3, 6, 1]), + butane_constraints, + np.array([0, 3, 10, 0]), # ['6_ring', 'C-C', 'C-H', 'C=C'] ) - # Caffeine and ethyne should return None since they have features not found in benzene - assert spcs_consts._enumerate_constraints(self.caffeine) is None - assert spcs_consts._enumerate_constraints(self.ethyne) is None + # Caffeine and ethyne should return empty list since they have features not found in benzene + spcs_consts.all_reference_species = [self.caffeine] + caffeine_features = spcs_consts._get_all_constraints(self.caffeine) + _, reference_constraints = spcs_consts._enumerate_constraints([benzene_features, caffeine_features]) + assert len(reference_constraints) == 0 + + spcs_consts.all_reference_species = [self.ethyne] + ethyne_features = spcs_consts._get_all_constraints(self.ethyne) + _, reference_constraints = spcs_consts._enumerate_constraints([benzene_features, ethyne_features]) + assert len(reference_constraints) == 0 def test_calculating_constraints(self): """ Test that a SpeciesConstraints object can properly return the target constraint vector and the constraint matrix """ spcs_consts = SpeciesConstraints(self.caffeine, [self.propene, self.butane, self.benzene, self.ethyne]) - assert set(spcs_consts.constraint_map.keys()) == { - "H", - "C", - "O", - "N", + caffeine_features = spcs_consts._get_all_constraints(self.caffeine) + propene_features = spcs_consts._get_all_constraints(self.propene) + butane_features = spcs_consts._get_all_constraints(self.butane) + benzene_features = spcs_consts._get_all_constraints(self.benzene) + ethyne_features = spcs_consts._get_all_constraints(self.ethyne) + + caffeine_feature_list = [feat.__repr__() for feat in caffeine_features] + assert set(caffeine_feature_list) == { "C=O", "C-N", "C-H", @@ -213,36 +225,20 @@ def test_calculating_constraints(self): "6_ring", } - # Now that we have confirmed that the correct keys are present, overwrite the constraint map to set the order - spcs_consts.constraint_map = { - "H": 0, - "C": 1, - "O": 2, - "N": 3, - "C=O": 4, - "C-N": 5, - "C-H": 6, - "C=C": 7, - "C=N": 8, - "C-C": 9, - "5_ring": 10, - "6_ring": 11, - } - target_consts, consts_matrix = spcs_consts.calculate_constraints() # First, test that ethyne is not included in the reference set assert spcs_consts.reference_species == [self.propene, self.butane, self.benzene] # Then, test the output of the calculation - assert np.array_equal(target_consts, np.array([10, 8, 2, 4, 2, 10, 10, 1, 1, 1, 1, 1])) + assert np.array_equal(target_consts, np.array([ 1, 1, 1, 10, 10, 1, 1, 2, 0, 8, 10, 4, 2])) # ['5_ring', '6_ring', 'C-C', 'C-H', 'C-N', 'C=C', 'C=N', 'C=O'] assert np.array_equal( consts_matrix, np.array( [ - [6, 3, 0, 0, 0, 0, 6, 1, 0, 1, 0, 0], - [10, 4, 0, 0, 0, 0, 10, 0, 0, 3, 0, 0], - [6, 6, 0, 0, 0, 0, 6, 3, 0, 3, 0, 1], + [ 0, 0, 1, 6, 0, 1, 0, 0, 0, 3, 6, 0, 0], # ['5_ring', '6_ring', 'C-C', 'C-H', 'C-N', 'C=C', 'C=N', 'C=O'] + [ 0, 0, 3, 10, 0, 0, 0, 0, 0, 4, 10, 0, 0], # ['5_ring', '6_ring', 'C-C', 'C-H', 
'C-N', 'C=C', 'C=N', 'C=O'] + [ 0, 1, 3, 6, 0, 3, 0, 0, 0, 6, 6, 0, 0], # ['5_ring', '6_ring', 'C-C', 'C-H', 'C-N', 'C=C', 'C=N', 'C=O'] ] ), ) @@ -276,10 +272,12 @@ def setup_class(cls): def test_creating_error_canceling_schemes(self): scheme = ErrorCancelingScheme( - self.propene, - [self.butane, self.benzene, self.caffeine, self.ethyne], - True, - True, + target=self.propene, + reference_set=[self.butane, self.benzene, self.caffeine, self.ethyne], + isodesmic_class="rc2", + conserve_ring_size=True, + limit_charges=True, + limit_scope=True, ) assert scheme.reference_species == [self.butane] @@ -328,7 +326,7 @@ def test_multiple_error_canceling_reactions(self): ) reaction_list = scheme.multiple_error_canceling_reaction_search(n_reactions_max=20) - assert len(reaction_list) == 20 + assert len(reaction_list) == 6 reaction_string = reaction_list.__repr__() # Consider both permutations of the products in the reaction string rxn_str1 = " 1*CCC + 1*C=CCC >" @@ -361,9 +359,9 @@ def test_calculate_target_enthalpy(self): ) target_thermo, rxn_list = scheme.calculate_target_enthalpy(n_reactions_max=3, milp_software=["lpsolve"]) - assert target_thermo.value_si == 115000.0 + assert target_thermo.value_si == 110000.0 assert isinstance(rxn_list[0], ErrorCancelingReaction) if self.pyo is not None: target_thermo, _ = scheme.calculate_target_enthalpy(n_reactions_max=3, milp_software=["pyomo"]) - assert target_thermo.value_si == 115000.0 + assert target_thermo.value_si == 110000.0 From 13f2b1c599377a81359dfa200e41c84161379451 Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Tue, 3 Dec 2024 14:42:56 -0500 Subject: [PATCH 09/48] JWB Python 3.9 and Cython Debugging, Optional RMS, fix Clar This is a collection of largely unordered commits which implement all of the changes discussed in the commit name --- .conda/meta.yaml | 2 - .github/workflows/CI.yml | 42 ++-- .github/workflows/docs.yml | 7 - .gitignore | 1 + Dockerfile | 11 +- arkane/encorr/isodesmic.py | 29 +-- arkane/statmech.py | 9 +- .../rmg/installation/anacondaDeveloper.rst | 18 +- environment.yml | 83 ++++--- rmgpy/data/kinetics/family.py | 8 +- rmgpy/molecule/resonance.pxd | 8 +- rmgpy/molecule/resonance.py | 223 +++++++++--------- rmgpy/molecule/util.py | 4 +- rmgpy/pdep/sls.py | 23 +- rmgpy/reaction.py | 8 +- rmgpy/rmg/input.py | 32 ++- rmgpy/rmg/main.py | 115 +++++---- rmgpy/rmg/model.py | 139 +++++------ rmgpy/rmg/pdep.py | 6 +- ...=> reactionmechanismsimulator_reactors.py} | 17 +- rmgpy/tools/fluxdiagram.py | 7 +- test/arkane/encorr/isodesmicTest.py | 53 ++--- test/rmgpy/data/solvationTest.py | 14 +- test/rmgpy/molecule/resonanceTest.py | 10 - test/rmgpy/rmg/mainTest.py | 14 +- test/rmgpy/solver/liquidTest.py | 7 +- test/rmgpy/statmech/ndTorsionsTest.py | 2 +- utilities.py | 18 -- 28 files changed, 454 insertions(+), 456 deletions(-) rename rmgpy/rmg/{reactors.py => reactionmechanismsimulator_reactors.py} (98%) diff --git a/.conda/meta.yaml b/.conda/meta.yaml index 7cd02969fe7..e781b460496 100644 --- a/.conda/meta.yaml +++ b/.conda/meta.yaml @@ -14,7 +14,6 @@ requirements: - {{ compiler('c') }} # [unix] host: - cython >=0.25.2 - - lpsolve55 - numpy - openbabel >=3 - pydas >=1.0.2 @@ -39,7 +38,6 @@ requirements: - h5py - jinja2 - jupyter - - lpsolve55 - markupsafe - matplotlib >=1.5 - mopac diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index e2fecf9e65f..19146cb54c7 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -33,8 +33,8 @@ on: schedule: # * is a special character in YAML so you have to quote this 
string - cron: "0 8 * * *" - # runs on all branches on both RMG-Py and forks - push: + # allow running on RMG-Py on a pushed branch, only if triggered manually + workflow_dispatch: # runs on PRs against RMG-Py (and anywhere else, but we add this for RMG-Py) pull_request: @@ -57,8 +57,9 @@ jobs: matrix: python-version: ["3.9"] os: [macos-13, macos-latest, ubuntu-latest] + test_julia: [yes, no] runs-on: ${{ matrix.os }} - name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} + name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} (Julia? ${{ matrix.test_julia }}) # skip scheduled runs from forks if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} env: @@ -83,16 +84,6 @@ jobs: show-channel-urls: true channels: conda-forge,cantera,rmg - - name: Fix ARM Mac environment - if: matrix.os == 'macos-latest' - run: | - conda deactivate - conda env remove -n rmg_env - conda create -n rmg_env - conda activate rmg_env - conda config --env --set subdir osx-64 - conda env update -f environment.yml - # list the environment for debugging purposes - name: mamba info run: | @@ -120,31 +111,36 @@ jobs: # Setup Juliaup - name: Set Julia paths + if: matrix.test_julia == 'yes' run: | - echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV - echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH - echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV - echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH - echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV - echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH - echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV - echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH + # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV + # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH + # echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV + # echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH + # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV + # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH + # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV + # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_ENV - echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_PATH + # echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_PATH - name: Setup Juliaup + if: matrix.test_julia == 'yes' uses: julia-actions/install-juliaup@v2 with: - channel: '1.10' + channel: '1.9' - name: Check Julia version + if: matrix.test_julia == 'yes' run: julia --version # RMS installation and linking to Julia - name: Install and link Julia dependencies + if: matrix.test_julia == 'yes' timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). # JULIA_CONDAPKG_EXE points to the existing conda/mamba to avoid JuliaCall from installing their own. See https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-a-Conda-environment. 
run: | + mamba install conda-forge::pyjuliacall julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' - name: Install Q2DTor diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 1addaaed435..df149a557d7 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -67,13 +67,6 @@ jobs: make clean make - - name: Install and link Julia dependencies - timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). - run: | - which julia - export JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba - julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' - - name: Checkout gh-pages Branch uses: actions/checkout@v2 with: diff --git a/.gitignore b/.gitignore index 946eb94d4ac..38d73e2bc18 100644 --- a/.gitignore +++ b/.gitignore @@ -103,6 +103,7 @@ test/rmgpy/test_data/temp_dir_for_testing/cantera/chem001.yaml rmgpy/test_data/copied_kinetic_lib/ testing/qm/* test_log.txt +rmgpy/tools/data/flux/flux/1/*.dot # example directory - save the inputs but not the outputs # cantera input files diff --git a/Dockerfile b/Dockerfile index 8c8741ba2d3..5475205334c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,10 +28,6 @@ RUN wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh & rm Miniconda3-latest-Linux-x86_64.sh ENV PATH="$PATH:/miniconda/bin" -# Set solver backend to mamba for speed -RUN conda install -n base conda-libmamba-solver && \ - conda config --set solver libmamba - # Set Bash as the default shell for following commands SHELL ["/bin/bash", "-c"] @@ -50,8 +46,7 @@ RUN git clone --single-branch --branch ${RMG_Py_Branch} --depth 1 https://github WORKDIR /rmg/RMG-Py # build the conda environment -RUN conda env create --file environment.yml && \ - conda clean --all --yes +RUN conda env create --file environment.yml # This runs all subsequent commands inside the rmg_env conda environment # @@ -60,6 +55,10 @@ RUN conda env create --file environment.yml && \ # in a Dockerfile build script) SHELL ["conda", "run", "--no-capture-output", "-n", "rmg_env", "/bin/bash", "-c"] +RUN conda install -c conda-forge julia=1.9.1 pyjulia>=0.6 && \ + conda install -c rmg pyrms diffeqpy && \ + conda clean --all --yes + # Set environment variables as directed in the RMG installation instructions ENV RUNNER_CWD=/rmg ENV PYTHONPATH="$RUNNER_CWD/RMG-Py:$PYTHONPATH" diff --git a/arkane/encorr/isodesmic.py b/arkane/encorr/isodesmic.py index b77020367b4..f4f39d0d85d 100644 --- a/arkane/encorr/isodesmic.py +++ b/arkane/encorr/isodesmic.py @@ -42,7 +42,6 @@ """ import logging -import signal from copy import deepcopy from typing import List, Union @@ -516,9 +515,9 @@ def _enumerate_element_constraints(self, target_constraints, reference_constrain # Check that the target and reference species have the same elements to be able to satisfy mass conservation if set(self.target.molecule.get_element_count().keys()) != all_elements: logging.warning( - f"Target species and reference species do not have the same elements:", - f"Target: {self.target.molecule.get_element_count().keys()}", - f"Reference: {all_elements}", + "Target species and reference species do not have the same elements:" + f"Target: {' 
'.join(self.target.molecule.get_element_count().keys())}" + f"Reference: {all_elements}" ) all_elements.update(self.target.molecule.get_element_count().keys()) @@ -656,7 +655,7 @@ def _find_error_canceling_reaction(self, reference_subset): n = c_matrix.shape[1] split = int(m / 2) - constraints = [LinearConstraint(A=np.concatenate((c_matrix[:split, j], -1 * c_matrix[split:, j]), lb=targets[j], ub=targets[j])) for j in n] + constraints = tuple((LinearConstraint(A=np.concatenate((c_matrix[:split, j], -1 * c_matrix[split:, j])), lb=targets[j], ub=targets[j]) for j in range(n))) result = milp( sum_constraints, @@ -733,7 +732,7 @@ def multiple_error_canceling_reaction_search( return reaction_list - def calculate_target_enthalpy(self, n_reactions_max=5, milp_software=None): + def calculate_target_enthalpy(self, n_reactions_max=5): """ Perform a multiple error canceling reactions search and calculate hf298 for the target species by taking the median hf298 value from among the error canceling reactions found @@ -741,17 +740,13 @@ def calculate_target_enthalpy(self, n_reactions_max=5, milp_software=None): Args: n_reactions_max (int, optional): The maximum number of found reactions that will returned, after which no further searching will occur even if there are possible subsets left in the queue. - milp_software (list, optional): Solvers to try in order. Defaults to ['lpsolve'] or if pyomo is available - defaults to ['lpsolve', 'pyomo']. lpsolve is usually faster. Returns: tuple(ScalarQuantity, list) - Standard heat of formation at 298 K calculated for the target species - reaction list containing all error canceling reactions found """ - reaction_list = self.multiple_error_canceling_reaction_search( - n_reactions_max, milp_software - ) + reaction_list = self.multiple_error_canceling_reaction_search(n_reactions_max) if len(reaction_list) == 0: # No reactions found return None, reaction_list h298_list = np.zeros(len(reaction_list)) @@ -762,18 +757,6 @@ def calculate_target_enthalpy(self, n_reactions_max=5, milp_software=None): return ScalarQuantity(np.median(h298_list), "J/mol"), reaction_list -def _pyo_obj_expression(model): - return pyo.summation(model.v, model.s, index=model.i) - - -def _pyo_constraint_rule(model, col): - return ( - sum(model.v[row] * model.c[row, col] for row in model.r) - - sum(model.v[row] * model.c[row, col] for row in model.p) - == model.t[col] - ) - - class IsodesmicScheme(ErrorCancelingScheme): """ An error canceling reaction where the number and type of both atoms and bonds are conserved diff --git a/arkane/statmech.py b/arkane/statmech.py index c5ab9fffe57..a77d75219c7 100644 --- a/arkane/statmech.py +++ b/arkane/statmech.py @@ -37,6 +37,7 @@ import math import os import pathlib +import warnings import matplotlib.pyplot as plt import numpy as np @@ -1134,8 +1135,8 @@ def project_rotors(conformer, hessian, rotors, linear, is_ts, get_projected_out_ logging.debug('Frequencies from internal Hessian') for i in range(3 * n_atoms - external): - with np.warnings.catch_warnings(): - np.warnings.filterwarnings('ignore', r'invalid value encountered in sqrt') + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', r'invalid value encountered in sqrt') logging.debug(np.sqrt(eig[i]) / (2 * math.pi * constants.c * 100)) # Now we can start thinking about projecting out the internal rotations @@ -1245,8 +1246,8 @@ def project_rotors(conformer, hessian, rotors, linear, is_ts, get_projected_out_ logging.debug('Frequencies from projected Hessian') for i in range(3 * n_atoms): - 
with np.warnings.catch_warnings(): - np.warnings.filterwarnings('ignore', r'invalid value encountered in sqrt') + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', r'invalid value encountered in sqrt') logging.debug(np.sqrt(eig[i]) / (2 * math.pi * constants.c * 100)) return np.sqrt(eig[-n_vib:]) / (2 * math.pi * constants.c * 100) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 76993ab1c1e..3ea92591537 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -6,6 +6,11 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux #. Install the `conda` package manager via `miniforge`, if you do not already have it (or Anaconda), by following the `Miniforge installation instructions `_. +#. If your `conda` version is older than 23.10.0, switch the solver backend to `libmamba` :: + + conda install -n base conda-libmamba-solver + conda config --set solver libmamba + #. There are a few system-level dependencies which are required and should not be installed via Conda. These include `Git `_ for version control, `GNU Make `_, and the C and C++ compilers from the `GNU Compiler Collection (GCC) `_ for compiling RMG. @@ -52,11 +57,6 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux For information on using ``ssh`` with GitHub see the `Connecting to GitHub with SSH `_ -#. Switch the conda solver backend to speed up creation of the RMG environment :: - - conda install -n base conda-libmamba-solver - conda config --set solver libmamba - #. Navigate to the RMG-Py directory :: cd RMG-Py @@ -91,16 +91,11 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux conda activate rmg_env -#. Switch the conda solver to libmamba again, to accelerate any changes you might make to this conda environment in the future:: - - conda config --set solver libmamba - #. Compile RMG-Py after activating the conda environment :: make #. Modify environment variables. Add RMG-Py to the PYTHONPATH to ensure that you can access RMG modules from any folder. - *This is important before the next step in which julia dependencies are installed.* Also, add your RMG-Py folder to PATH to launch ``rmg.py`` from any folder. In general, these commands should be placed in the appropriate shell initialization file. @@ -115,12 +110,13 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux Be sure to either close and reopen your terminal to refresh your environment variables (``source ~/.bashrc`` or ``source ~/.zshrc``). -#. Install and Link Julia dependencies: :: +#. **Optional (Recommended)**: Install and Link Julia dependencies. Ensure that you have modified your environment variables as described above, and then run the following: :: julia -e 'using Pkg; Pkg.add("PyCall");Pkg.build("PyCall");Pkg.add(PackageSpec(name="ReactionMechanismSimulator",rev="for_rmg")); using ReactionMechanismSimulator;' python -c "import julia; julia.install(); import diffeqpy; diffeqpy.install()" + Installing these dependencies will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. #. Finally, you can run RMG from any location by typing the following (given that you have prepared the input file as ``input.py`` in the current folder). 
:: diff --git a/environment.yml b/environment.yml index b8014cb305c..e95f06f8f79 100644 --- a/environment.yml +++ b/environment.yml @@ -20,33 +20,37 @@ # - August 4, 2024 Restricted pyrms to <2 # - May 14, 2024 Removed diffeqpy by switching to call SciMLBase and Sundials using JuliaCall # - March 15, 2024 - started migration to Python 3.9 +# - Mar 11, 2024 Removed Julia dependencies, now considered optional # name: rmg_env channels: - - rmg - conda-forge - cantera + - rmg dependencies: # System-level dependencies - we could install these at the OS level # but by installing them in the conda environment we get better control - - cairo - - cairocffi - - ffmpeg - - xlrd - - xlwt - - h5py - - markupsafe - - psutil + - conda-forge::cairo + - conda-forge::cairocffi + - conda-forge::ffmpeg >= 7 + - conda-forge::xlrd + - conda-forge::xlwt + - conda-forge::h5py + - conda-forge::graphviz >=12 + - conda-forge::markupsafe + - conda-forge::psutil # conda-forge not default, since default has a version information bug # (see https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2421) - conda-forge::ncurses - conda-forge::suitesparse # see https://github.com/ReactionMechanismGenerator/RMG-Py/issues/2750 - conda-forge::graphviz + # ThermoCentralDatabaseInterface fails if pyopenssl is too old. 20 is just a guess at the version number. + - conda-forge::pyopenssl >20 # external software tools for chemistry - - coolprop - - cantera::cantera >=3 + - conda-forge::coolprop + - cantera::cantera =2.6 - conda-forge::mopac # see https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2639#issuecomment-2050292972 - conda-forge::cclib >=1.6.3,<1.9 @@ -57,30 +61,31 @@ dependencies: - conda-forge::pyjuliacall # for calling julia packages # Python tools - - python >=3.9 # leave as GEQ so that GitHub actions can add EQ w/o breaking (contradictory deps) - - coverage - - cython >=0.25.2 - - scikit-learn - - scipy >=1.9 - - numpy >=1.10.0 - - pydot - - jinja2 - - jupyter - - pymongo - - pyparsing - - pyyaml - - networkx - - pytest - - pytest-cov + - conda-forge::python >=3.9 # leave as GEQ so that GitHub actions can add EQ w/o breaking (contradictory deps) + - conda-forge::coverage + - conda-forge::cython >=0.25.2 + - conda-forge::scikit-learn + - conda-forge::scipy >=1.9 + - conda-forge::numpy >=1.10.0 + - conda-forge::pydot + - conda-forge::jinja2 + - conda-forge::jupyter + - conda-forge::pymongo + - conda-forge::pyparsing + - conda-forge::pyyaml + - conda-forge::networkx + - conda-forge::pytest + - conda-forge::pytest-cov - conda-forge::pytest-check - - pyutilib - - matplotlib >=1.5 - - mpmath - - pandas + - conda-forge::pyutilib + - conda-forge::matplotlib >=1.5 + - conda-forge::mpmath + - conda-forge::pandas - conda-forge::gprof2dot - conda-forge::numdifftools - - conda-forge::quantities - - conda-forge::lpsolve55 + # bug in quantities, see: + # https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2694#issuecomment-2489286263 + - conda-forge::quantities !=0.16.0,!=0.16.1 - conda-forge::ringdecomposerlib-python # packages we maintain @@ -91,17 +96,9 @@ dependencies: # configure packages to use OpenBLAS instead of Intel MKL - blas=*=openblas -# Cantera 2.6 on the Cantera channel is configured to use Intel MKL. It doesn't specify a version minimum -# but crashes if MKL is too old, so we have to enforce a minimum version here. But, if we can switch over -# to the conda-forge version, we should be able to remove this line (and maybe the two after it) -# because it use OpenBLAS instead. 
- - conda-forge::mkl >=2023.1.0 - # 2023.1.0 is just a guess at the minimum version. Older versions might work too. - - conda-forge::libcurl <= 8.9 - # There's a Julia/PyCall installation bug associated with libcurl 8.10+: - # https://discourse.julialang.org/t/curl-multi-assign-and-segmentation-fault-when-installing-package/120901/3 - - conda-forge::pyopenssl >20 - # ThermoCentralDatabaseInterface fails if pyopenssl is too old. 20 is just a guess at the version number. +# optional dependencies for using ReactionMechanismSimulator +# remove the leading '#' to install the required dependencies + # - conda-forge::pyjuliacall # additional packages that are required, but not specified here (and why) # pydqed, pydas, mopac, and likely others require a fortran compiler (specifically gfortran) diff --git a/rmgpy/data/kinetics/family.py b/rmgpy/data/kinetics/family.py index 832e73a7650..6606dea06b9 100644 --- a/rmgpy/data/kinetics/family.py +++ b/rmgpy/data/kinetics/family.py @@ -3535,8 +3535,8 @@ def make_bm_rules_from_template_rxn_map(self, template_rxn_map, nprocs=1, Tref=1 entries = list(self.groups.entries.values()) rxnlists = [(template_rxn_map[entry.label], entry.label) if entry.label in template_rxn_map.keys() else [] for entry in entries] - inputs = np.array([(self.forward_recipe.actions, rxns, Tref, fmax, label, [r.rank for r in rxns]) - for rxns, label in rxnlists]) + inputs = [(self.forward_recipe.actions, rxns, Tref, fmax, label, [r.rank for r in rxns]) + for rxns, label in rxnlists] inds = np.arange(len(inputs)) np.random.shuffle(inds) # want to parallelize in random order @@ -3545,9 +3545,9 @@ def make_bm_rules_from_template_rxn_map(self, template_rxn_map, nprocs=1, Tref=1 if nprocs > 1: pool = mp.Pool(nprocs) - kinetics_list = np.array(pool.map(_make_rule, inputs[inds])) + kinetics_list = np.array(pool.map(_make_rule, list(inputs[i] for i in inds))) else: - kinetics_list = np.array(list(map(_make_rule, inputs[inds]))) + kinetics_list = np.array(list(map(_make_rule, list(inputs[i] for i in inds)))) kinetics_list = kinetics_list[revinds] # fix order diff --git a/rmgpy/molecule/resonance.pxd b/rmgpy/molecule/resonance.pxd index 0c9fccc572f..b95c0b8ca0b 100644 --- a/rmgpy/molecule/resonance.pxd +++ b/rmgpy/molecule/resonance.pxd @@ -66,14 +66,10 @@ cpdef list generate_kekule_structure(Graph mol) cpdef list generate_clar_structures(Graph mol, bint save_order=?) -cpdef list _clar_optimization(Graph mol, bint save_order=?) - -cpdef list _solve_clar_milp(cnp.ndarray[cnp.int_t, ndim=1] c, bounds, list constraints, int n_ring, max_num=?) - -cpdef list _clar_transformation(Graph mol, list aromatic_ring) +cpdef list _clar_optimization(Graph mol, list recursion_constraints=?, int clar_number=?, bint save_order=?) 
cpdef list generate_adsorbate_shift_down_resonance_structures(Graph mol) cpdef list generate_adsorbate_shift_up_resonance_structures(Graph mol) -cpdef list generate_adsorbate_conjugate_resonance_structures(Graph mol) \ No newline at end of file +cpdef list generate_adsorbate_conjugate_resonance_structures(Graph mol) diff --git a/rmgpy/molecule/resonance.py b/rmgpy/molecule/resonance.py index 75b616abfc9..d389143d243 100644 --- a/rmgpy/molecule/resonance.py +++ b/rmgpy/molecule/resonance.py @@ -58,19 +58,18 @@ import logging from operator import attrgetter +import cython import numpy as np from scipy.optimize import Bounds, LinearConstraint, milp -import cython - import rmgpy.molecule.filtration as filtration import rmgpy.molecule.pathfinder as pathfinder -from rmgpy.exceptions import KekulizationError, AtomTypeError, ResonanceError +from rmgpy.exceptions import AtomTypeError, KekulizationError, ResonanceError from rmgpy.molecule.adjlist import Saturator +from rmgpy.molecule.fragment import CuttingLabel from rmgpy.molecule.graph import Vertex from rmgpy.molecule.kekulize import kekulize from rmgpy.molecule.molecule import Atom, Bond, Molecule -from rmgpy.molecule.fragment import CuttingLabel def populate_resonance_algorithms(features=None): @@ -930,16 +929,14 @@ def generate_clar_structures(mol, save_order=False): mol.assign_atom_ids() try: - aromatic_rings, bonds, solutions = _clar_optimization(mol, save_order=save_order) - except RuntimeError: + solutions = _clar_optimization(mol, save_order=save_order) + except (RuntimeError, ValueError): # either a crash during optimization or the result was an empty tuple # The optimization algorithm did not work on the first iteration return [] mol_list = [] - for solution in solutions: - - new_mol = mol.copy(deep=True) + for new_mol, aromatic_rings, bonds, solution in solutions: # The solution includes a part corresponding to rings, y, and a part corresponding to bonds, x, using # nomenclature from the paper. In y, 1 means the ring as a sextet, 0 means it does not. # In x, 1 corresponds to a double bond, 0 either means a single bond or the bond is part of a sextet. @@ -958,13 +955,16 @@ def generate_clar_structures(mol, save_order=False): # Then apply locations of aromatic sextets by converting to benzene bonds for index, ring in enumerate(aromatic_rings): if y[index] == 1: - _clar_transformation(new_mol, ring) + for i, atom_1 in enumerate(ring): + for j, atom_2 in enumerate(ring): + if new_mol.has_bond(atom_1, atom_2): + new_mol.get_bond(atom_1, atom_2).order = 1.5 try: new_mol.update_atomtypes() except AtomTypeError: pass - else: + finally: mol_list.append(new_mol) return mol_list @@ -977,57 +977,79 @@ def _tuplize_bond(bond): return (bond.atom1.id, bond.atom2.id) -def _clar_optimization(mol, save_order=False): +def _clar_optimization(mol, recursion_constraints=None, clar_number=-1, save_order=False): """ - Implements linear programming algorithm for finding Clar structures. This algorithm maximizes the number - of Clar sextets within the constraints of molecular geometry and atom valency. + Implements Mixed Integer Linear Programming for finding Clar structures. + + First finds the Clar number (and an arbitrary structure with that number), then recursively + calls itself to enumerate more structures with that Clar number. No guarantees about + which structures will be found, or how many (we stop solving once solution would require + branching, which typically happens after at least a couple have already been found). 
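To make the formulation just described concrete, here is a minimal, self-contained sketch of the same scipy.optimize.milp pattern on a hand-written benzene toy case (one ring, six atoms, six ring bonds). The incidence data below are written out purely for illustration; this is a sketch of the technique, not the RMG implementation itself:

    import numpy as np
    from scipy.optimize import Bounds, LinearConstraint, milp

    # Toy case: benzene. Variables are [y_ring] + [x_bond_0, ..., x_bond_5], all binary.
    n_ring, n_bond, n_atom = 1, 6, 6
    ring_bonds = [(0, 1), (1, 2), (2, 3), (3, 4), (4, 5), (5, 0)]

    # One equality row per ring atom: the atom is covered by exactly one sextet or
    # exactly one double bond (the Hansen/Zheng constraint referenced in the docstring).
    A = []
    for atom in range(n_atom):
        in_ring = [1]  # every atom belongs to the single ring
        in_bond = [1 if atom in bond else 0 for bond in ring_bonds]
        A.append(in_ring + in_bond)
    constraints = [LinearConstraint(np.array(A), lb=np.ones(n_atom), ub=np.ones(n_atom))]

    # Maximize the number of sextets; SciPy minimizes, so the objective is negated.
    c = -np.array([1] * n_ring + [0] * n_bond)

    result = milp(c=c, integrality=1, bounds=Bounds(lb=0, ub=1), constraints=constraints)
    print(int(-result.fun))   # Clar number of benzene: 1
    print(result.x[:n_ring])  # the single ring carries the sextet: [1.]

Re-solving with an added constraint that the previously selected sextets sum to at most (Clar number - 1) is how the recursive calls described above enumerate further structures.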
Returns a list of valid Clar solutions in the form of a tuple, with the following entries: - [0] List of aromatic rings - [1] List of bonds - [2] Optimization solutions + [0] Copy of mol + [1] List of aromatic rings + [2] List of bonds + [3] Solution vector - The optimization solutions may contain multiple valid solutions, each is an array of boolean values with sextet assignments - followed by double bond assignments, with indices corresponding to the list of aromatic rings and list of bonds, respectively. + The solution vector is a binary integer list of length (# aromatic rings + # bonds) indicating + if each ring is aromatic and if each bond is double. - Method adapted from: - Hansen, P.; Zheng, M. The Clar Number of a Benzenoid Hydrocarbon and Linear Programming. - J. Math. Chem. 1994, 15 (1), 93–107. + This implementation follows the original implementation very closely (Hansen, P.; Zheng, M. The + Clar Number of a Benzenoid Hydrocarbon and Linear Programming. J. Math. Chem. 1994, 15 (1), 93–107.) + with only slight modifications to prevent changing bond orders for exocyclic atoms. """ - cython.declare(molecule=Graph, aromatic_rings=list, exo=list, n_rings=cython.int, n_atoms=cython.int, n_bonds=cython.int, - A=list, solutions=tuple) + cython.declare( + molecule=Graph, + aromatic_rings=list, + exo_info=list, + n_ring=cython.int, + n_atom=cython.int, + n_bond=cython.int, + A=list, + solutions=list, + ) - # Make a copy of the molecule so we don't destroy the original + # after we find all the Clar structures we will modify the molecule bond orders to be aromatic, + # so we make explicit copies to avoid overwrites. molecule = mol.copy(deep=True) aromatic_rings = molecule.get_aromatic_rings(save_order=save_order)[0] - aromatic_rings.sort(key=_sum_atom_ids) - if not aromatic_rings: + # doesn't work for system without aromatic rings, just exit + if len(aromatic_rings) == 0: return [] - # Get list of atoms that are in rings + # stability between multiple runs + aromatic_rings.sort(key=_sum_atom_ids) + + # Cython doesn't allow mutable defaults, so we just set it here instead + if recursion_constraints is None: + recursion_constraints = [] + + # Get set of atoms that are in rings atoms = set() for ring in aromatic_rings: atoms.update(ring) atoms = sorted(atoms, key=attrgetter('id')) - # Get list of bonds involving the ring atoms, ignoring bonds to hydrogen + # Get set of bonds involving the ring atoms, ignoring bonds to hydrogen bonds = set() for atom in atoms: bonds.update([atom.bonds[key] for key in atom.bonds.keys() if key.is_non_hydrogen()]) bonds = sorted(bonds, key=_tuplize_bond) - # Identify exocyclic bonds, and save their bond orders - exo = [] + # identify exocyclic bonds, and save their order if exocyclic + exo_info = [] for bond in bonds: if bond.atom1 not in atoms or bond.atom2 not in atoms: + # save order for exocyclic if bond.is_double(): - exo.append(1) + exo_info.append(1) else: - exo.append(0) - else: - exo.append(None) + exo_info.append(0) + else: # not exocyclic + exo_info.append(None) # Dimensions n_ring = len(aromatic_rings) @@ -1049,113 +1071,84 @@ def _clar_optimization(mol, save_order=False): in_ring = [1 if atom in ring else 0 for ring in aromatic_rings] in_bond = [1 if atom in [bond.atom1, bond.atom2] else 0 for bond in bonds] A.append(in_ring + in_bond) - constraints = [LinearConstraint( + initial_constraint = [LinearConstraint( # i.e. 
an equality constraint A=np.array(A, dtype=int), lb=np.ones(n_atom, dtype=int), ub=np.ones(n_atom, dtype=int) )] + # on recursive calls we already know the Clar number, so we can additionally constrain the system + # to only find structures with this number. Allows detecting when all formulae have been found and, + # in theory, should solve faster + if clar_number != -1: + initial_constraint += [ + LinearConstraint( + A=np.array([1] * n_ring + [0] * n_bond), + ub=clar_number, + lb=clar_number, + ), + ] + # Objective vector for optimization: sextets have a weight of 1, double bonds have a weight of 0 - c = - np.array([1] * n_ring + [0] * n_bond, dtype=int) + # we negate because the original problem is formulated as maximization, whereas SciPy only does min + c = -np.array([1] * n_ring + [0] * n_bond, dtype=int) - # Variable bounds + # variable bounds + # - binary problem, so everything must be either zero or one + # - rings are simply 0 or 1 + # - bonds are also 0 or 1, except exocyclic bonds which must remain unchanged bounds = Bounds( - lb=np.array( - [0] * n_ring + [1 if val == 1 else 0 for val in exo], - dtype=int, - ), # lower bounds: 0 except for exo double bonds - ub=np.array( - [1] * n_ring + [0 if val == 0 else 1 for val in exo], - dtype=int, - ), # upper bounds: 1 except for exo single bonds + lb=np.array([0] * n_ring + [0 if b is None else b for b in exo_info], dtype=int), + ub=np.array([1] * n_ring + [1 if b is None else b for b in exo_info], dtype=int), ) - solutions = _solve_clar_milp(c, bounds, constraints, n_ring) - - return aromatic_rings, bonds, solutions - - -def _solve_clar_milp( - c, - bounds, - constraints, - n_ring, - max_num=None, -): - """ - A helpful function to solve the formulated clar optimization MILP problem. ``c``, - ``bounds``, and ``constraints`` are computed in _clar_optimization and follow the - definition of their corresponding kwargs in scipy.optimize.milp. ``n_ring`` is the - number of aromatic rings in the molecule. ``max_num`` is the maximum number of sextets - that can be found. If ``max_num`` is None for first run but will be updated during the - recursion and is used to check sub-optimal solution. 
- """ - # To modify - cython.declare(inner_solutions=list) - result = milp( - c=-c, # negative for maximization + c=c, integrality=1, bounds=bounds, - constraints=constraints, + constraints=initial_constraint + recursion_constraints, options={'time_limit': 10}, ) - if result.status != 0: - raise RuntimeError("Optimization could not find a valid solution.") + if (status := result.status) != 0: + if status == 2: # infeasible + raise RuntimeError("All valid Clar formulae have been enumerated!") + else: + raise RuntimeError(f"Optimization failed (Exit Code {status}) for an unexpected reason '{result.message}'") - obj_val, solution = -result.fun, result.x + _clar_number, solution = -result.fun, result.x - # Check that the result contains at least one aromatic sextet - if obj_val == 0: + # optimization may have reached a bad local minimum - this case is rare + if _clar_number == 0: return [] - # Check that the solution contains the maximum number of sextets possible - if max_num is None: - max_num = obj_val # This is the first solution, so the result should be an upper limit - elif obj_val < max_num: - raise RuntimeError("Optimization obtained a sub-optimal solution.") - + # on later runs, non-integer solutions occur - branching might be able to find actual solutions, + # but we just call it good enough here if any([x != 1 and x != 0 for x in solution]): - raise RuntimeError('Optimization obtained a non-integer solution.') + raise RuntimeError("Optimization obtained a non-integer solution - no more formulae will be enumerated.") + + # first solution, so the result should be an upper limit + if clar_number == -1: + clar_number = _clar_number - # Generate constraints based on the solution obtained - y = solution[:n_ring] - constraints.append( + selected_sextets = list(solution[:n_ring]) + # restrict those rings which were selected for the current clar structure + # from forming it again by requiring that their sum is 1 less than it used + # to be + recursion_constraints += [ LinearConstraint( - A=np.hstack([y, [0] * (solution.shape[0] - n_ring)]), - ub=sum(y) - 1, + A=np.array(selected_sextets + [0] * n_bond), + ub=clar_number - 1, + lb=0, ), - ) + ] # Run optimization with additional constraints try: - inner_solutions = _solve_clar_milp(c, bounds, constraints, n_ring, max_num) - except RuntimeError: + inner_solutions = _clar_optimization(mol, recursion_constraints=recursion_constraints, clar_number=clar_number, save_order=save_order) + except RuntimeError as e: + logging.debug(f"Clar Optimization stopped: {e}") inner_solutions = [] - return inner_solutions + [solution] - - -def _clar_transformation(mol, aromatic_ring): - """ - Performs Clar transformation for given ring in a molecule, ie. conversion to aromatic sextet. - - Args: - mol a :class:`Molecule` object - aromaticRing a list of :class:`Atom` objects corresponding to an aromatic ring in mol - - This function directly modifies the input molecule and does not return anything. 
- """ - cython.declare(bondList=list, i=cython.int, atom1=Vertex, atom2=Vertex, bond=Edge) - - bond_list = [] - - for i, atom1 in enumerate(aromatic_ring): - for atom2 in aromatic_ring[i + 1:]: - if mol.has_bond(atom1, atom2): - bond_list.append(mol.get_bond(atom1, atom2)) - - for bond in bond_list: - bond.order = 1.5 + return inner_solutions + [(molecule, aromatic_rings, bonds, solution)] def generate_adsorbate_shift_down_resonance_structures(mol): diff --git a/rmgpy/molecule/util.py b/rmgpy/molecule/util.py index d1093f45821..46f8c3504c6 100644 --- a/rmgpy/molecule/util.py +++ b/rmgpy/molecule/util.py @@ -30,8 +30,8 @@ import itertools import re -from rmgpy.molecule.molecule import Molecule from rmgpy.molecule.fragment import Fragment +from rmgpy.molecule.molecule import Molecule def get_element_count(obj): @@ -47,7 +47,7 @@ def get_element_count(obj): match = re.match(r"([a-z]+)([0-9]*)", piece, re.I) if match: element, count = match.groups() - if count is '': + if count == '': count = 1 if element in element_count: element_count[element] += int(count) diff --git a/rmgpy/pdep/sls.py b/rmgpy/pdep/sls.py index a3474ac6e51..ce43f659e95 100644 --- a/rmgpy/pdep/sls.py +++ b/rmgpy/pdep/sls.py @@ -33,15 +33,31 @@ """ from juliacall import Main + Main.seval("using ReactionMechanismSimulator.SciMLBase") Main.seval("using ReactionMechanismSimulator.Sundials") +import sys +import logging + import numpy as np import scipy.linalg import scipy.optimize as opt +import scipy.sparse as sparse import rmgpy.constants as constants from rmgpy.pdep.me import generate_full_me_matrix, states_to_configurations -from rmgpy.rmg.reactors import to_julia +from rmgpy.rmg.reactionmechanismsimulator_reactors import to_julia + +NO_JULIA = False +try: + from diffeqpy import de + from julia import Main +except Exception as e: + logging.info( + f"Unable to import Julia dependencies, original error: {str(e)}" + ". Master equation method 'ode' will not be available on this execution." + ) + NO_JULIA = True def get_initial_condition(network, x0, indices): @@ -153,6 +169,11 @@ def get_rate_coefficients_SLS(network, T, P, method="mexp", neglect_high_energy_ tau = np.abs(1.0 / fastest_reaction) if method == "ode": + if NO_JULIA: + raise RuntimeError( + "Required Julia dependencies for method 'ode' are not installed.\n" + "Please follow the steps to install Julia dependencies at https://reactionmechanismgenerator.github.io/RMG-Py/users/rmg/installation/anacondaDeveloper.html." 
+ ) f = Main.seval( """ function f(du, u, M, t) diff --git a/rmgpy/reaction.py b/rmgpy/reaction.py index a42a1fc3f5e..1997824afcc 100644 --- a/rmgpy/reaction.py +++ b/rmgpy/reaction.py @@ -1809,8 +1809,8 @@ def generate_high_p_limit_kinetics(self): """ raise NotImplementedError("generate_high_p_limit_kinetics is not implemented for all Reaction subclasses.") -def same_object(object1, object2, _check_identical, _only_check_label, - _generate_initial_map, _strict, _save_order): +def _same_object(object1, object2, _check_identical=False, _only_check_label=False, + _generate_initial_map=False, _strict=True, _save_order=False): if _only_check_label: return str(object1) == str(object2) elif _check_identical: @@ -1840,10 +1840,10 @@ def same_species_lists(list1, list2, check_identical=False, only_check_label=Fal """ same_object_passthrough = partial( - same_object, + _same_object, _check_identical=check_identical, _only_check_label=only_check_label, - _generate_intial_map=generate_initial_map, + _generate_initial_map=generate_initial_map, _strict=strict, _save_order=save_order, ) diff --git a/rmgpy/rmg/input.py b/rmgpy/rmg/input.py index 117c19644cf..ba48ceb48a5 100644 --- a/rmgpy/rmg/input.py +++ b/rmgpy/rmg/input.py @@ -35,23 +35,36 @@ from rmgpy import settings from rmgpy.data.base import Entry +from rmgpy.data.solvation import SolventData +from rmgpy.data.surface import MetalDatabase +from rmgpy.data.vaporLiquidMassTransfer import ( + liquidVolumetricMassTransferCoefficientPowerLaw, +) from rmgpy.exceptions import DatabaseError, InputError from rmgpy.molecule import Molecule +from rmgpy.molecule.fragment import Fragment from rmgpy.molecule.group import Group -from rmgpy.quantity import Quantity, Energy, RateCoefficient, SurfaceConcentration +from rmgpy.quantity import Energy, Quantity, RateCoefficient, SurfaceConcentration from rmgpy.rmg.model import CoreEdgeReactionModel +from rmgpy.rmg.reactionmechanismsimulator_reactors import ( + NO_JULIA, + ConstantTLiquidSurfaceReactor, + ConstantTPIdealGasReactor, + ConstantTVLiquidReactor, + ConstantVIdealGasReactor, + Reactor, +) from rmgpy.rmg.settings import ModelSettings, SimulatorSettings -from rmgpy.solver.termination import TerminationTime, TerminationConversion, TerminationRateRatio from rmgpy.solver.liquid import LiquidReactor from rmgpy.solver.mbSampled import MBSampledReactor from rmgpy.solver.simple import SimpleReactor from rmgpy.solver.surface import SurfaceReactor +from rmgpy.solver.termination import ( + TerminationConversion, + TerminationRateRatio, + TerminationTime, +) from rmgpy.util import as_list -from rmgpy.data.surface import MetalDatabase -from rmgpy.rmg.reactors import Reactor, ConstantVIdealGasReactor, ConstantTLiquidSurfaceReactor, ConstantTVLiquidReactor, ConstantTPIdealGasReactor -from rmgpy.data.vaporLiquidMassTransfer import liquidVolumetricMassTransferCoefficientPowerLaw -from rmgpy.molecule.fragment import Fragment -from rmgpy.data.solvation import SolventData ################################################################################ @@ -1580,6 +1593,11 @@ def read_input_file(path, rmg0): exec(f.read(), global_context, local_context) except (NameError, TypeError, SyntaxError) as e: logging.error('The input file "{0}" was invalid:'.format(full_path)) + if NO_JULIA: + logging.error( + "During runtime, import of Julia dependencies failed. To use phase systems and RMS reactors, install RMG-Py with RMS." 
+ " (https://reactionmechanismgenerator.github.io/RMG-Py/users/rmg/installation/anacondaDeveloper.html)" + ) logging.exception(e) raise finally: diff --git a/rmgpy/rmg/main.py b/rmgpy/rmg/main.py index bccf00a776f..f3199d09338 100644 --- a/rmgpy/rmg/main.py +++ b/rmgpy/rmg/main.py @@ -52,8 +52,6 @@ from scipy.optimize import brute import rmgpy.util as util -from rmgpy.rmg.model import Species, CoreEdgeReactionModel -from rmgpy.rmg.pdep import PDepNetwork from rmgpy import settings from rmgpy.chemkin import ChemkinWriter from rmgpy.constraints import fails_species_constraints @@ -61,26 +59,32 @@ from rmgpy.data.kinetics.family import TemplateReaction from rmgpy.data.kinetics.library import KineticsLibrary, LibraryReaction from rmgpy.data.rmg import RMGDatabase -from rmgpy.exceptions import ForbiddenStructureException, DatabaseError, CoreError, InputError -from rmgpy.kinetics.diffusionLimited import diffusion_limiter from rmgpy.data.vaporLiquidMassTransfer import vapor_liquid_mass_transfer -from rmgpy.kinetics import ThirdBody -from rmgpy.kinetics import Troe +from rmgpy.exceptions import ( + CoreError, + DatabaseError, + ForbiddenStructureException, + InputError, +) +from rmgpy.kinetics import ThirdBody, Troe +from rmgpy.kinetics.diffusionLimited import diffusion_limiter from rmgpy.molecule import Molecule from rmgpy.qm.main import QMDatabaseWriter from rmgpy.reaction import Reaction -from rmgpy.rmg.listener import SimulationProfileWriter, SimulationProfilePlotter +from rmgpy.rmg.listener import SimulationProfilePlotter, SimulationProfileWriter +from rmgpy.rmg.model import CoreEdgeReactionModel, Species from rmgpy.rmg.output import OutputHTMLWriter -from rmgpy.rmg.pdep import PDepReaction +from rmgpy.rmg.pdep import PDepNetwork, PDepReaction +from rmgpy.rmg.reactionmechanismsimulator_reactors import NO_JULIA +from rmgpy.rmg.reactionmechanismsimulator_reactors import Reactor as RMSReactor from rmgpy.rmg.settings import ModelSettings -from rmgpy.solver.base import TerminationTime, TerminationConversion +from rmgpy.solver.base import TerminationConversion, TerminationTime from rmgpy.solver.simple import SimpleReactor from rmgpy.stats import ExecutionStatsWriter from rmgpy.thermo.thermoengine import submit from rmgpy.tools.plot import plot_sensitivity from rmgpy.tools.uncertainty import Uncertainty, process_local_results from rmgpy.yml import RMSWriter -from rmgpy.rmg.reactors import Reactor ################################################################################ @@ -271,12 +275,6 @@ def load_input(self, path=None): if self.solvent: self.reaction_model.solvent_name = self.solvent - if self.surface_site_density: - self.reaction_model.surface_site_density = self.surface_site_density - self.reaction_model.core.phase_system.phases["Surface"].site_density = self.surface_site_density.value_si - self.reaction_model.edge.phase_system.phases["Surface"].site_density = self.surface_site_density.value_si - self.reaction_model.coverage_dependence = self.coverage_dependence - self.reaction_model.verbose_comments = self.verbose_comments self.reaction_model.save_edge_species = self.save_edge_species @@ -513,6 +511,19 @@ def initialize(self, **kwargs): # Read input file self.load_input(self.input_file) + + # Check if ReactionMechanismSimulator reactors are being used + # if RMS is not installed but the user attempted to use it, the load_input_file would have failed + # if RMS is installed but they did not use it, we can avoid extra work + # if RMS is not installed and they did not use it, we avoid 
calling certain functions that would raise an error + requires_rms = any(isinstance(reactor_system, RMSReactor) for reactor_system in self.reaction_systems) + + if self.surface_site_density: + self.reaction_model.surface_site_density = self.surface_site_density + if requires_rms: + self.reaction_model.core.phase_system.phases["Surface"].site_density = self.surface_site_density.value_si + self.reaction_model.edge.phase_system.phases["Surface"].site_density = self.surface_site_density.value_si + self.reaction_model.coverage_dependence = self.coverage_dependence if kwargs.get("restart", ""): import rmgpy.rmg.input @@ -555,8 +566,11 @@ def initialize(self, **kwargs): # Load databases self.load_database() + for spec in self.initial_species: + self.reaction_model.add_species_to_edge(spec, requires_rms=requires_rms) + for reaction_system in self.reaction_systems: - if isinstance(reaction_system, Reactor): + if isinstance(reaction_system, RMSReactor): reaction_system.finish_termination_criteria() # Load restart seed mechanism (if specified) @@ -604,8 +618,9 @@ def initialize(self, **kwargs): longDesc='', ) solvent_mol = False - self.reaction_model.core.phase_system.phases["Default"].set_solvent(solvent_data) - self.reaction_model.edge.phase_system.phases["Default"].set_solvent(solvent_data) + if requires_rms: + self.reaction_model.core.phase_system.phases["Default"].set_solvent(solvent_data) + self.reaction_model.edge.phase_system.phases["Default"].set_solvent(solvent_data) diffusion_limiter.enable(solvent_data, self.database.solvation) logging.info("Setting solvent data for {0}".format(self.solvent)) @@ -647,12 +662,12 @@ def initialize(self, **kwargs): # Seed mechanisms: add species and reactions from seed mechanism # DON'T generate any more reactions for the seed species at this time for seed_mechanism in self.seed_mechanisms: - self.reaction_model.add_seed_mechanism_to_core(seed_mechanism, react=False) + self.reaction_model.add_seed_mechanism_to_core(seed_mechanism, react=False, requires_rms=requires_rms) # Reaction libraries: add species and reactions from reaction library to the edge so # that RMG can find them if their rates are large enough for library, option in self.reaction_libraries: - self.reaction_model.add_reaction_library_to_edge(library) + self.reaction_model.add_reaction_library_to_edge(library, requires_rms=requires_rms) # Also always add in a few bath gases (since RMG-Java does) for label, smiles in [("Ar", "[Ar]"), ("He", "[He]"), ("Ne", "[Ne]"), ("N2", "N#N")]: @@ -728,35 +743,37 @@ def initialize(self, **kwargs): # This is necessary so that the PDep algorithm can identify the bath gas for spec in self.initial_species: if not spec.reactive: - self.reaction_model.enlarge(spec) + self.reaction_model.enlarge(spec, requires_rms=requires_rms) for spec in self.initial_species: if spec.reactive: - self.reaction_model.enlarge(spec) + self.reaction_model.enlarge(spec, requires_rms=requires_rms) # chatelak: store constant SPC indices in the reactor attributes if any constant SPC provided in the input file # advantages to write it here: this is run only once (as species indexes does not change over the generation) if self.solvent is not None: for index, reaction_system in enumerate(self.reaction_systems): if ( - not isinstance(reaction_system, Reactor) and reaction_system.const_spc_names is not None - ): # if no constant species provided do nothing + not isinstance(reaction_system, RMSReactor) + ) and reaction_system.const_spc_names is not None: # if no constant species provided do 
nothing reaction_system.get_const_spc_indices(self.reaction_model.core.species) # call the function to identify indices in the solver self.initialize_reaction_threshold_and_react_flags() if self.filter_reactions and self.init_react_tuples: - self.react_init_tuples() + self.react_init_tuples(requires_rms=requires_rms) self.reaction_model.initialize_index_species_dict() self.initialize_seed_mech() + return requires_rms - def register_listeners(self): + def register_listeners(self, requires_rms=False): """ Attaches listener classes depending on the options found in the RMG input file. """ self.attach(ChemkinWriter(self.output_directory)) - self.attach(RMSWriter(self.output_directory)) + if requires_rms: + self.attach(RMSWriter(self.output_directory)) if self.generate_output_html: self.attach(OutputHTMLWriter(self.output_directory)) @@ -768,7 +785,7 @@ def register_listeners(self): if self.save_simulation_profiles: for index, reaction_system in enumerate(self.reaction_systems): - if isinstance(reaction_system, Reactor): + if requires_rms and isinstance(reaction_system, RMSReactor): typ = type(reaction_system) raise InputError(f"save_simulation_profiles=True not compatible with reactor of type {typ}") reaction_system.attach(SimulationProfileWriter(self.output_directory, index, self.reaction_model.core.species)) @@ -782,10 +799,10 @@ def execute(self, initialize=True, **kwargs): """ if initialize: - self.initialize(**kwargs) + requires_rms = self.initialize(**kwargs) # register listeners - self.register_listeners() + self.register_listeners(requires_rms=requires_rms) self.done = False @@ -812,7 +829,7 @@ def execute(self, initialize=True, **kwargs): # Update react flags if self.filter_reactions: # Run the reaction system to update threshold and react flags - if isinstance(reaction_system, Reactor): + if requires_rms and isinstance(reaction_system, RMSReactor): self.update_reaction_threshold_and_react_flags( rxn_sys_unimol_threshold=np.zeros((len(self.reaction_model.core.species),), bool), rxn_sys_bimol_threshold=np.zeros((len(self.reaction_model.core.species), len(self.reaction_model.core.species)), bool), @@ -855,6 +872,7 @@ def execute(self, initialize=True, **kwargs): unimolecular_react=self.unimolecular_react, bimolecular_react=self.bimolecular_react, trimolecular_react=self.trimolecular_react, + requires_rms=requires_rms, ) if not np.isinf(self.model_settings_list[0].thermo_tol_keep_spc_in_edge): @@ -867,7 +885,7 @@ def execute(self, initialize=True, **kwargs): ) if not np.isinf(self.model_settings_list[0].thermo_tol_keep_spc_in_edge): - self.reaction_model.thermo_filter_down(maximum_edge_species=self.model_settings_list[0].maximum_edge_species) + self.reaction_model.thermo_filter_down(maximum_edge_species=self.model_settings_list[0].maximum_edge_species, requires_rms=requires_rms) logging.info("Completed initial enlarge edge step.\n") @@ -933,7 +951,7 @@ def execute(self, initialize=True, **kwargs): prune = False try: - if isinstance(reaction_system, Reactor): + if requires_rms and isinstance(reaction_system, RMSReactor): ( terminated, resurrected, @@ -1026,7 +1044,7 @@ def execute(self, initialize=True, **kwargs): # Add objects to enlarge to the core first for objectToEnlarge in objects_to_enlarge: - self.reaction_model.enlarge(objectToEnlarge) + self.reaction_model.enlarge(objectToEnlarge, requires_rms=requires_rms) if model_settings.filter_reactions: # Run a raw simulation to get updated reaction system threshold values @@ -1035,7 +1053,7 @@ def execute(self, initialize=True, **kwargs): 
temp_model_settings.tol_keep_in_edge = 0 if not resurrected: try: - if isinstance(reaction_system, Reactor): + if requires_rms and isinstance(reaction_system, RMSReactor): ( terminated, resurrected, @@ -1104,7 +1122,7 @@ def execute(self, initialize=True, **kwargs): skip_update=True, ) logging.warning( - "Reaction thresholds/flags for Reaction System {0} was not updated due " "to resurrection".format(index + 1) + "Reaction thresholds/flags for Reaction System {0} was not updated due to resurrection".format(index + 1) ) logging.info("") @@ -1127,13 +1145,14 @@ def execute(self, initialize=True, **kwargs): unimolecular_react=self.unimolecular_react, bimolecular_react=self.bimolecular_react, trimolecular_react=self.trimolecular_react, + requires_rms=requires_rms, ) if old_edge_size != len(self.reaction_model.edge.reactions) or old_core_size != len(self.reaction_model.core.reactions): reactor_done = False if not np.isinf(self.model_settings_list[0].thermo_tol_keep_spc_in_edge): - self.reaction_model.thermo_filter_down(maximum_edge_species=model_settings.maximum_edge_species) + self.reaction_model.thermo_filter_down(maximum_edge_species=model_settings.maximum_edge_species, requires_rms=requires_rms) max_num_spcs_hit = len(self.reaction_model.core.species) >= model_settings.max_num_species @@ -1160,6 +1179,7 @@ def execute(self, initialize=True, **kwargs): model_settings.tol_move_to_core, model_settings.maximum_edge_species, model_settings.min_species_exist_iterations_for_prune, + requires_rms=requires_rms, ) # Perform garbage collection after pruning collected = gc.collect() @@ -1281,8 +1301,9 @@ def run_uncertainty_analysis(self): ) self.uncertainty["global"] = False else: - import re import random + import re + from rmgpy.tools.canteramodel import Cantera from rmgpy.tools.globaluncertainty import ReactorPCEFactory @@ -1927,7 +1948,7 @@ def initialize_reaction_threshold_and_react_flags(self): if self.trimolecular: self.trimolecular_react[:num_restart_spcs, :num_restart_spcs, :num_restart_spcs] = False - def react_init_tuples(self): + def react_init_tuples(self, requires_rms=False): """ Reacts tuples given in the react block """ @@ -1960,6 +1981,7 @@ def react_init_tuples(self): unimolecular_react=self.unimolecular_react, bimolecular_react=self.bimolecular_react, trimolecular_react=self.trimolecular_react, + requires_rms=requires_rms, ) def update_reaction_threshold_and_react_flags( @@ -2254,7 +2276,7 @@ def __init__(self, reaction_system, bspc): if isinstance(value, list): self.Ranges[key] = [v.value_si for v in value] - if isinstance(reaction_system, Reactor): + if isinstance(reaction_system, RMSReactor): self.tmax = reaction_system.tf else: for term in reaction_system.termination: @@ -2472,7 +2494,16 @@ def make_profile_graph(stats_file, force_graph_generation=False): if display_found or force_graph_generation: try: - from gprof2dot import PstatsParser, DotWriter, SAMPLES, themes, TIME, TIME_RATIO, TOTAL_TIME, TOTAL_TIME_RATIO + from gprof2dot import ( + SAMPLES, + TIME, + TIME_RATIO, + TOTAL_TIME, + TOTAL_TIME_RATIO, + DotWriter, + PstatsParser, + themes, + ) except ImportError: logging.warning("Trouble importing from package gprof2dot. 
Unable to create a graph of the profile statistics.") logging.warning("Try getting the latest version with something like `pip install --upgrade gprof2dot`.") diff --git a/rmgpy/rmg/model.py b/rmgpy/rmg/model.py index ee6795806b2..713e7b6ea08 100644 --- a/rmgpy/rmg/model.py +++ b/rmgpy/rmg/model.py @@ -44,21 +44,27 @@ from rmgpy.data.kinetics.depository import DepositoryReaction from rmgpy.data.kinetics.family import KineticsFamily, TemplateReaction from rmgpy.data.kinetics.library import KineticsLibrary, LibraryReaction -from rmgpy.data.vaporLiquidMassTransfer import vapor_liquid_mass_transfer -from rmgpy.molecule.group import Group from rmgpy.data.rmg import get_db +from rmgpy.data.vaporLiquidMassTransfer import vapor_liquid_mass_transfer from rmgpy.display import display from rmgpy.exceptions import ForbiddenStructureException -from rmgpy.kinetics import KineticsData, Arrhenius +from rmgpy.kinetics import Arrhenius, KineticsData +from rmgpy.molecule.fragment import Fragment +from rmgpy.molecule.group import Group from rmgpy.quantity import Quantity from rmgpy.reaction import Reaction -from rmgpy.rmg.pdep import PDepReaction, PDepNetwork +from rmgpy.rmg.decay import decay_species +from rmgpy.rmg.pdep import PDepNetwork, PDepReaction from rmgpy.rmg.react import react_all +from rmgpy.rmg.reactionmechanismsimulator_reactors import ( + NO_JULIA, + Interface, + Phase, + PhaseSystem, +) +from rmgpy.rmg.reactionmechanismsimulator_reactors import Reactor as RMSReactor from rmgpy.species import Species from rmgpy.thermo.thermoengine import submit -from rmgpy.rmg.decay import decay_species -from rmgpy.rmg.reactors import PhaseSystem, Phase, Interface, Reactor -from rmgpy.molecule.fragment import Fragment ################################################################################ @@ -75,7 +81,7 @@ def __init__(self, species=None, reactions=None, phases=None, interfaces={}): if phases is None: phases = {"Default": Phase(), "Surface": Phase()} interfaces = {frozenset({"Default", "Surface"}): Interface(list(phases.values()))} - self.phase_system = PhaseSystem(phases, interfaces) + self.phase_system = None if NO_JULIA else PhaseSystem(phases, interfaces) def __reduce__(self): """ @@ -349,9 +355,10 @@ def make_new_species(self, object, label="", reactive=True, check_existing=True, orilabel = spec.label label = orilabel i = 2 - while any([label in phase.names for phase in self.edge.phase_system.phases.values()]): - label = orilabel + "-" + str(i) - i += 1 + if self.edge.phase_system: # None when RMS not installed + while any([label in phase.names for phase in self.edge.phase_system.phases.values()]): + label = orilabel + "-" + str(i) + i += 1 spec.label = label logging.debug("Creating new species %s", spec.label) @@ -594,7 +601,7 @@ def make_new_pdep_reaction(self, forward): return forward - def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bimolecular_react=None, trimolecular_react=None): + def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bimolecular_react=None, trimolecular_react=None, requires_rms=False): """ Enlarge a reaction model by processing the objects in the list `new_object`. 
If `new_object` is a @@ -641,7 +648,7 @@ def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bi # Add new species if new_species not in self.core.species: - reactions_moved_from_edge = self.add_species_to_core(new_species) + reactions_moved_from_edge = self.add_species_to_core(new_species, requires_rms=requires_rms) else: reactions_moved_from_edge = [] @@ -649,7 +656,7 @@ def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bi pdep_network, new_species = new_object new_reactions.extend(pdep_network.explore_isomer(new_species)) - self.process_new_reactions(new_reactions, new_species, pdep_network) + self.process_new_reactions(new_reactions, new_species, pdep_network, requires_rms=requires_rms) else: raise TypeError( @@ -673,7 +680,7 @@ def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bi if len(products) == 1 and products[0] == species: new_reactions = network.explore_isomer(species) - self.process_new_reactions(new_reactions, species, network) + self.process_new_reactions(new_reactions, species, network, requires_rms=requires_rms) network.update_configurations(self) index = 0 break @@ -698,7 +705,7 @@ def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bi # Identify a core species which was used to generate the reaction # This is only used to determine the reaction direction for processing spc = spcTuple[0] - self.process_new_reactions(rxnList, spc) + self.process_new_reactions(rxnList, spc, requires_rms=requires_rms) ################################################################ # Begin processing the new species and reactions @@ -710,7 +717,7 @@ def enlarge(self, new_object=None, react_edge=False, unimolecular_react=None, bi # Do thermodynamic filtering if not np.isinf(self.thermo_tol_keep_spc_in_edge) and self.new_species_list != []: - self.thermo_filter_species(self.new_species_list) + self.thermo_filter_species(self.new_species_list, requires_rms=requires_rms) # Update unimolecular (pressure dependent) reaction networks if self.pressure_dependence: @@ -807,7 +814,7 @@ def clear_surface_adjustments(self): self.new_surface_spcs_loss = set() self.new_surface_rxns_loss = set() - def process_new_reactions(self, new_reactions, new_species, pdep_network=None, generate_thermo=True, generate_kinetics=True): + def process_new_reactions(self, new_reactions, new_species, pdep_network=None, generate_thermo=True, generate_kinetics=True, requires_rms=False): """ Process a list of newly-generated reactions involving the new core species or explored isomer `new_species` in network `pdep_network`. 
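The names used above (NO_JULIA, Phase, PhaseSystem, Interface, and the RMSReactor alias) come from the renamed module rmgpy/rmg/reactionmechanismsimulator_reactors.py, whose body is outside this excerpt. A minimal sketch of the kind of import guard such a module needs so the rest of RMG can import these names unconditionally, mirroring the try/except guard added to rmgpy/pdep/sls.py earlier in this patch (the exact contents of the renamed module, and the specific imports shown, are assumptions here):

    import logging

    NO_JULIA = False
    try:
        # placeholder imports: whatever Julia-backed pieces the module actually needs
        from juliacall import Main
        Main.seval("using ReactionMechanismSimulator")
    except Exception as e:
        logging.info(
            "Unable to import Julia dependencies, original error: " + str(e)
            + ". RMS reactors and phase systems will not be available on this execution."
        )
        NO_JULIA = True

With a flag like this in place, callers only need to consult NO_JULIA (as CoreEdgeReactionModel does when it sets phase_system to None) rather than wrapping every import site in its own try/except.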
@@ -833,12 +840,12 @@ def process_new_reactions(self, new_reactions, new_species, pdep_network=None, g if spec not in self.core.species: all_species_in_core = False if spec not in self.edge.species: - self.add_species_to_edge(spec) + self.add_species_to_edge(spec, requires_rms=requires_rms) for spec in rxn.products: if spec not in self.core.species: all_species_in_core = False if spec not in self.edge.species: - self.add_species_to_edge(spec) + self.add_species_to_edge(spec, requires_rms=requires_rms) isomer_atoms = sum([len(spec.molecule[0].atoms) for spec in rxn.reactants]) @@ -866,9 +873,9 @@ def process_new_reactions(self, new_reactions, new_species, pdep_network=None, g # The reaction is not new, so it should already be in the core or edge continue if all_species_in_core: - self.add_reaction_to_core(rxn) + self.add_reaction_to_core(rxn, requires_rms=requires_rms) else: - self.add_reaction_to_edge(rxn) + self.add_reaction_to_edge(rxn, requires_rms=requires_rms) else: # Add the reaction to the appropriate unimolecular reaction network # If pdep_network is not None then that will be the network the @@ -1111,7 +1118,7 @@ def log_enlarge_summary( logging.info(" The model edge has {0:d} species and {1:d} reactions".format(edge_species_count, edge_reaction_count)) logging.info("") - def add_species_to_core(self, spec): + def add_species_to_core(self, spec, requires_rms=False): """ Add a species `spec` to the reaction model core (and remove from edge if necessary). This function also moves any reactions in the edge that gain @@ -1129,7 +1136,7 @@ def add_species_to_core(self, spec): if spec in self.edge.species: # remove forbidden species from edge logging.info("Species {0} was Forbidden and not added to Core...Removing from Edge.".format(spec)) - self.remove_species_from_edge(self.reaction_systems, spec) + self.remove_species_from_edge(self.reaction_systems, spec, requires_rms=requires_rms) # remove any empty pdep networks as a result of species removal if self.pressure_dependence: self.remove_empty_pdep_networks() @@ -1141,7 +1148,8 @@ def add_species_to_core(self, spec): rxn_list = [] if spec in self.edge.species: - self.edge.phase_system.pass_species(spec.label, self.core.phase_system) + if requires_rms: + self.edge.phase_system.pass_species(spec.label, self.core.phase_system) # If species was in edge, remove it logging.debug("Removing species %s from edge.", spec) self.edge.species.remove(spec) @@ -1161,31 +1169,26 @@ def add_species_to_core(self, spec): # Move any identified reactions to the core for rxn in rxn_list: - self.add_reaction_to_core(rxn) + self.add_reaction_to_core(rxn, requires_rms=requires_rms) logging.debug("Moving reaction from edge to core: %s", rxn) - else: - if spec.molecule[0].contains_surface_site(): - self.core.phase_system.phases["Surface"].add_species(spec, edge_phase=self.edge.phase_system.phases["Surface"]) - self.edge.phase_system.species_dict[spec.label] = spec - self.core.phase_system.species_dict[spec.label] = spec - else: - self.core.phase_system.phases["Default"].add_species(spec, edge_phase=self.edge.phase_system.phases["Default"]) - self.edge.phase_system.species_dict[spec.label] = spec - self.core.phase_system.species_dict[spec.label] = spec + elif requires_rms: + destination_phase = "Surface" if spec.molecule[0].contains_surface_site() else "Default" + self.core.phase_system.phases[destination_phase].add_species(spec, edge_phase=self.edge.phase_system.phases[destination_phase]) + self.edge.phase_system.species_dict[spec.label] = spec + 
self.core.phase_system.species_dict[spec.label] = spec return rxn_list - def add_species_to_edge(self, spec): + def add_species_to_edge(self, spec, requires_rms=False): """ - Add a species `spec` to the reaction model edge. + Add a species `spec` to the reaction model edge and optionally the RMS phase. """ self.edge.species.append(spec) - if spec.molecule[0].contains_surface_site(): - self.edge.phase_system.phases["Surface"].add_species(spec) - self.edge.phase_system.species_dict[spec.label] = spec - else: - self.edge.phase_system.phases["Default"].add_species(spec) - self.edge.phase_system.species_dict[spec.label] = spec + if not requires_rms: + return + destination_phase = "Surface" if spec.molecule[0].contains_surface_site() else "Default" + self.edge.phase_system.phases[destination_phase].add_species(spec) + self.edge.phase_system.species_dict[spec.label] = spec def set_thermodynamic_filtering_parameters( self, Tmax, thermo_tol_keep_spc_in_edge, min_core_size_for_prune, maximum_edge_species, reaction_systems @@ -1209,7 +1212,7 @@ def set_thermodynamic_filtering_parameters( self.reaction_systems = reaction_systems self.maximum_edge_species = maximum_edge_species - def thermo_filter_species(self, spcs): + def thermo_filter_species(self, spcs, requires_rms=False): """ checks Gibbs energy of the species in species against the maximum allowed Gibbs energy @@ -1224,13 +1227,13 @@ def thermo_filter_species(self, spcs): "greater than the thermo_tol_keep_spc_in_edge of " "{3} ".format(spc, G, Gn, self.thermo_tol_keep_spc_in_edge) ) - self.remove_species_from_edge(self.reaction_systems, spc) + self.remove_species_from_edge(self.reaction_systems, spc, requires_rms=requires_rms) # Delete any networks that became empty as a result of pruning if self.pressure_dependence: self.remove_empty_pdep_networks() - def thermo_filter_down(self, maximum_edge_species, min_species_exist_iterations_for_prune=0): + def thermo_filter_down(self, maximum_edge_species, min_species_exist_iterations_for_prune=0, requires_rms=False): """ removes species from the edge based on their Gibbs energy until maximum_edge_species is reached under the constraint that all removed species are older than @@ -1272,7 +1275,7 @@ def thermo_filter_down(self, maximum_edge_species, min_species_exist_iterations_ logging.info( "Removing species {0} from edge to meet maximum number of edge species, Gibbs " "number is {1}".format(spc, Gns[rInds[i]]) ) - self.remove_species_from_edge(self.reaction_systems, spc) + self.remove_species_from_edge(self.reaction_systems, spc, requires_rms=requires_rms) # Delete any networks that became empty as a result of pruning if self.pressure_dependence: @@ -1302,7 +1305,7 @@ def remove_empty_pdep_networks(self): del self.network_dict[source] self.network_list.remove(network) - def prune(self, reaction_systems, tol_keep_in_edge, tol_move_to_core, maximum_edge_species, min_species_exist_iterations_for_prune): + def prune(self, reaction_systems, tol_keep_in_edge, tol_move_to_core, maximum_edge_species, min_species_exist_iterations_for_prune, requires_rms=False): """ Remove species from the model edge based on the simulation results from the list of `reaction_systems`. 
@@ -1382,7 +1385,7 @@ def prune(self, reaction_systems, tol_keep_in_edge, tol_move_to_core, maximum_ed for index, spec in species_to_prune[0:prune_due_to_rate_counter]: logging.info("Pruning species %s", spec) logging.debug(" %-56s %10.4e", spec, max_edge_species_rate_ratios[index]) - self.remove_species_from_edge(reaction_systems, spec) + self.remove_species_from_edge(reaction_systems, spec, requires_rms=requires_rms) if len(species_to_prune) - prune_due_to_rate_counter > 0: logging.info( "Pruning %d species to obtain an edge size of %d species", len(species_to_prune) - prune_due_to_rate_counter, maximum_edge_species @@ -1390,7 +1393,7 @@ def prune(self, reaction_systems, tol_keep_in_edge, tol_move_to_core, maximum_ed for index, spec in species_to_prune[prune_due_to_rate_counter:]: logging.info("Pruning species %s", spec) logging.debug(" %-56s %10.4e", spec, max_edge_species_rate_ratios[index]) - self.remove_species_from_edge(reaction_systems, spec) + self.remove_species_from_edge(reaction_systems, spec, requires_rms=requires_rms) # Delete any networks that became empty as a result of pruning if self.pressure_dependence: @@ -1398,7 +1401,7 @@ def prune(self, reaction_systems, tol_keep_in_edge, tol_move_to_core, maximum_ed logging.info("") - def remove_species_from_edge(self, reaction_systems, spec): + def remove_species_from_edge(self, reaction_systems, spec, requires_rms=False): """ Remove species `spec` from the reaction model edge. """ @@ -1406,11 +1409,12 @@ def remove_species_from_edge(self, reaction_systems, spec): # remove the species self.edge.species.remove(spec) self.index_species_dict.pop(spec.index) - self.edge.phase_system.remove_species(spec) + if requires_rms: + self.edge.phase_system.remove_species(spec) # clean up species references in reaction_systems for reaction_system in reaction_systems: - if not isinstance(reaction_system, Reactor): + if not requires_rms or not isinstance(reaction_system, RMSReactor): try: reaction_system.species_index.pop(spec) except KeyError: @@ -1482,7 +1486,7 @@ def remove_species_from_edge(self, reaction_systems, spec): self.species_cache.remove(spec) self.species_cache.append(None) - def add_reaction_to_core(self, rxn): + def add_reaction_to_core(self, rxn, requires_rms=False): """ Add a reaction `rxn` to the reaction model core (and remove from edge if necessary). This function assumes `rxn` has already been checked to @@ -1491,7 +1495,8 @@ def add_reaction_to_core(self, rxn): """ if rxn not in self.core.reactions: self.core.reactions.append(rxn) - if rxn not in self.edge.reactions: + + if requires_rms and rxn not in self.edge.reactions: # If a reaction is not in edge but is going to add to core, it is either a seed mechanism or a newly generated reaction where all reactants and products are already in core # If the reaction is in edge, then the corresponding rms_rxn was moved from edge phase to core phase in pass_species already. rms_species_list = self.core.phase_system.get_rms_species_list() @@ -1508,7 +1513,7 @@ def add_reaction_to_core(self, rxn): if rxn in self.edge.reactions: self.edge.reactions.remove(rxn) - def add_reaction_to_edge(self, rxn): + def add_reaction_to_edge(self, rxn, requires_rms=False): """ Add a reaction `rxn` to the reaction model edge. This function assumes `rxn` has already been checked to ensure it is supposed to be an edge @@ -1517,6 +1522,8 @@ def add_reaction_to_edge(self, rxn): edge). 
""" self.edge.reactions.append(rxn) + if not requires_rms: + return rms_species_list = self.edge.phase_system.get_rms_species_list() species_names = self.edge.phase_system.get_species_names() bits = np.array([spc.molecule[0].contains_surface_site() for spc in rxn.reactants + rxn.products]) @@ -1573,7 +1580,7 @@ def get_stoichiometry_matrix(self): stoichiometry[i, j] = nu return stoichiometry.tocsr() - def add_seed_mechanism_to_core(self, seed_mechanism, react=False): + def add_seed_mechanism_to_core(self, seed_mechanism, react=False, requires_rms=False): """ Add all species and reactions from `seed_mechanism`, a :class:`KineticsPrimaryDatabase` object, to the model core. If `react` @@ -1645,9 +1652,9 @@ def add_seed_mechanism_to_core(self, seed_mechanism, react=False): # This unimolecular library reaction is flagged as `elementary_high_p` and has Arrhenius type kinetics. # We should calculate a pressure-dependent rate for it if len(rxn.reactants) == 1: - self.process_new_reactions(new_reactions=[rxn], new_species=rxn.reactants[0]) + self.process_new_reactions(new_reactions=[rxn], new_species=rxn.reactants[0], requires_rms=requires_rms) else: - self.process_new_reactions(new_reactions=[rxn], new_species=rxn.products[0]) + self.process_new_reactions(new_reactions=[rxn], new_species=rxn.products[0], requires_rms=requires_rms) # Perform species constraints and forbidden species checks @@ -1684,7 +1691,7 @@ def add_seed_mechanism_to_core(self, seed_mechanism, react=False): spec.get_liquid_volumetric_mass_transfer_coefficient_data() spec.get_henry_law_constant_data() - self.add_species_to_core(spec) + self.add_species_to_core(spec, requires_rms=requires_rms) for rxn in self.new_reaction_list: if self.pressure_dependence and rxn.is_unimolecular(): @@ -1695,7 +1702,7 @@ def add_seed_mechanism_to_core(self, seed_mechanism, react=False): submit(spec, self.solvent_name) rxn.fix_barrier_height(force_positive=True, solvent=self.solvent_name) - self.add_reaction_to_core(rxn) + self.add_reaction_to_core(rxn, requires_rms=requires_rms) # Check we didn't introduce unmarked duplicates self.mark_chemkin_duplicates() @@ -1707,7 +1714,7 @@ def add_seed_mechanism_to_core(self, seed_mechanism, react=False): new_edge_reactions=[], ) - def add_reaction_library_to_edge(self, reaction_library): + def add_reaction_library_to_edge(self, reaction_library, requires_rms=False): """ Add all species and reactions from `reaction_library`, a :class:`KineticsPrimaryDatabase` object, to the model edge. @@ -1772,9 +1779,9 @@ def add_reaction_library_to_edge(self, reaction_library): # This unimolecular library reaction is flagged as `elementary_high_p` and has Arrhenius type kinetics. 
# We should calculate a pressure-dependent rate for it if len(rxn.reactants) == 1: - self.process_new_reactions(new_reactions=[rxn], new_species=rxn.reactants[0]) + self.process_new_reactions(new_reactions=[rxn], new_species=rxn.reactants[0], requires_rms=requires_rms) else: - self.process_new_reactions(new_reactions=[rxn], new_species=rxn.products[0]) + self.process_new_reactions(new_reactions=[rxn], new_species=rxn.products[0], requires_rms=requires_rms) # Perform species constraints and forbidden species checks for spec in self.new_species_list: @@ -1811,7 +1818,7 @@ def add_reaction_library_to_edge(self, reaction_library): spec.get_liquid_volumetric_mass_transfer_coefficient_data() spec.get_henry_law_constant_data() - self.add_species_to_edge(spec) + self.add_species_to_edge(spec, requires_rms=requires_rms) for rxn in self.new_reaction_list: if not ( @@ -1826,7 +1833,7 @@ def add_reaction_library_to_edge(self, reaction_library): ) ): # Don't add to the edge library reactions that were already processed - self.add_reaction_to_edge(rxn) + self.add_reaction_to_edge(rxn, requires_rms=requires_rms) if self.save_edge_species: from rmgpy.chemkin import mark_duplicate_reaction diff --git a/rmgpy/rmg/pdep.py b/rmgpy/rmg/pdep.py index 0e1398e51a7..834f0f72432 100644 --- a/rmgpy/rmg/pdep.py +++ b/rmgpy/rmg/pdep.py @@ -736,7 +736,7 @@ def remove_products_from_reactants(self): self.reactants.remove(prod) self.products = self.products_cache - def update(self, reaction_model, pdep_settings): + def update(self, reaction_model, pdep_settings, requires_rms=False): """ Regenerate the :math:`k(T,P)` values for this partial network if the network is marked as invalid. @@ -917,7 +917,7 @@ def update(self, reaction_model, pdep_settings): f'from the {rxn.library} library, and was not added to the model') break else: - reaction_model.add_reaction_to_core(net_reaction) + reaction_model.add_reaction_to_core(net_reaction, requires_rms=requires_rms) else: # Check whether netReaction already exists in the edge as a LibraryReaction for rxn in reaction_model.edge.reactions: @@ -929,7 +929,7 @@ def update(self, reaction_model, pdep_settings): f'from the {rxn.library} library, and was not added to the model') break else: - reaction_model.add_reaction_to_edge(net_reaction) + reaction_model.add_reaction_to_edge(net_reaction, requires_rms=requires_rms) # Set/update the net reaction kinetics using interpolation model kdata = K[:, :, i, j].copy() diff --git a/rmgpy/rmg/reactors.py b/rmgpy/rmg/reactionmechanismsimulator_reactors.py similarity index 98% rename from rmgpy/rmg/reactors.py rename to rmgpy/rmg/reactionmechanismsimulator_reactors.py index cc5f9a6b528..567c370d342 100644 --- a/rmgpy/rmg/reactors.py +++ b/rmgpy/rmg/reactionmechanismsimulator_reactors.py @@ -93,12 +93,27 @@ def to_julia(obj): if isinstance(obj, dict): return Main.PythonCall.pyconvert(Main.Dict, obj) elif isinstance(obj, (list, np.ndarray)): - if obj.getattr("shape", False) and len(obj.shape) > 1: + if getattr(obj, "shape", False) and len(obj.shape) > 1: return Main.PythonCall.pyconvert(Main.Matrix, obj) return Main.PythonCall.pyconvert(Main.Vector, obj) else: # Other native Python project does not need special conversion. 
return obj +NO_JULIA = False +try: + if __debug__: + from os.path import abspath, dirname, exists, join + + from julia.api import Julia + path_rms = dirname(dirname(dirname(abspath(__file__)))) + jl = Julia(sysimage=join(path_rms, "rms.so")) if exists(join(path_rms, "rms.so")) else Julia(compiled_modules=False) + from diffeqpy import de + from julia import Main + from pyrms import rms +except Exception as e: + logging.info("Unable to import Julia dependencies, original error: " + str(e) + ". RMS features will not be available on this execution.") + NO_JULIA = True + class PhaseSystem: """ diff --git a/rmgpy/tools/fluxdiagram.py b/rmgpy/tools/fluxdiagram.py index 8a472046e06..81ecf44f89a 100644 --- a/rmgpy/tools/fluxdiagram.py +++ b/rmgpy/tools/fluxdiagram.py @@ -42,7 +42,7 @@ from rmgpy.kinetics.diffusionLimited import diffusion_limiter from rmgpy.rmg.settings import SimulatorSettings -from rmgpy.solver.base import TerminationTime, TerminationConversion +from rmgpy.solver.base import TerminationConversion, TerminationTime from rmgpy.solver.liquid import LiquidReactor from rmgpy.tools.loader import load_rmg_job @@ -308,7 +308,10 @@ def generate_flux_diagram(reaction_model, times, concentrations, reaction_rates, '-pix_fmt', 'yuv420p', # Pixel format 'flux_diagram.avi'] # Output filename - subprocess.check_call(command, cwd=output_directory) + try: + subprocess.run(command, check=True, capture_output=True, cwd=output_directory) + except subprocess.CalledProcessError as err: + raise RuntimeError(f"{err} {err.stderr.decode('utf8')} {err.stdout.decode('utf8')}") from err ################################################################################ diff --git a/test/arkane/encorr/isodesmicTest.py b/test/arkane/encorr/isodesmicTest.py index 8b41066b3dd..02e5684f440 100644 --- a/test/arkane/encorr/isodesmicTest.py +++ b/test/arkane/encorr/isodesmicTest.py @@ -251,12 +251,6 @@ class TestErrorCancelingScheme: @classmethod def setup_class(cls): - try: - import pyomo as pyo - except ImportError: - pyo = None - cls.pyo = pyo - lot = LevelOfTheory("test") cls.propene = ErrorCancelingSpecies(Molecule(smiles="CC=C"), (100, "kJ/mol"), lot, (105, "kJ/mol")) cls.propane = ErrorCancelingSpecies(Molecule(smiles="CCC"), (75, "kJ/mol"), lot, (80, "kJ/mol")) @@ -286,26 +280,20 @@ def test_creating_error_canceling_schemes(self): assert isodesmic_scheme.reference_species == [self.butane, self.benzene] - # def test_find_error_canceling_reaction(self): - # """ - # Test that the MILP problem can be solved to find a single isodesmic reaction - # """ - # scheme = IsodesmicScheme( - # self.propene, - # [self.propane, self.butane, self.butene, self.caffeine, self.ethyne], - # ) - - # # Note that caffeine and ethyne will not be allowed, so for the full set the indices are [0, 1, 2] - # rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["lpsolve"]) - # assert rxn.species[self.butane] == -1 - # assert rxn.species[self.propane] == 1 - # assert rxn.species[self.butene] == 1 - - # if self.pyo is not None: - # rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2], milp_software=["pyomo"]) - # assert rxn.species[self.butane] == -1 - # assert rxn.species[self.propane] == 1 - # assert rxn.species[self.butene] == 1 + def test_find_error_canceling_reaction(self): + """ + Test that the MILP problem can be solved to find a single isodesmic reaction + """ + scheme = IsodesmicScheme( + self.propene, + [self.propane, self.butane, self.butene, self.caffeine, self.ethyne], + ) + + # Note that caffeine and ethyne 
will not be allowed, so for the full set the indices are [0, 1, 2] + rxn, _ = scheme._find_error_canceling_reaction([0, 1, 2]) + assert rxn.species[self.butane] == -1 + assert rxn.species[self.propane] == 1 + assert rxn.species[self.butene] == 1 def test_multiple_error_canceling_reactions(self): """ @@ -333,13 +321,6 @@ def test_multiple_error_canceling_reactions(self): rxn_str2 = " 1*C=CCC + 1*CCC >" assert any(rxn_string in reaction_string for rxn_string in [rxn_str1, rxn_str2]) - if self.pyo is not None: - # pyomo is slower, so don't test as many - reaction_list = scheme.multiple_error_canceling_reaction_search(n_reactions_max=5, milp_software=["pyomo"]) - assert len(reaction_list) == 5 - reaction_string = reaction_list.__repr__() - assert any(rxn_string in reaction_string for rxn_string in [rxn_str1, rxn_str2]) - def test_calculate_target_enthalpy(self): """ Test that ErrorCancelingScheme is able to calculate thermochemistry for the target species @@ -358,10 +339,6 @@ def test_calculate_target_enthalpy(self): ], ) - target_thermo, rxn_list = scheme.calculate_target_enthalpy(n_reactions_max=3, milp_software=["lpsolve"]) + target_thermo, rxn_list = scheme.calculate_target_enthalpy(n_reactions_max=3) assert target_thermo.value_si == 110000.0 assert isinstance(rxn_list[0], ErrorCancelingReaction) - - if self.pyo is not None: - target_thermo, _ = scheme.calculate_target_enthalpy(n_reactions_max=3, milp_software=["pyomo"]) - assert target_thermo.value_si == 110000.0 diff --git a/test/rmgpy/data/solvationTest.py b/test/rmgpy/data/solvationTest.py index 72d0926d552..5aceb1b1ebd 100644 --- a/test/rmgpy/data/solvationTest.py +++ b/test/rmgpy/data/solvationTest.py @@ -29,6 +29,8 @@ import os +import pytest + from rmgpy import settings from rmgpy.data.solvation import ( DatabaseError, @@ -36,14 +38,12 @@ SolvationDatabase, SolventLibrary, get_critical_temperature, - get_liquid_saturation_density, get_gas_saturation_density, + get_liquid_saturation_density, ) -from rmgpy.molecule import Molecule -from rmgpy.rmg.main import RMG -from rmgpy.rmg.main import Species from rmgpy.exceptions import InputError -import pytest +from rmgpy.molecule import Molecule +from rmgpy.rmg.main import RMG, Species class TestSoluteDatabase: @@ -141,8 +141,8 @@ def test_saturation_density(self): """ compound_name = "Hexane" temp = 400 # in K - assert round(abs(get_liquid_saturation_density(compound_name, temp) - 6383.22), 2) == 0 - assert round(abs(get_gas_saturation_density(compound_name, temp) - 162.99), 2) == 0 + assert round(abs(get_liquid_saturation_density(compound_name, temp) - 6385.15), 2) == 0 + assert round(abs(get_gas_saturation_density(compound_name, temp) - 163.02), 2) == 0 # Unsupported compound name with pytest.raises(DatabaseError): get_gas_saturation_density("Hexadecane", temp) diff --git a/test/rmgpy/molecule/resonanceTest.py b/test/rmgpy/molecule/resonanceTest.py index 19a8eb7612c..98a28f5413a 100644 --- a/test/rmgpy/molecule/resonanceTest.py +++ b/test/rmgpy/molecule/resonanceTest.py @@ -31,7 +31,6 @@ from rmgpy.molecule.molecule import Molecule from rmgpy.molecule.resonance import ( _clar_optimization, - _clar_transformation, generate_clar_structures, generate_kekule_structure, generate_optimal_aromatic_resonance_structures, @@ -1427,15 +1426,6 @@ class ClarTest: Contains unit tests for Clar structure methods. 
""" - def test_clar_transformation(self): - """Test that clarTransformation generates an aromatic ring.""" - mol = Molecule().from_smiles("c1ccccc1") - sssr = mol.get_smallest_set_of_smallest_rings() - _clar_transformation(mol, sssr[0]) - mol.update_atomtypes() - - assert mol.is_aromatic() - def test_clar_optimization(self): """Test to ensure pi electrons are conserved during optimization""" mol = Molecule().from_smiles("C1=CC=C2C=CC=CC2=C1") # Naphthalene diff --git a/test/rmgpy/rmg/mainTest.py b/test/rmgpy/rmg/mainTest.py index cea42c859cb..2a68efc73d7 100644 --- a/test/rmgpy/rmg/mainTest.py +++ b/test/rmgpy/rmg/mainTest.py @@ -32,15 +32,12 @@ import shutil from unittest.mock import patch -import pytest - import pandas as pd +import pytest -from rmgpy.rmg.main import RMG, initialize_log, make_profile_graph -from rmgpy.rmg.main import RMG_Memory -from rmgpy import get_path -from rmgpy import settings +from rmgpy import get_path, settings from rmgpy.data.rmg import RMGDatabase +from rmgpy.rmg.main import RMG, RMG_Memory, initialize_log, make_profile_graph from rmgpy.rmg.model import CoreEdgeReactionModel originalPath = get_path() @@ -195,7 +192,10 @@ def setup_class(cls): cls.outputDir = os.path.join(cls.testDir, "output_w_filters") cls.databaseDirectory = settings["database.directory"] - os.mkdir(cls.outputDir) + try: + os.mkdir(cls.outputDir) + except FileExistsError: + pass # output directory already exists initialize_log(logging.INFO, os.path.join(cls.outputDir, "RMG.log")) cls.rmg = RMG( diff --git a/test/rmgpy/solver/liquidTest.py b/test/rmgpy/solver/liquidTest.py index dd03ce323b9..3559a595b36 100644 --- a/test/rmgpy/solver/liquidTest.py +++ b/test/rmgpy/solver/liquidTest.py @@ -29,7 +29,6 @@ import os - import numpy as np from rmgpy.kinetics import Arrhenius @@ -635,5 +634,7 @@ def teardown_class(cls): import rmgpy.data.rmg rmgpy.data.rmg.database = None - - os.remove(os.path.join(cls.file_dir, "restart_from_seed.py")) + try: + os.remove(os.path.join(cls.file_dir, "restart_from_seed.py")) + except FileNotFoundError: + pass # file will not be present if any tests failed diff --git a/test/rmgpy/statmech/ndTorsionsTest.py b/test/rmgpy/statmech/ndTorsionsTest.py index 80186b1f3e3..5df907f074b 100644 --- a/test/rmgpy/statmech/ndTorsionsTest.py +++ b/test/rmgpy/statmech/ndTorsionsTest.py @@ -109,4 +109,4 @@ def test_hindered_rotor_nd(self): hdnd.read_scan() assert round(abs(hdnd.Es[0]) - 8.58538448, 4) == 0 hdnd.fit() - assert round(abs(hdnd.calc_partition_function(300.0)) - 2.899287634962152, 5) == 0 + assert round(abs(hdnd.calc_partition_function(300.0)) - 2.899287634962152, 4) == 0 diff --git a/utilities.py b/utilities.py index f84963a6324..87d39e73845 100644 --- a/utilities.py +++ b/utilities.py @@ -48,7 +48,6 @@ def check_dependencies(): print('{0:<15}{1:<15}{2}'.format('Package', 'Version', 'Location')) missing = { - 'lpsolve': _check_lpsolve(), 'openbabel': _check_openbabel(), 'pydqed': _check_pydqed(), 'pyrdl': _check_pyrdl(), @@ -80,23 +79,6 @@ def check_dependencies(): """) -def _check_lpsolve(): - """Check for lpsolve""" - missing = False - - try: - import lpsolve55 - except ImportError: - print('{0:<30}{1}'.format('lpsolve55', - 'Not found. 
Necessary for generating Clar structures for aromatic species.')) - missing = True - else: - location = lpsolve55.__file__ - print('{0:<30}{1}'.format('lpsolve55', location)) - - return missing - - def _check_openbabel(): """Check for OpenBabel""" missing = False From 0e69020b441b8a497af6c8d26729c64e002951cf Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Tue, 3 Dec 2024 15:00:24 -0500 Subject: [PATCH 10/48] dont install juliacall by default --- environment.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/environment.yml b/environment.yml index e95f06f8f79..9c6960f3b69 100644 --- a/environment.yml +++ b/environment.yml @@ -57,9 +57,6 @@ dependencies: - conda-forge::openbabel >= 3 - conda-forge::rdkit >=2022.09.1 -# general-purpose external software tools - - conda-forge::pyjuliacall # for calling julia packages - # Python tools - conda-forge::python >=3.9 # leave as GEQ so that GitHub actions can add EQ w/o breaking (contradictory deps) - conda-forge::coverage From 2e96175e8e6814ba5438160ff6af25f8865f4bfd Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Tue, 3 Dec 2024 15:02:03 -0500 Subject: [PATCH 11/48] np.bool is deprecated, bool --- test/rmgpy/kinetics/kineticsSurfaceTest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/rmgpy/kinetics/kineticsSurfaceTest.py b/test/rmgpy/kinetics/kineticsSurfaceTest.py index 5aeaf0b746e..1f67453082e 100644 --- a/test/rmgpy/kinetics/kineticsSurfaceTest.py +++ b/test/rmgpy/kinetics/kineticsSurfaceTest.py @@ -594,7 +594,7 @@ def test_is_temperature_valid(self): Test the SurfaceChargeTransfer.is_temperature_valid() method. """ T_data = np.array([200, 400, 600, 800, 1000, 1200, 1400, 1600, 1800, 4000]) - valid_data = np.array([False, True, True, True, True, True, True, True, True, False], np.bool) + valid_data = np.array([False, True, True, True, True, True, True, True, True, False], bool) for T, valid in zip(T_data, valid_data): valid0 = self.surfchargerxn_reduction.is_temperature_valid(T) assert valid0 == valid From a58d79f3a65c61fe86b5c59db69be0c7e91e7786 Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Tue, 3 Dec 2024 15:15:03 -0500 Subject: [PATCH 12/48] re-bury the julia imports that got shuffled around --- rmgpy/pdep/sls.py | 12 +++++------- rmgpy/rmg/reactionmechanismsimulator_reactors.py | 15 ++++++++++----- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/rmgpy/pdep/sls.py b/rmgpy/pdep/sls.py index ce43f659e95..12a0dd8e05a 100644 --- a/rmgpy/pdep/sls.py +++ b/rmgpy/pdep/sls.py @@ -32,12 +32,8 @@ and implementing the SLS master equation reduction method """ -from juliacall import Main - -Main.seval("using ReactionMechanismSimulator.SciMLBase") -Main.seval("using ReactionMechanismSimulator.Sundials") -import sys import logging +import sys import numpy as np import scipy.linalg @@ -50,8 +46,10 @@ NO_JULIA = False try: - from diffeqpy import de - from julia import Main + from juliacall import Main + + Main.seval("using ReactionMechanismSimulator.SciMLBase") + Main.seval("using ReactionMechanismSimulator.Sundials") except Exception as e: logging.info( f"Unable to import Julia dependencies, original error: {str(e)}" diff --git a/rmgpy/rmg/reactionmechanismsimulator_reactors.py b/rmgpy/rmg/reactionmechanismsimulator_reactors.py index 567c370d342..f40ba703d6d 100644 --- a/rmgpy/rmg/reactionmechanismsimulator_reactors.py +++ b/rmgpy/rmg/reactionmechanismsimulator_reactors.py @@ -34,15 +34,20 @@ import logging import sys -import juliacall import numpy as np -from juliacall import Main import 
rmgpy.constants as constants -Main.seval("using PythonCall") -Main.seval("using ReactionMechanismSimulator") -Main.seval("using ReactionMechanismSimulator.Sundials") +NO_JULIA = True +try: + import juliacall + from juliacall import Main + Main.seval("using PythonCall") + Main.seval("using ReactionMechanismSimulator") + Main.seval("using ReactionMechanismSimulator.Sundials") + NO_JULIA = False +except: + logging.warning("Julia import failed, RMS reactors not available.") from rmgpy import constants from rmgpy.data.kinetics.depository import DepositoryReaction From a298cb8606c2afdf9ebc8c7bfc4a6ce34eb5f579 Mon Sep 17 00:00:00 2001 From: Jackson Burns Date: Tue, 3 Dec 2024 15:27:08 -0500 Subject: [PATCH 13/48] remove initial species add - was carried over by accident during development, gone on main --- rmgpy/rmg/main.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/rmgpy/rmg/main.py b/rmgpy/rmg/main.py index f3199d09338..3f4b574c77d 100644 --- a/rmgpy/rmg/main.py +++ b/rmgpy/rmg/main.py @@ -566,9 +566,6 @@ def initialize(self, **kwargs): # Load databases self.load_database() - for spec in self.initial_species: - self.reaction_model.add_species_to_edge(spec, requires_rms=requires_rms) - for reaction_system in self.reaction_systems: if isinstance(reaction_system, RMSReactor): reaction_system.finish_termination_criteria() From c022d42809cb0108c3563a1773c370035cc35b18 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Tue, 14 Jan 2025 11:38:25 -0500 Subject: [PATCH 14/48] fix call to `get_reaction_matches` (should match other test) --- test/rmgpy/data/kinetics/familyTest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/rmgpy/data/kinetics/familyTest.py b/test/rmgpy/data/kinetics/familyTest.py index 2ce15fc2060..7cac1651728 100644 --- a/test/rmgpy/data/kinetics/familyTest.py +++ b/test/rmgpy/data/kinetics/familyTest.py @@ -864,7 +864,7 @@ def test_d_regularization_dims(self): """ test that appropriate regularization dimensions have been identified """ - template_rxn_map = self.family.get_reaction_matches(thermo_database=self.database.thermo, estimate_thermo=False) + template_rxn_map = self.family.get_reaction_matches(thermo_database=self.thermoDatabase) for entry in self.family.groups.entries.values(): if entry.children == []: From c2b79e4b0c69c87b286a90cc9aec9d72a32ac22f Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Tue, 14 Jan 2025 12:08:51 -0500 Subject: [PATCH 15/48] make the m1 mac run in x86 compat mode --- .github/workflows/CI.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 19146cb54c7..e4a9a89281d 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -72,6 +72,10 @@ jobs: - name: Checkout RMG-Py uses: actions/checkout@v4 + - name: Compel Arm-based Mac to use x86 + if: matrix.os == 'macos-latest' + run: echo "CONDA_SUBDIR=osx-64" >> $GITHUB_ENV + - name: Setup Miniforge Python ${{ matrix.python-version }} uses: conda-incubator/setup-miniconda@v3 with: From 4b6122fcf15fba5cd5ce649ac82f141d5f9e5738 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Tue, 14 Jan 2025 13:31:58 -0500 Subject: [PATCH 16/48] split the python-only test into separate CI to simplify reg test logic --- .github/workflows/CI.yml | 7 +-- .github/workflows/pure_python_ci.yml | 93 ++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/pure_python_ci.yml diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index e4a9a89281d..ff6bf85b59e 
100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -57,9 +57,8 @@ jobs: matrix: python-version: ["3.9"] os: [macos-13, macos-latest, ubuntu-latest] - test_julia: [yes, no] runs-on: ${{ matrix.os }} - name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} (Julia? ${{ matrix.test_julia }}) + name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} # skip scheduled runs from forks if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} env: @@ -115,7 +114,6 @@ jobs: # Setup Juliaup - name: Set Julia paths - if: matrix.test_julia == 'yes' run: | # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH @@ -129,18 +127,15 @@ jobs: # echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_PATH - name: Setup Juliaup - if: matrix.test_julia == 'yes' uses: julia-actions/install-juliaup@v2 with: channel: '1.9' - name: Check Julia version - if: matrix.test_julia == 'yes' run: julia --version # RMS installation and linking to Julia - name: Install and link Julia dependencies - if: matrix.test_julia == 'yes' timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). # JULIA_CONDAPKG_EXE points to the existing conda/mamba to avoid JuliaCall from installing their own. See https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-a-Conda-environment. run: | diff --git a/.github/workflows/pure_python_ci.yml b/.github/workflows/pure_python_ci.yml new file mode 100644 index 00000000000..9cc0d88e105 --- /dev/null +++ b/.github/workflows/pure_python_ci.yml @@ -0,0 +1,93 @@ +# Pure Python CI +# Runs a reduced set of tests compared to CI.yml, skips CD altogether, ensure pure-python +# mode (i.e. no Julia-based dependencies) works. 
+ +name: Pure Python Continuous Integration + +on: + schedule: + # * is a special character in YAML so you have to quote this string + - cron: "0 8 * * *" + # allow running on RMG-Py on a pushed branch, only if triggered manually + workflow_dispatch: + # runs on PRs against RMG-Py (and anywhere else, but we add this for RMG-Py) + pull_request: + +# this prevents one PR from simultaneously running multiple runners, which will clog up the queue +# and prevent other PRs from running the CI +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +env: + # if running on RMG-Py but requiring changes on an un-merged branch of RMG-database, replace + # main with the name of the branch + RMG_DATABASE_BRANCH: main + + +jobs: + build-and-test: + strategy: + fail-fast: false + matrix: + python-version: ["3.9"] + os: [macos-13, macos-latest, ubuntu-latest] + runs-on: ${{ matrix.os }} + name: ${{ matrix.os }} Build and Minimal Test Python ${{ matrix.python-version }} + # skip scheduled runs from forks + if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} + defaults: + run: + shell: bash -l {0} + steps: + - name: Checkout RMG-Py + uses: actions/checkout@v4 + + - name: Compel Arm-based Mac to use x86 + if: matrix.os == 'macos-latest' + run: echo "CONDA_SUBDIR=osx-64" >> $GITHUB_ENV + + - name: Setup Miniforge Python ${{ matrix.python-version }} + uses: conda-incubator/setup-miniconda@v3 + with: + environment-file: environment.yml + miniforge-variant: Miniforge3 + miniforge-version: latest + python-version: ${{ matrix.python-version }} + activate-environment: rmg_env + use-mamba: true + show-channel-urls: true + channels: conda-forge,cantera,rmg + + # list the environment for debugging purposes + - name: mamba info + run: | + mamba info + mamba list + + # Clone RMG-database + - name: Clone RMG-database + run: | + cd .. 
+ git clone -b $RMG_DATABASE_BRANCH https://github.com/ReactionMechanismGenerator/RMG-database.git + + # modify env variables as directed in the RMG installation instructions + - name: Set Environment Variables + run: | + RUNNER_CWD=$(pwd) + echo "PYTHONPATH=$RUNNER_CWD/RMG-Py:$PYTHONPATH" >> $GITHUB_ENV + echo "$RUNNER_CWD/RMG-Py" >> $GITHUB_PATH + + # RMG build step + - name: make RMG + run: | + make clean + make + + - name: Install Q2DTor + run: echo "" | make q2dtor + + # non-regression testing + - name: Run Unit, Functional, and Database Tests + # aggregate into one command so we only have to eat the collection time once + run: make test-all From 0bf2cf57c5f86eaa33fc5b41d87fe1aae939ad53 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 25 Jan 2025 16:38:25 -0500 Subject: [PATCH 17/48] fix `juliacall` incompatibilities; see extended - CI updates: run CI on all platforms with and without RMS, split regression tests into separate job after that so that regression is only checked on one platform - installation updates: make RMG-Py actually `pip`-installable as `reactionmechanismgenerator` to avoid having to set the `PYTHONPATH` variable, which breaks `juliacall`, add a convenience script for installing RMS - code changes: fix some small bugs in the new optional-rms setup (one missed `requires_rms` and one incorrect rebase) --- .github/workflows/CI.yml | 139 +++++++---- .github/workflows/pure_python_ci.yml | 93 ------- .gitignore | 5 + Makefile | 23 +- install_rms.sh | 31 +++ rmg.py | 10 - rmgpy/rmg/main.py | 5 +- .../reactionmechanismsimulator_reactors.py | 47 +--- setup.py | 230 +++++------------- test/conftest.py | 7 + utilities.py | 32 +-- 11 files changed, 228 insertions(+), 394 deletions(-) delete mode 100644 .github/workflows/pure_python_ci.yml create mode 100644 install_rms.sh create mode 100644 test/conftest.py diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ff6bf85b59e..ce533a0663a 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -49,7 +49,6 @@ env: # main with the name of the branch RMG_DATABASE_BRANCH: main - jobs: build-and-test: strategy: @@ -57,13 +56,16 @@ jobs: matrix: python-version: ["3.9"] os: [macos-13, macos-latest, ubuntu-latest] + include-rms: ["", "with RMS"] + exclude: + - os: macos-latest # needs Cantera 3 b/c x86 emulation breaks with Julia + include-rms: 'with RMS' + - os: macos-13 # GitHub's runners just aren't up to the task of installing Julia + include-rms: 'with RMS' runs-on: ${{ matrix.os }} - name: ${{ matrix.os }} Build and Test Python ${{ matrix.python-version }} + name: Python ${{ matrix.python-version }} ${{ matrix.os }} Build and Test ${{ matrix.include-rms }} # skip scheduled runs from forks if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} - env: - # This is true only if this is a reference case for the regression testing: - REFERENCE_JOB: ${{ github.ref == 'refs/heads/main' && github.repository == 'ReactionMechanismGenerator/RMG-Py' }} defaults: run: shell: bash -l {0} @@ -83,15 +85,16 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: rmg_env - use-mamba: true + auto-update-conda: true show-channel-urls: true channels: conda-forge,cantera,rmg + conda-remove-defaults: "true" # list the environment for debugging purposes - - name: mamba info + - name: conda info run: | - mamba info - mamba list + conda info + conda list # Clone RMG-database - name: Clone RMG-database @@ -99,57 +102,111 @@ 
jobs: cd .. git clone -b $RMG_DATABASE_BRANCH https://github.com/ReactionMechanismGenerator/RMG-database.git - # modify env variables as directed in the RMG installation instructions - - name: Set Environment Variables - run: | - RUNNER_CWD=$(pwd) - echo "PYTHONPATH=$RUNNER_CWD/RMG-Py:$PYTHONPATH" >> $GITHUB_ENV - echo "$RUNNER_CWD/RMG-Py" >> $GITHUB_PATH - # RMG build step - - name: make RMG - run: | - make clean - make - - # Setup Juliaup - - name: Set Julia paths - run: | - # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV - # echo "JULIAUP_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH - # echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_ENV - # echo "JULIA_DEPOT_PATH=$CONDA/envs/rmg_env/.julia" >> $GITHUB_PATH - # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV - # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH - # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_ENV - # echo "JULIA_CONDAPKG_EXE=$CONDA/condabin/mamba" >> $GITHUB_PATH - echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_ENV - # echo "JULIA_CONDAPKG_BACKEND=Current" >> $GITHUB_PATH + - run: make install - name: Setup Juliaup + if: matrix.include-rms == 'with RMS' uses: julia-actions/install-juliaup@v2 with: channel: '1.9' - - name: Check Julia version - run: julia --version + - name: Set some env vars + if: matrix.include-rms == 'with RMS' + run: | + # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed + echo "JULIA_CONDAPKG_BACKEND=Null" >> $GITHUB_ENV + echo "JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python" >> $GITHUB_ENV # RMS installation and linking to Julia - name: Install and link Julia dependencies + if: matrix.include-rms == 'with RMS' timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). - # JULIA_CONDAPKG_EXE points to the existing conda/mamba to avoid JuliaCall from installing their own. See https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-a-Conda-environment. + run: . 
install_rms.sh + + - name: Set some other env vars + if: matrix.include-rms == 'with RMS' run: | - mamba install conda-forge::pyjuliacall - julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' + # ensure that juliacall in Python uses the correct julia executable and packages: https://github.com/JuliaPy/PyJuliaPkg?tab=readme-ov-file#which-julia-gets-used + echo "PYTHON_JULIAPKG_EXE=$(which julia)" >> $GITHUB_ENV + echo "PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages" >> $GITHUB_ENV - name: Install Q2DTor run: echo "" | make q2dtor # non-regression testing - name: Run Unit, Functional, and Database Tests - # aggregate into one command so we only have to eat the collection time once run: make test-all + regression-test: + needs: build-and-test + runs-on: ubuntu-latest + name: Regression Test + # skip scheduled runs from forks + if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} + env: + # This is true only if this is a reference case for the regression testing: + REFERENCE_JOB: ${{ github.ref == 'refs/heads/main' && github.repository == 'ReactionMechanismGenerator/RMG-Py' }} + defaults: + run: + shell: bash -l {0} + steps: + - name: Checkout RMG-Py + uses: actions/checkout@v4 + + - name: Setup Miniforge Python 3.9 + uses: conda-incubator/setup-miniconda@v3 + with: + environment-file: environment.yml + miniforge-variant: Miniforge3 + miniforge-version: latest + python-version: 3.9 + activate-environment: rmg_env + auto-update-conda: true + show-channel-urls: true + channels: conda-forge,cantera,rmg + conda-remove-defaults: "true" + + # list the environment for debugging purposes + - name: conda info + run: | + conda info + conda list + + # Clone RMG-database + - name: Clone RMG-database + run: | + cd .. + git clone -b $RMG_DATABASE_BRANCH https://github.com/ReactionMechanismGenerator/RMG-database.git + + # RMG build step + - run: make install + + - name: Setup Juliaup + uses: julia-actions/install-juliaup@v2 + with: + channel: '1.9' + + - name: Set some env vars + run: | + # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed + echo "JULIA_CONDAPKG_BACKEND=Null" >> $GITHUB_ENV + echo "JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python" >> $GITHUB_ENV + + # RMS installation and linking to Julia + - name: Install and link Julia dependencies + timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). + run: . 
install_rms.sh + + - name: Set some other env vars + run: | + # ensure that juliacall in Python uses the correct julia executable and packages: https://github.com/JuliaPy/PyJuliaPkg?tab=readme-ov-file#which-julia-gets-used + echo "PYTHON_JULIAPKG_EXE=$(which julia)" >> $GITHUB_ENV + echo "PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages" >> $GITHUB_ENV + + - name: Install Q2DTor + run: echo "" | make q2dtor + # Regression Testing - Test Execution - name: Regression Tests - Execution id: regression-execution @@ -339,7 +396,7 @@ jobs: - name: Code coverage install and run if: success() || ( failure() && steps.regression-execution.conclusion == 'success' ) run: | - mamba install -y -c conda-forge codecov + conda install -y -c conda-forge codecov codecov build-and-push-docker: diff --git a/.github/workflows/pure_python_ci.yml b/.github/workflows/pure_python_ci.yml deleted file mode 100644 index 9cc0d88e105..00000000000 --- a/.github/workflows/pure_python_ci.yml +++ /dev/null @@ -1,93 +0,0 @@ -# Pure Python CI -# Runs a reduced set of tests compared to CI.yml, skips CD altogether, ensure pure-python -# mode (i.e. no Julia-based dependencies) works. - -name: Pure Python Continuous Integration - -on: - schedule: - # * is a special character in YAML so you have to quote this string - - cron: "0 8 * * *" - # allow running on RMG-Py on a pushed branch, only if triggered manually - workflow_dispatch: - # runs on PRs against RMG-Py (and anywhere else, but we add this for RMG-Py) - pull_request: - -# this prevents one PR from simultaneously running multiple runners, which will clog up the queue -# and prevent other PRs from running the CI -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -env: - # if running on RMG-Py but requiring changes on an un-merged branch of RMG-database, replace - # main with the name of the branch - RMG_DATABASE_BRANCH: main - - -jobs: - build-and-test: - strategy: - fail-fast: false - matrix: - python-version: ["3.9"] - os: [macos-13, macos-latest, ubuntu-latest] - runs-on: ${{ matrix.os }} - name: ${{ matrix.os }} Build and Minimal Test Python ${{ matrix.python-version }} - # skip scheduled runs from forks - if: ${{ !( github.repository != 'ReactionMechanismGenerator/RMG-Py' && github.event_name == 'schedule' ) }} - defaults: - run: - shell: bash -l {0} - steps: - - name: Checkout RMG-Py - uses: actions/checkout@v4 - - - name: Compel Arm-based Mac to use x86 - if: matrix.os == 'macos-latest' - run: echo "CONDA_SUBDIR=osx-64" >> $GITHUB_ENV - - - name: Setup Miniforge Python ${{ matrix.python-version }} - uses: conda-incubator/setup-miniconda@v3 - with: - environment-file: environment.yml - miniforge-variant: Miniforge3 - miniforge-version: latest - python-version: ${{ matrix.python-version }} - activate-environment: rmg_env - use-mamba: true - show-channel-urls: true - channels: conda-forge,cantera,rmg - - # list the environment for debugging purposes - - name: mamba info - run: | - mamba info - mamba list - - # Clone RMG-database - - name: Clone RMG-database - run: | - cd .. 
- git clone -b $RMG_DATABASE_BRANCH https://github.com/ReactionMechanismGenerator/RMG-database.git - - # modify env variables as directed in the RMG installation instructions - - name: Set Environment Variables - run: | - RUNNER_CWD=$(pwd) - echo "PYTHONPATH=$RUNNER_CWD/RMG-Py:$PYTHONPATH" >> $GITHUB_ENV - echo "$RUNNER_CWD/RMG-Py" >> $GITHUB_PATH - - # RMG build step - - name: make RMG - run: | - make clean - make - - - name: Install Q2DTor - run: echo "" | make q2dtor - - # non-regression testing - - name: Run Unit, Functional, and Database Tests - # aggregate into one command so we only have to eat the collection time once - run: make test-all diff --git a/.gitignore b/.gitignore index 38d73e2bc18..723a18e5cbc 100644 --- a/.gitignore +++ b/.gitignore @@ -4,6 +4,9 @@ # ################################################################################ +# build dir +reactionmechanismgenerator.egg-info/ + # MacOS files .DS_Store @@ -11,6 +14,8 @@ *.pyc *.so *.pyd +# and intermediate source files +*.c # Image files generated by RMG *.png diff --git a/Makefile b/Makefile index 06055daa3a7..36c0f7b13db 100644 --- a/Makefile +++ b/Makefile @@ -7,28 +7,13 @@ CC=gcc CXX=g++ -.PHONY : all minimal main solver check pycheck arkane clean install decython documentation test q2dtor +.PHONY : all check clean install decython documentation test q2dtor -all: pycheck main solver check - -minimal: - python setup.py build_ext minimal --inplace --build-temp . - -main: - python setup.py build_ext main --inplace --build-temp . - -solver: - @ python utilities.py check-pydas - python setup.py build_ext solver --inplace --build-temp . - -arkane: - python setup.py build_ext arkane --inplace --build-temp . +all: check install check check: @ python utilities.py check-dependencies - -pycheck: - @ python utilities.py check-python + @ python utilities.py check-pydas documentation: $(MAKE) -C documentation html @@ -42,7 +27,7 @@ clean-solver: install: @ python utilities.py check-pydas - python setup.py install + python -m pip install -vv --no-cache-dir -e . q2dtor: @ echo -e "\nInstalling Q2DTor...\n" diff --git a/install_rms.sh b/install_rms.sh new file mode 100644 index 00000000000..57f495c6ab7 --- /dev/null +++ b/install_rms.sh @@ -0,0 +1,31 @@ +# install_rms.sh +# +# Convenience script to install ReactionMechanismSimulator inside your rmg_env conda environment +# +# Note that you will have to manually install juliaup before running this script: +# curl -fsSL https://install.julialang.org | sh +# # restart shell +# juliaup add 1.9 +# juliaup default 1.9 +# juliaup remove release + + +conda install -y conda-forge::pyjuliacall + +export JULIA_CONDAPKG_BACKEND=Null +export JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python + +julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator; Pkg.instantiate()' || echo "RMS install error - continuing anyway ¯\_(ツ)_/¯" + +export PYTHON_JULIAPKG_EXE=$(which julia) +export PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages + +echo "Copy the following text into your terminal profile (.bashrc, .zshrc, etc.): + +export JULIA_CONDAPKG_BACKEND=Null +export JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python +export PYTHON_JULIAPKG_EXE=$(which julia) +export PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages + +or otherwise run these 4 commands when first opening a terminal that will run RMG requiring RMS. 
+""" diff --git a/rmg.py b/rmg.py index 0b27098b816..6127d761f80 100644 --- a/rmg.py +++ b/rmg.py @@ -34,16 +34,6 @@ import os.path import logging -# Before importing any RMG modules, check Python version -try: - import utilities -except ImportError: # This is likely the binary install version, which does not include utilities - # If this is in fact the binary version, conda has ensured that the python version is correct. - # It is okay to skip this test here - utilities = None -else: - utilities.check_python() - import rmgpy from rmgpy.rmg.main import RMG, initialize_log, process_profile_stats, make_profile_graph from rmgpy.util import parse_command_line_arguments diff --git a/rmgpy/rmg/main.py b/rmgpy/rmg/main.py index 3f4b574c77d..1d9cb8d55f5 100644 --- a/rmgpy/rmg/main.py +++ b/rmgpy/rmg/main.py @@ -514,8 +514,8 @@ def initialize(self, **kwargs): # Check if ReactionMechanismSimulator reactors are being used # if RMS is not installed but the user attempted to use it, the load_input_file would have failed - # if RMS is installed but they did not use it, we can avoid extra work # if RMS is not installed and they did not use it, we avoid calling certain functions that would raise an error + # if RMS is installed but they did not use it, we can avoid extra work requires_rms = any(isinstance(reactor_system, RMSReactor) for reactor_system in self.reaction_systems) if self.surface_site_density: @@ -654,7 +654,7 @@ def initialize(self, **kwargs): if vapor_liquid_mass_transfer.enabled: spec.get_liquid_volumetric_mass_transfer_coefficient_data() spec.get_henry_law_constant_data() - self.reaction_model.add_species_to_edge(spec) + self.reaction_model.add_species_to_edge(spec, requires_rms=requires_rms) # Seed mechanisms: add species and reactions from seed mechanism # DON'T generate any more reactions for the seed species at this time @@ -795,6 +795,7 @@ def execute(self, initialize=True, **kwargs): ``initialize`` is a ``bool`` type flag used to determine whether to call self.initialize() """ + requires_rms=False if initialize: requires_rms = self.initialize(**kwargs) diff --git a/rmgpy/rmg/reactionmechanismsimulator_reactors.py b/rmgpy/rmg/reactionmechanismsimulator_reactors.py index f40ba703d6d..77b0ffb8b01 100644 --- a/rmgpy/rmg/reactionmechanismsimulator_reactors.py +++ b/rmgpy/rmg/reactionmechanismsimulator_reactors.py @@ -32,32 +32,13 @@ """ import itertools import logging -import sys import numpy as np -import rmgpy.constants as constants - -NO_JULIA = True -try: - import juliacall - from juliacall import Main - Main.seval("using PythonCall") - Main.seval("using ReactionMechanismSimulator") - Main.seval("using ReactionMechanismSimulator.Sundials") - NO_JULIA = False -except: - logging.warning("Julia import failed, RMS reactors not available.") - -from rmgpy import constants -from rmgpy.data.kinetics.depository import DepositoryReaction -from rmgpy.data.kinetics.family import TemplateReaction from rmgpy.data.solvation import SolventData from rmgpy.kinetics.arrhenius import ( Arrhenius, - ArrheniusBM, ArrheniusChargeTransfer, - ArrheniusEP, Marcus, MultiArrhenius, MultiPDepArrhenius, @@ -65,7 +46,6 @@ ) from rmgpy.kinetics.chebyshev import Chebyshev from rmgpy.kinetics.falloff import Lindemann, ThirdBody, Troe -from rmgpy.kinetics.kineticsdata import KineticsData from rmgpy.kinetics.surface import StickingCoefficient, SurfaceChargeTransfer from rmgpy.molecule.fragment import Fragment from rmgpy.reaction import Reaction @@ -77,7 +57,17 @@ from rmgpy.species import Species from rmgpy.thermo.nasa 
import NASA, NASAPolynomial from rmgpy.thermo.thermodata import ThermoData -from rmgpy.thermo.wilhoit import Wilhoit + + +NO_JULIA = True +try: + from juliacall import Main + Main.seval("using PythonCall") + Main.seval("using ReactionMechanismSimulator") + Main.seval("using ReactionMechanismSimulator.Sundials") + NO_JULIA = False +except Exception as e: + logging.warning(f"Julia import failed, RMS reactors not available.\nException: {str(e)}\nStacktrace:\n{e.__traceback__}") def to_julia(obj): @@ -104,21 +94,6 @@ def to_julia(obj): else: # Other native Python project does not need special conversion. return obj -NO_JULIA = False -try: - if __debug__: - from os.path import abspath, dirname, exists, join - - from julia.api import Julia - path_rms = dirname(dirname(dirname(abspath(__file__)))) - jl = Julia(sysimage=join(path_rms, "rms.so")) if exists(join(path_rms, "rms.so")) else Julia(compiled_modules=False) - from diffeqpy import de - from julia import Main - from pyrms import rms -except Exception as e: - logging.info("Unable to import Julia dependencies, original error: " + str(e) + ". RMS features will not be available on this execution.") - NO_JULIA = True - class PhaseSystem: """ diff --git a/setup.py b/setup.py index 604a856eb19..4bebd5a5566 100644 --- a/setup.py +++ b/setup.py @@ -27,16 +27,7 @@ # # ############################################################################### -import sys -import os -from collections import OrderedDict - -try: - from distutils.core import setup - from distutils.extension import Extension -except ImportError: - print('The distutils package is required to build or install RMG Py.') - raise +from setuptools import setup try: from Cython.Build import cythonize @@ -51,8 +42,10 @@ print('NumPy (http://numpy.scipy.org/) is required to build or install RMG Py.') raise +from setuptools import find_packages + # Create annotated HTML files for each of the Cython modules -Options.annotate = True +Options.annotate = False directives = { # Set input language version to python 3 @@ -63,143 +56,68 @@ # 'embedsignature': True, } -################################################################################ - -main_ext_modules = [ +ext_modules = [ # RMG - Extension('rmgpy.rmgobject', ['rmgpy/rmgobject.pyx']), + 'rmgpy/rmgobject.pyx', # Kinetics - Extension('rmgpy.kinetics.arrhenius', ['rmgpy/kinetics/arrhenius.pyx']), - Extension('rmgpy.kinetics.chebyshev', ['rmgpy/kinetics/chebyshev.pyx']), - Extension('rmgpy.kinetics.kineticsdata', ['rmgpy/kinetics/kineticsdata.pyx']), - Extension('rmgpy.kinetics.falloff', ['rmgpy/kinetics/falloff.pyx']), - Extension('rmgpy.kinetics.model', ['rmgpy/kinetics/model.pyx']), - Extension('rmgpy.kinetics.tunneling', ['rmgpy/kinetics/tunneling.pyx']), - Extension('rmgpy.kinetics.surface', ['rmgpy/kinetics/surface.pyx']), - Extension('rmgpy.kinetics.uncertainties', ['rmgpy/kinetics/uncertainties.pyx']), + 'rmgpy/kinetics/arrhenius.pyx', + 'rmgpy/kinetics/chebyshev.pyx', + 'rmgpy/kinetics/kineticsdata.pyx', + 'rmgpy/kinetics/falloff.pyx', + 'rmgpy/kinetics/model.pyx', + 'rmgpy/kinetics/tunneling.pyx', + 'rmgpy/kinetics/surface.pyx', + 'rmgpy/kinetics/uncertainties.pyx', # Molecules and molecular representations - Extension('rmgpy.molecule.atomtype', ['rmgpy/molecule/atomtype.py'], include_dirs=['.']), - Extension('rmgpy.molecule.element', ['rmgpy/molecule/element.py'], include_dirs=['.']), - Extension('rmgpy.molecule.graph', ['rmgpy/molecule/graph.pyx'], include_dirs=['.']), - Extension('rmgpy.molecule.group', 
['rmgpy/molecule/group.py'], include_dirs=['.']), - Extension('rmgpy.molecule.molecule', ['rmgpy/molecule/molecule.py'], include_dirs=['.']), - Extension('rmgpy.molecule.symmetry', ['rmgpy/molecule/symmetry.py'], include_dirs=['.']), - Extension('rmgpy.molecule.vf2', ['rmgpy/molecule/vf2.pyx'], include_dirs=['.']), - Extension('rmgpy.molecule.converter', ['rmgpy/molecule/converter.py'], include_dirs=['.']), - Extension('rmgpy.molecule.translator', ['rmgpy/molecule/translator.py'], include_dirs=['.']), - Extension('rmgpy.molecule.util', ['rmgpy/molecule/util.py'], include_dirs=['.']), - Extension('rmgpy.molecule.inchi', ['rmgpy/molecule/inchi.py'], include_dirs=['.']), - Extension('rmgpy.molecule.resonance', ['rmgpy/molecule/resonance.py'], include_dirs=['.']), - Extension('rmgpy.molecule.pathfinder', ['rmgpy/molecule/pathfinder.py'], include_dirs=['.']), - Extension('rmgpy.molecule.kekulize', ['rmgpy/molecule/kekulize.pyx'], include_dirs=['.']), - # Pressure dependence - Extension('rmgpy.pdep.collision', ['rmgpy/pdep/collision.pyx']), - Extension('rmgpy.pdep.configuration', ['rmgpy/pdep/configuration.pyx']), - Extension('rmgpy.pdep.me', ['rmgpy/pdep/me.pyx']), - Extension('rmgpy.pdep.msc', ['rmgpy/pdep/msc.pyx']), - Extension('rmgpy.pdep.reaction', ['rmgpy/pdep/reaction.pyx']), - Extension('rmgpy.pdep.rs', ['rmgpy/pdep/rs.pyx']), - Extension('rmgpy.pdep.cse', ['rmgpy/pdep/cse.pyx']), - # Statistical mechanics - Extension('rmgpy.statmech.conformer', ['rmgpy/statmech/conformer.pyx']), - Extension('rmgpy.statmech.mode', ['rmgpy/statmech/mode.pyx']), - Extension('rmgpy.statmech.rotation', ['rmgpy/statmech/rotation.pyx']), - Extension('rmgpy.statmech.schrodinger', ['rmgpy/statmech/schrodinger.pyx']), - Extension('rmgpy.statmech.torsion', ['rmgpy/statmech/torsion.pyx']), - Extension('rmgpy.statmech.translation', ['rmgpy/statmech/translation.pyx']), - Extension('rmgpy.statmech.vibration', ['rmgpy/statmech/vibration.pyx']), - # Thermodynamics - Extension('rmgpy.thermo.thermodata', ['rmgpy/thermo/thermodata.pyx']), - Extension('rmgpy.thermo.model', ['rmgpy/thermo/model.pyx']), - Extension('rmgpy.thermo.nasa', ['rmgpy/thermo/nasa.pyx']), - Extension('rmgpy.thermo.wilhoit', ['rmgpy/thermo/wilhoit.pyx']), - # Miscellaneous - Extension('rmgpy.constants', ['rmgpy/constants.py'], include_dirs=['.']), - Extension('rmgpy.quantity', ['rmgpy/quantity.py'], include_dirs=['.']), - Extension('rmgpy.reaction', ['rmgpy/reaction.py'], include_dirs=['.']), - Extension('rmgpy.species', ['rmgpy/species.py'], include_dirs=['.']), - Extension('rmgpy.chemkin', ['rmgpy/chemkin.pyx'], include_dirs=['.']), -] - -solver_ext_modules = [ - Extension('rmgpy.solver.base', ['rmgpy/solver/base.pyx'], include_dirs=['.']), - Extension('rmgpy.solver.simple', ['rmgpy/solver/simple.pyx'], include_dirs=['.']), - Extension('rmgpy.solver.liquid', ['rmgpy/solver/liquid.pyx'], include_dirs=['.']), - Extension('rmgpy.solver.mbSampled', ['rmgpy/solver/mbSampled.pyx'], include_dirs=['.']), - Extension('rmgpy.solver.surface', ['rmgpy/solver/surface.pyx'], include_dirs=['.']), -] - -arkane_ext_modules = [ - # RMG - Extension('rmgpy.rmgobject', ['rmgpy/rmgobject.pyx']), - # Kinetics - Extension('rmgpy.kinetics.arrhenius', ['rmgpy/kinetics/arrhenius.pyx']), - Extension('rmgpy.kinetics.chebyshev', ['rmgpy/kinetics/chebyshev.pyx']), - Extension('rmgpy.kinetics.kineticsdata', ['rmgpy/kinetics/kineticsdata.pyx']), - Extension('rmgpy.kinetics.falloff', ['rmgpy/kinetics/falloff.pyx']), - Extension('rmgpy.kinetics.model', ['rmgpy/kinetics/model.pyx']), 
- Extension('rmgpy.kinetics.tunneling', ['rmgpy/kinetics/tunneling.pyx']), + 'rmgpy/molecule/atomtype.py', + 'rmgpy/molecule/element.py', + 'rmgpy/molecule/graph.pyx', + 'rmgpy/molecule/group.py', + 'rmgpy/molecule/molecule.py', + 'rmgpy/molecule/symmetry.py', + 'rmgpy/molecule/vf2.pyx', + 'rmgpy/molecule/converter.py', + 'rmgpy/molecule/translator.py', + 'rmgpy/molecule/util.py', + 'rmgpy/molecule/inchi.py', + 'rmgpy/molecule/resonance.py', + 'rmgpy/molecule/pathfinder.py', + 'rmgpy/molecule/kekulize.pyx', # Pressure dependence - Extension('rmgpy.pdep.collision', ['rmgpy/pdep/collision.pyx']), - Extension('rmgpy.pdep.configuration', ['rmgpy/pdep/configuration.pyx']), - Extension('rmgpy.pdep.me', ['rmgpy/pdep/me.pyx']), - Extension('rmgpy.pdep.msc', ['rmgpy/pdep/msc.pyx']), - Extension('rmgpy.pdep.reaction', ['rmgpy/pdep/reaction.pyx']), - Extension('rmgpy.pdep.rs', ['rmgpy/pdep/rs.pyx']), - Extension('rmgpy.pdep.cse', ['rmgpy/pdep/cse.pyx']), + 'rmgpy/pdep/collision.pyx', + 'rmgpy/pdep/configuration.pyx', + 'rmgpy/pdep/me.pyx', + 'rmgpy/pdep/msc.pyx', + 'rmgpy/pdep/reaction.pyx', + 'rmgpy/pdep/rs.pyx', + 'rmgpy/pdep/cse.pyx', # Statistical mechanics - Extension('rmgpy.statmech.conformer', ['rmgpy/statmech/conformer.pyx']), - Extension('rmgpy.statmech.mode', ['rmgpy/statmech/mode.pyx']), - Extension('rmgpy.statmech.rotation', ['rmgpy/statmech/rotation.pyx']), - Extension('rmgpy.statmech.schrodinger', ['rmgpy/statmech/schrodinger.pyx']), - Extension('rmgpy.statmech.torsion', ['rmgpy/statmech/torsion.pyx']), - Extension('rmgpy.statmech.translation', ['rmgpy/statmech/translation.pyx']), - Extension('rmgpy.statmech.vibration', ['rmgpy/statmech/vibration.pyx']), + 'rmgpy/statmech/conformer.pyx', + 'rmgpy/statmech/mode.pyx', + 'rmgpy/statmech/rotation.pyx', + 'rmgpy/statmech/schrodinger.pyx', + 'rmgpy/statmech/torsion.pyx', + 'rmgpy/statmech/translation.pyx', + 'rmgpy/statmech/vibration.pyx', # Thermodynamics - Extension('rmgpy.thermo.thermodata', ['rmgpy/thermo/thermodata.pyx']), - Extension('rmgpy.thermo.model', ['rmgpy/thermo/model.pyx']), - Extension('rmgpy.thermo.nasa', ['rmgpy/thermo/nasa.pyx']), - Extension('rmgpy.thermo.wilhoit', ['rmgpy/thermo/wilhoit.pyx']), + 'rmgpy/thermo/thermodata.pyx', + 'rmgpy/thermo/model.pyx', + 'rmgpy/thermo/nasa.pyx', + 'rmgpy/thermo/wilhoit.pyx', # Miscellaneous - Extension('rmgpy.constants', ['rmgpy/constants.py'], include_dirs=['.']), - Extension('rmgpy.quantity', ['rmgpy/quantity.py'], include_dirs=['.']), + 'rmgpy/constants.py', + 'rmgpy/quantity.py', + 'rmgpy/reaction.py', + 'rmgpy/species.py', + 'rmgpy/chemkin.pyx', + # solvers + 'rmgpy/solver/base.pyx', + 'rmgpy/solver/simple.pyx', + 'rmgpy/solver/liquid.pyx', + 'rmgpy/solver/mbSampled.pyx', + 'rmgpy/solver/surface.pyx', ] -################################################################################ - -ext_modules = [] -if 'install' in sys.argv: - # This is so users can still do simply `python setup.py install` - ext_modules.extend(main_ext_modules) - ext_modules.extend(solver_ext_modules) -if 'main' in sys.argv: - # This is for `python setup.py build_ext main` - sys.argv.remove('main') - ext_modules.extend(main_ext_modules) -if 'solver' in sys.argv: - # This is for `python setup.py build_ext solver` - sys.argv.remove('solver') - ext_modules.extend(solver_ext_modules) -if 'arkane' in sys.argv: - # This is for `python setup.py build_ext arkane` - sys.argv.remove('arkane') - ext_modules.extend(main_ext_modules) - ext_modules.extend(arkane_ext_modules) -if 'minimal' in sys.argv: - # This starts with 
the full install list, but removes anything that has a pure python mode - # i.e. in only includes things whose source is .pyx - sys.argv.remove('minimal') - temporary_list = [] - temporary_list.extend(main_ext_modules) - temporary_list.extend(solver_ext_modules) - for module in temporary_list: - for source in module.sources: - if os.path.splitext(source)[1] == '.pyx': - ext_modules.append(module) - -# Remove duplicates while preserving order: -ext_modules = list(OrderedDict.fromkeys(ext_modules)) - scripts = [ 'Arkane.py', 'rmg.py', @@ -215,41 +133,27 @@ 'scripts/standardizeModelSpeciesNames.py', 'scripts/thermoEstimator.py', 'scripts/isotopes.py', - 'testing/databaseTest.py', ] -modules = [] -for root, dirs, files in os.walk('rmgpy'): - if 'test_data' in root: - continue - for f in files: - if f.endswith('.py') or f.endswith('.pyx'): - if 'Test' not in f and '__init__' not in f: - module = 'rmgpy' + root.partition('rmgpy')[-1].replace('/', '.') + '.' + f.partition('.py')[0] - modules.append(module) -for root, dirs, files in os.walk('arkane'): - if 'data' in root: - continue - for f in files: - if f.endswith('.py') or f.endswith('.pyx'): - if 'Test' not in f and '__init__' not in f: - module = 'arkane' + root.partition('arkane')[-1].replace('/', '.') + '.' + f.partition('.py')[0] - modules.append(module) - # Read the version number exec(open('rmgpy/version.py').read()) # Initiate the build and/or installation setup( - name='RMG-Py', + name='reactionmechanismgenerator', version=__version__, description='Reaction Mechanism Generator', author='William H. Green and the RMG Team', author_email='rmg_dev@mit.edu', url='http://reactionmechanismgenerator.github.io', - packages=['rmgpy', 'arkane'], - py_modules=modules, + python_requires='>=3.9,<3.10', + packages=find_packages(where='.', include=["rmgpy*"]) + find_packages(where='.', include=["arkane*"]), scripts=scripts, - ext_modules=cythonize(ext_modules, build_dir='build', compiler_directives=directives), - include_dirs=['.', numpy.get_include()], + include_package_data=True, + package_data={ + "": ["*.pxd"], + }, + ext_modules=cythonize(ext_modules, compiler_directives=directives), + include_dirs=numpy.get_include(), + install_requires=["Cython", "numpy"], ) diff --git a/test/conftest.py b/test/conftest.py new file mode 100644 index 00000000000..e8ce4f9c01d --- /dev/null +++ b/test/conftest.py @@ -0,0 +1,7 @@ +import multiprocessing + +multiprocessing.set_start_method('fork') + +from openbabel import pybel + +pybel.ob.obErrorLog.SetOutputLevel(0) diff --git a/utilities.py b/utilities.py index 87d39e73845..10178c10dfc 100644 --- a/utilities.py +++ b/utilities.py @@ -59,20 +59,10 @@ def check_dependencies(): print(""" There are missing dependencies as listed above. Please install them before proceeding. -Using Anaconda, these dependencies can be individually installed from the RMG channel as follows: - - conda install -c rmg [package name] -{0} -You can alternatively update your environment and install all missing dependencies as follows: - conda env update -f environment.yml Be sure to activate your conda environment (rmg_env by default) before installing or updating. -""".format(""" -RDKit should be installed from the RDKit channel instead: - - conda install -c rdkit rdkit -""" if missing['rdkit'] else '')) +""") else: print(""" Everything was found :) @@ -228,21 +218,6 @@ def check_pydas(): elif dassl: f.write('DEF DASPK = 0\n') - -def check_python(): - """ - Check that Python 3 is in the environment. 
- """ - major = sys.version_info.major - minor = sys.version_info.minor - if not (major == 3 and minor >= 7): - sys.exit('\nRMG-Py requires Python 3.7 or higher. You are using Python {0}.{1}.\n\n' - 'If you are using Anaconda, you should create a new environment using\n\n' - ' conda env create -f environment.yml\n\n' - 'If you have an existing rmg_env, you can remove it using\n\n' - ' conda remove --name rmg_env --all\n'.format(major, minor)) - - def clean(subdirectory=''): """ Removes files generated during compilation. @@ -253,7 +228,7 @@ def clean(subdirectory=''): if platform.system() == 'Windows': extensions = ['.pyd', '.pyc'] else: - extensions = ['.so', '.pyc'] + extensions = ['.so', '.pyc', ".c"] # Remove temporary build files print('Removing build directory...') @@ -393,7 +368,6 @@ def replace_header(oldfile): parser.add_argument('command', metavar='COMMAND', type=str, choices=['check-dependencies', 'check-pydas', - 'check-python', 'clean', 'clean-solver', 'update-headers'], @@ -405,8 +379,6 @@ def replace_header(oldfile): check_dependencies() elif args.command == 'check-pydas': check_pydas() - elif args.command == 'check-python': - check_python() elif args.command == 'clean': clean() elif args.command == 'clean-solver': From 9a2e6f4379cff2ecda24097654c9e75241ce7ecd Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 25 Jan 2025 16:44:47 -0500 Subject: [PATCH 18/48] docker build should wait for regression test --- .github/workflows/CI.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index ce533a0663a..2d09a850fd8 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -406,7 +406,7 @@ jobs: # who knows ¯\_(ツ)_/¯ # # taken from https://github.com/docker/build-push-action - needs: build-and-test + needs: regression-test runs-on: ubuntu-latest if: github.ref == 'refs/heads/main' && github.repository == 'ReactionMechanismGenerator/RMG-Py' steps: From e2dbd17de7edce86129a16ec2d55b6b1f72d5a23 Mon Sep 17 00:00:00 2001 From: jonwzheng Date: Mon, 27 Jan 2025 17:01:08 -0500 Subject: [PATCH 19/48] change liquid initial conditions to surface initial conditions for ConstantTLiquidSurfaceReactor --- rmgpy/rmg/reactionmechanismsimulator_reactors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rmgpy/rmg/reactionmechanismsimulator_reactors.py b/rmgpy/rmg/reactionmechanismsimulator_reactors.py index 77b0ffb8b01..d5938581153 100644 --- a/rmgpy/rmg/reactionmechanismsimulator_reactors.py +++ b/rmgpy/rmg/reactionmechanismsimulator_reactors.py @@ -527,7 +527,7 @@ def generate_reactor(self, phase_system): cat_constant_species = [cspc for cspc in self.const_spc_names if cspc in [spc.name for spc in surf.species]] domainliq, y0liq, pliq = Main.ConstantTVDomain(phase=liq, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(liq_constant_species)) domaincat, y0cat, pcat = Main.ConstantTAPhiDomain( - phase=surf, initialconds=to_julia(liq_initial_cond), constantspecies=to_julia(cat_constant_species), + phase=surf, initialconds=to_julia(self.initial_conditions["surface"]), constantspecies=to_julia(cat_constant_species), ) if interface.reactions == []: inter, pinter = Main.ReactiveInternalInterfaceConstantTPhi( From 6a0d5b4d779c4d18b0fb6605be2fefd39561da18 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:25:33 -0500 Subject: [PATCH 20/48] forbid incompatible numpy version --- environment.yml | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index 9c6960f3b69..b2a74c3bc22 100644 --- a/environment.yml +++ b/environment.yml @@ -63,7 +63,7 @@ dependencies: - conda-forge::cython >=0.25.2 - conda-forge::scikit-learn - conda-forge::scipy >=1.9 - - conda-forge::numpy >=1.10.0 + - conda-forge::numpy >=1.10.0,<2 - conda-forge::pydot - conda-forge::jinja2 - conda-forge::jupyter From afc90b4b1e19388a340ddf3092aa83f2d34ec5a3 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:25:56 -0500 Subject: [PATCH 21/48] run ci on arm macs natively --- .github/workflows/CI.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 2d09a850fd8..14b8d50f048 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -58,8 +58,6 @@ jobs: os: [macos-13, macos-latest, ubuntu-latest] include-rms: ["", "with RMS"] exclude: - - os: macos-latest # needs Cantera 3 b/c x86 emulation breaks with Julia - include-rms: 'with RMS' - os: macos-13 # GitHub's runners just aren't up to the task of installing Julia include-rms: 'with RMS' runs-on: ${{ matrix.os }} @@ -73,10 +71,6 @@ jobs: - name: Checkout RMG-Py uses: actions/checkout@v4 - - name: Compel Arm-based Mac to use x86 - if: matrix.os == 'macos-latest' - run: echo "CONDA_SUBDIR=osx-64" >> $GITHUB_ENV - - name: Setup Miniforge Python ${{ matrix.python-version }} uses: conda-incubator/setup-miniconda@v3 with: From 143bb6698d324e7dc6a50ba7f2a3c23c3d80da01 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:26:26 -0500 Subject: [PATCH 22/48] get cantera from conda-forge, which has arm mac binaries https://anaconda.org/conda-forge/cantera/files?page=3&version=2.6.0 --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index b2a74c3bc22..1db3e2bd2fa 100644 --- a/environment.yml +++ b/environment.yml @@ -50,7 +50,7 @@ dependencies: # external software tools for chemistry - conda-forge::coolprop - - cantera::cantera =2.6 + - conda-forge::cantera =2.6 - conda-forge::mopac # see https://github.com/ReactionMechanismGenerator/RMG-Py/pull/2639#issuecomment-2050292972 - conda-forge::cclib >=1.6.3,<1.9 From 821ec6f2101d068439ec13e5c59b03ac0b7730e4 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:31:45 -0500 Subject: [PATCH 23/48] remove cantera channel from environment file --- environment.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/environment.yml b/environment.yml index 1db3e2bd2fa..7f2a1a7103a 100644 --- a/environment.yml +++ b/environment.yml @@ -25,7 +25,6 @@ name: rmg_env channels: - conda-forge - - cantera - rmg dependencies: # System-level dependencies - we could install these at the OS level From 6fcf8680c3f672797151e4d9587de80814ca0fba Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 12:34:52 -0500 Subject: [PATCH 24/48] don't patch the channels in the CI this is handled in the env file --- .github/workflows/CI.yml | 2 -- .github/workflows/docs.yml | 1 - 2 files changed, 3 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 14b8d50f048..1fa7d32b7ed 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -81,7 +81,6 @@ jobs: activate-environment: rmg_env auto-update-conda: true 
show-channel-urls: true - channels: conda-forge,cantera,rmg conda-remove-defaults: "true" # list the environment for debugging purposes @@ -158,7 +157,6 @@ jobs: activate-environment: rmg_env auto-update-conda: true show-channel-urls: true - channels: conda-forge,cantera,rmg conda-remove-defaults: "true" # list the environment for debugging purposes diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index df149a557d7..8c22acd7cfc 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -36,7 +36,6 @@ jobs: activate-environment: rmg_env use-mamba: true show-channel-urls: true - channels: conda-forge,cantera,rmg - name: Make libtiff Symlink to Avoid Runner Bug run: | # This action may need to be removed/adjusted in future runs. From 92b23b3cdbfb09e5a3c46d3afe2b7d16e2d2aeac Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Tue, 25 Feb 2025 14:07:46 -0500 Subject: [PATCH 25/48] change lt to leq to account for slightly different arm mac floating point result --- test/rmgpy/solver/liquidTest.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/rmgpy/solver/liquidTest.py b/test/rmgpy/solver/liquidTest.py index 3559a595b36..1dc2c5eb2af 100644 --- a/test/rmgpy/solver/liquidTest.py +++ b/test/rmgpy/solver/liquidTest.py @@ -167,21 +167,21 @@ def test_compute_flux(self): # Check that we're computing the species fluxes correctly for i in range(t.shape[0]): - assert abs(reaction_rates[i, 0] - species_rates[i, 0]) < abs( + assert abs(reaction_rates[i, 0] - species_rates[i, 0]) <= abs( 1e-6 * reaction_rates[i, 0] ) - assert abs(reaction_rates[i, 0] - -species_rates[i, 1]) < abs( + assert abs(reaction_rates[i, 0] - -species_rates[i, 1]) <= abs( 1e-6 * reaction_rates[i, 0] ) - assert abs(reaction_rates[i, 0] - -species_rates[i, 2]) < abs( + assert abs(reaction_rates[i, 0] - -species_rates[i, 2]) <= abs( 1e-6 * reaction_rates[i, 0] ) - assert abs(reaction_rates[i, 0] - species_rates[i, 3]) < abs( + assert abs(reaction_rates[i, 0] - species_rates[i, 3]) <= abs( 1e-6 * reaction_rates[i, 0] ) # Check that we've reached equilibrium - assert abs(reaction_rates[-1, 0] - 0.0) < 1e-2 + assert abs(reaction_rates[-1, 0] - 0.0) <= 1e-2 def test_jacobian(self): """ From cb167a2ae07f5ea4baf000f2f8018cc481c2b165 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Wed, 26 Feb 2025 08:47:50 -0500 Subject: [PATCH 26/48] update devloper installation process docs - apple silicon now does native install - don't touch `PYTHONPATH` - points towards the new RMS installation script --- .../rmg/installation/anacondaDeveloper.rst | 27 +++++-------------- 1 file changed, 7 insertions(+), 20 deletions(-) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 3ea92591537..336180dd4b4 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -6,7 +6,7 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux #. Install the `conda` package manager via `miniforge`, if you do not already have it (or Anaconda), by following the `Miniforge installation instructions `_. -#. If your `conda` version is older than 23.10.0, switch the solver backend to `libmamba` :: +#. 
If your `conda` version is older than 23.10.0, manually switch the solver backend to `libmamba` :: conda install -n base conda-libmamba-solver conda config --set solver libmamba @@ -61,16 +61,6 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux cd RMG-Py -#. Apple silicon (M1+) users only: execute the following commands - **instead of** the following `conda env create -f environment.yml` step. - (This will tell conda that we want to the environment to use x86 - architecture rather than the native ARM64 architecture) :: - - conda create -n rmg_env - conda activate rmg_env - conda config --env --set subdir osx-64 - conda env update -f environment.yml - #. Create the conda environment for RMG-Py:: conda env create -f environment.yml @@ -95,28 +85,25 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux make -#. Modify environment variables. Add RMG-Py to the PYTHONPATH to ensure that you can access RMG modules from any folder. - Also, add your RMG-Py folder to PATH to launch ``rmg.py`` from any folder. +#. **Optional**: add your RMG-Py folder to ``PATH`` to launch ``rmg.py`` from any folder. - In general, these commands should be placed in the appropriate shell initialization file. + In general, this commands should be placed in the appropriate shell initialization file. For Linux users using bash (the default on distributions mentioned here), these should be placed in ``~/.bashrc``. For MacOS users using bash (default before MacOS Catalina), these should be placed in ``~/.bash_profile``, which you should create if it doesn't exist. For MacOS users using zsh (default beginning in MacOS Catalina), these should be placed in ``~/.zshrc``. :: - export PYTHONPATH=YourFolder/RMG-Py/:$PYTHONPATH export PATH=YourFolder/RMG-Py/:$PATH NOTE: Make sure to change ``YourFolder`` to the path leading to the ``RMG-Py`` code. Not doing so will lead to an error stating that python cannot find the module ``rmgpy``. Be sure to either close and reopen your terminal to refresh your environment variables (``source ~/.bashrc`` or ``source ~/.zshrc``). -#. **Optional (Recommended)**: Install and Link Julia dependencies. Ensure that you have modified your environment variables as described above, and then run the following: :: - - julia -e 'using Pkg; Pkg.add("PyCall");Pkg.build("PyCall");Pkg.add(PackageSpec(name="ReactionMechanismSimulator",rev="for_rmg")); using ReactionMechanismSimulator;' +#. **Optional (Recommended)**: Install and Link Julia dependencies using ``install_rms.sh``. Ensure that you have modified your environment variables as described above, and then run the following: :: - python -c "import julia; julia.install(); import diffeqpy; diffeqpy.install()" + . install_rms.sh - Installing these dependencies will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. + Follow the final set of instructions from this installation script with regard to setting additional environment variables. + Installing RMS will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. #. Finally, you can run RMG from any location by typing the following (given that you have prepared the input file as ``input.py`` in the current folder). 
:: From 3958a098c7cfa481ea9a7e5a59b28860cfcd47f8 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 12 Apr 2025 16:15:45 -0400 Subject: [PATCH 27/48] clarify RMS installation process --- environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/environment.yml b/environment.yml index 7f2a1a7103a..4e846b6d3e1 100644 --- a/environment.yml +++ b/environment.yml @@ -94,6 +94,7 @@ dependencies: # optional dependencies for using ReactionMechanismSimulator # remove the leading '#' to install the required dependencies +# or simply run `install_rms.sh` which will handle this for you # - conda-forge::pyjuliacall # additional packages that are required, but not specified here (and why) From f4a25e049c75358c320acc81ff61cc3ccf2b0b30 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 12 Apr 2025 16:19:43 -0400 Subject: [PATCH 28/48] explain why we have two nearly identical input files --- examples/arkane/networks/acetyl+O2_sls/input.py | 2 ++ examples/arkane/networks/acetyl+O2_slse/input.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/examples/arkane/networks/acetyl+O2_sls/input.py b/examples/arkane/networks/acetyl+O2_sls/input.py index 4595e288fc1..fb12cf217a2 100644 --- a/examples/arkane/networks/acetyl+O2_sls/input.py +++ b/examples/arkane/networks/acetyl+O2_sls/input.py @@ -2,6 +2,8 @@ # # Arkane input file for acetyl + O2 pressure-dependent reaction network # +# uses method = 'simulation least squares eigen' for PressureDependence +# ################################################################################ title = 'acetyl + oxygen' diff --git a/examples/arkane/networks/acetyl+O2_slse/input.py b/examples/arkane/networks/acetyl+O2_slse/input.py index 46114439b47..8e57408de5c 100644 --- a/examples/arkane/networks/acetyl+O2_slse/input.py +++ b/examples/arkane/networks/acetyl+O2_slse/input.py @@ -2,6 +2,8 @@ # # Arkane input file for acetyl + O2 pressure-dependent reaction network # +# uses method = 'simulation least squares eigen' for PressureDependence +# ################################################################################ title = 'acetyl + oxygen' From e4a22999a6f545fe8b1cb2aad4f79169bb87c56b Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 12 Apr 2025 16:21:52 -0400 Subject: [PATCH 29/48] provide a more helpful suggestion --- pytest.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pytest.ini b/pytest.ini index f7643013e8a..af0b28f1619 100644 --- a/pytest.ini +++ b/pytest.ini @@ -14,7 +14,7 @@ filterwarnings = # --last-failed to run only the tests which failed on the previous run # --show-capture no/stdout/stderr/both # -k "test name" will run only the tests that match the pattern in quotes, i.e. "test_arkane or test_rmg" would run those two tests -# -n auto # will use all available cores, if you first pip install pytest-xdist, but it doesn't work well with julia (so add --no-julia? or give up?) +# -n auto # will use all available cores, if you first pip install pytest-xdist, but it doesn't work well with julia (so run in pure python, i.e. 
without RMS installed) addopts = --keep-duplicates -s From f05b983109cce3a30b425d06afa4688c450ef466 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sat, 12 Apr 2025 17:41:18 -0400 Subject: [PATCH 30/48] update expected smioles --- test/rmgpy/molecule/translatorTest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/rmgpy/molecule/translatorTest.py b/test/rmgpy/molecule/translatorTest.py index cb62d9dc97e..adc0e87a21b 100644 --- a/test/rmgpy/molecule/translatorTest.py +++ b/test/rmgpy/molecule/translatorTest.py @@ -874,7 +874,7 @@ def test_surface_molecule_rdkit(self): 5 X u0 p0 c0 {1,S} """ ) - smiles = "C[Pt]" + smiles = "[CH3][Pt]" assert to_smiles(mol, backend="rdkit") == smiles From 60c412573efe5b8c51b81925334f758d93d52a5a Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Thu, 22 May 2025 10:59:07 -0400 Subject: [PATCH 31/48] misc. small fixes from debugging and review - hide some pointless output in a dropdown in the regression tests - move RMS into its own conda env to avoid Sundials breaking for Cantera - further limit size of surface RMS mechanism to avoid memory error - explicitly pass linear solver to CVODE to avoid it not being initializated - a typo fix - setuptools and cython version limtiations --- .github/workflows/CI.yml | 33 +++++++++++++++---- environment.yml | 5 +-- .../arkane/networks/acetyl+O2_sls/input.py | 2 +- install_rms.sh | 5 +-- rmgpy/molecule/group.py | 2 +- .../reactionmechanismsimulator_reactors.py | 2 +- .../RMS_liquidSurface_ch4o2cat/input.py | 4 +-- 7 files changed, 37 insertions(+), 16 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 1fa7d32b7ed..2345852de17 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -48,6 +48,8 @@ env: # if running on RMG-Py but requiring changes on an un-merged branch of RMG-database, replace # main with the name of the branch RMG_DATABASE_BRANCH: main + # julia parallel pre-compilation leads to race conditions and hangs, so we limit it to run in serial + JULIA_NUM_PRECOMPILE_TASKS: 1 jobs: build-and-test: @@ -102,7 +104,7 @@ jobs: if: matrix.include-rms == 'with RMS' uses: julia-actions/install-juliaup@v2 with: - channel: '1.9' + channel: '1.10' - name: Set some env vars if: matrix.include-rms == 'with RMS' @@ -173,14 +175,19 @@ jobs: # RMG build step - run: make install + + - name: Make separate RMS conda env + run: | + conda create --name rmg_env_with_rms --clone rmg_env - name: Setup Juliaup uses: julia-actions/install-juliaup@v2 with: - channel: '1.9' + channel: '1.10' - name: Set some env vars run: | + conda activate rmg_env_with_rms # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed echo "JULIA_CONDAPKG_BACKEND=Null" >> $GITHUB_ENV echo "JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python" >> $GITHUB_ENV @@ -188,10 +195,13 @@ jobs: # RMS installation and linking to Julia - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). - run: . install_rms.sh + run: | + conda activate rmg_env_with_rms + . 
install_rms.sh - name: Set some other env vars run: | + conda activate rmg_env_with_rms # ensure that juliacall in Python uses the correct julia executable and packages: https://github.com/JuliaPy/PyJuliaPkg?tab=readme-ov-file#which-julia-gets-used echo "PYTHON_JULIAPKG_EXE=$(which julia)" >> $GITHUB_ENV echo "PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages" >> $GITHUB_ENV @@ -202,8 +212,8 @@ jobs: # Regression Testing - Test Execution - name: Regression Tests - Execution id: regression-execution - timeout-minutes: 60 run: | + conda activate rmg_env_with_rms for regr_test in aromatics liquid_oxidation nitrogen oxidation sulfur superminimal RMS_constantVIdealGasReactor_superminimal RMS_CSTR_liquid_oxidation RMS_liquidSurface_ch4o2cat fragment RMS_constantVIdealGasReactor_fragment minimal_surface; do if python rmg.py test/regression/"$regr_test"/input.py; then @@ -278,7 +288,11 @@ jobs: env: REFERENCE: stable_regression_results run: | + # we _do not_ run in the with_rms env here + # conda activate rmg_env_with_rms + exec 2> >(tee -a regression.stderr >&2) 1> >(tee -a regression.stdout) + touch checkModels.log mkdir -p "test/regression-diff" for regr_test in aromatics liquid_oxidation nitrogen oxidation sulfur superminimal RMS_constantVIdealGasReactor_superminimal RMS_CSTR_liquid_oxidation fragment RMS_constantVIdealGasReactor_fragment minimal_surface; do @@ -301,7 +315,7 @@ jobs: $REFERENCE/"$regr_test"/chemkin/chem_annotated.inp \ $REFERENCE/"$regr_test"/chemkin/species_dictionary.txt \ test/regression/"$regr_test"/chemkin/chem_annotated.inp \ - test/regression/"$regr_test"/chemkin/species_dictionary.txt + test/regression/"$regr_test"/chemkin/species_dictionary.txt &> checkModels.log then echo "$regr_test Passed Core Comparison ✅" else @@ -318,7 +332,7 @@ jobs: $REFERENCE/"$regr_test"/chemkin/chem_edge_annotated.inp \ $REFERENCE/"$regr_test"/chemkin/species_edge_dictionary.txt \ test/regression/"$regr_test"/chemkin/chem_edge_annotated.inp \ - test/regression/"$regr_test"/chemkin/species_edge_dictionary.txt + test/regression/"$regr_test"/chemkin/species_edge_dictionary.txt &> checkModels.log then echo "$regr_test Passed Edge Comparison ✅" else @@ -337,7 +351,7 @@ jobs: if python rmgpy/tools/regression.py \ test/regression/"$regr_test"/regression_input.py \ $REFERENCE/"$regr_test"/chemkin \ - test/regression/"$regr_test"/chemkin + test/regression/"$regr_test"/chemkin &> checkModels.log then echo "$regr_test Passed Observable Testing ✅" else @@ -366,6 +380,11 @@ jobs: cat regression.stdout >> summary.txt echo "
" >> summary.txt echo "" >> summary.txt + echo "
" >> summary.txt + echo "Debugging info for `checkModels.py` (if any)." >> summary.txt + cat checkModels.log >> summary.txt + echo "
" >> summary.txt + echo "" >> summary.txt echo "_beep boop this comment was written by a bot_ :robot:" >> summary.txt cat summary.txt > $GITHUB_STEP_SUMMARY diff --git a/environment.yml b/environment.yml index 4e846b6d3e1..591acb91a51 100644 --- a/environment.yml +++ b/environment.yml @@ -20,7 +20,7 @@ # - August 4, 2024 Restricted pyrms to <2 # - May 14, 2024 Removed diffeqpy by switching to call SciMLBase and Sundials using JuliaCall # - March 15, 2024 - started migration to Python 3.9 -# - Mar 11, 2024 Removed Julia dependencies, now considered optional +# - April 14, 2025 Removed Julia dependencies, now considered optional # name: rmg_env channels: @@ -58,8 +58,9 @@ dependencies: # Python tools - conda-forge::python >=3.9 # leave as GEQ so that GitHub actions can add EQ w/o breaking (contradictory deps) + - conda-forge::setuptools <80 - conda-forge::coverage - - conda-forge::cython >=0.25.2 + - conda-forge::cython >=0.25.2,<3.1 - conda-forge::scikit-learn - conda-forge::scipy >=1.9 - conda-forge::numpy >=1.10.0,<2 diff --git a/examples/arkane/networks/acetyl+O2_sls/input.py b/examples/arkane/networks/acetyl+O2_sls/input.py index fb12cf217a2..31b1c945c24 100644 --- a/examples/arkane/networks/acetyl+O2_sls/input.py +++ b/examples/arkane/networks/acetyl+O2_sls/input.py @@ -2,7 +2,7 @@ # # Arkane input file for acetyl + O2 pressure-dependent reaction network # -# uses method = 'simulation least squares eigen' for PressureDependence +# uses method = 'simulation least squares' for PressureDependence # ################################################################################ diff --git a/install_rms.sh b/install_rms.sh index 57f495c6ab7..d65238af68c 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -5,10 +5,11 @@ # Note that you will have to manually install juliaup before running this script: # curl -fsSL https://install.julialang.org | sh # # restart shell -# juliaup add 1.9 -# juliaup default 1.9 +# juliaup add 1.10 +# juliaup default 1.10 # juliaup remove release +#!/bin/bash conda install -y conda-forge::pyjuliacall diff --git a/rmgpy/molecule/group.py b/rmgpy/molecule/group.py index e9a6e6433fd..f0af3727b77 100644 --- a/rmgpy/molecule/group.py +++ b/rmgpy/molecule/group.py @@ -47,7 +47,7 @@ from rmgpy.molecule.fragment import CuttingLabel # helper functions -# these were originall nested inside the indicated parent function, but when we upgraded to +# these were originally nested inside the indicated parent function, but when we upgraded to # Cython 3 this was no longer allowed - thus, they now live here. 
# add_implicit_benzene diff --git a/rmgpy/rmg/reactionmechanismsimulator_reactors.py b/rmgpy/rmg/reactionmechanismsimulator_reactors.py index d5938581153..062480fd6f8 100644 --- a/rmgpy/rmg/reactionmechanismsimulator_reactors.py +++ b/rmgpy/rmg/reactionmechanismsimulator_reactors.py @@ -472,7 +472,7 @@ def simulate(self, model_settings, simulator_settings, conditions): model_settings.tol_rxn_to_core_deadend_radical, atol=simulator_settings.atol, rtol=simulator_settings.rtol, - solver=Main.Sundials.CVODE_BDF(), + solver=Main.Sundials.CVODE_BDF(linear_solver=Main.Symbol("GMRES")), ) return ( diff --git a/test/regression/RMS_liquidSurface_ch4o2cat/input.py b/test/regression/RMS_liquidSurface_ch4o2cat/input.py index ad6039eee37..f2c56dc2a4a 100644 --- a/test/regression/RMS_liquidSurface_ch4o2cat/input.py +++ b/test/regression/RMS_liquidSurface_ch4o2cat/input.py @@ -75,11 +75,11 @@ toleranceMoveToCore=0.01, toleranceKeepInEdge=0.001, toleranceInterruptSimulation=1e8, - maximumEdgeSpecies=10000, + maximumEdgeSpecies=100, minCoreSizeForPrune=10, minSpeciesExistIterationsForPrune=2, maxNumObjsPerIter=3, - maxNumSpecies=35, + maxNumSpecies=10, ) options( From 5de1dfa3a805048d821c7e18cfb233890203ebeb Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Thu, 22 May 2025 11:21:07 -0400 Subject: [PATCH 32/48] port ci updates to docs build, set sphinx to older, compatible version --- .github/workflows/docs.yml | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 8c22acd7cfc..8aefce11149 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -22,7 +22,7 @@ jobs: git config --global user.email "github-actions@github.com" - name: Clone RMG-Py - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: fetch-depth: 0 @@ -32,10 +32,11 @@ jobs: environment-file: environment.yml miniforge-variant: Miniforge3 miniforge-version: latest - python-version: 3.9 + python-version: "3.9" activate-environment: rmg_env - use-mamba: true + auto-update-conda: true show-channel-urls: true + conda-remove-defaults: "true" - name: Make libtiff Symlink to Avoid Runner Bug run: | # This action may need to be removed/adjusted in future runs. @@ -43,12 +44,12 @@ jobs: find /usr/lib -name libtiff* - name: Install sphinx - run: mamba install -y sphinx + run: conda install -y sphinx~=5.0 - - name: Mamba info + - name: conda info run: | - mamba info - mamba list + conda info + conda list - name: Clone RMG-database run: | From 4f0e3fb55b77457bd0c4f57b9a719d2440e9cb21 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Fri, 23 May 2025 13:54:51 -0400 Subject: [PATCH 33/48] Apply suggestions from code review Co-authored-by: Richard West --- .../source/users/rmg/installation/anacondaDeveloper.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 336180dd4b4..062495e5673 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -100,7 +100,7 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux #. **Optional (Recommended)**: Install and Link Julia dependencies using ``install_rms.sh``. Ensure that you have modified your environment variables as described above, and then run the following: :: - . 
install_rms.sh + source install_rms.sh Follow the final set of instructions from this installation script with regard to setting additional environment variables. Installing RMS will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. From 0bd55d23681d515bbe97f961c619a7f3d97869d5 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Fri, 23 May 2025 13:59:23 -0400 Subject: [PATCH 34/48] update the install_rms script to be easier to follow --- install_rms.sh | 44 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 35 insertions(+), 9 deletions(-) diff --git a/install_rms.sh b/install_rms.sh index d65238af68c..a55cf3652f1 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -1,15 +1,41 @@ # install_rms.sh # -# Convenience script to install ReactionMechanismSimulator inside your rmg_env conda environment +# Convenience script to install ReactionMechanismSimulator into an rmg_env conda environment # -# Note that you will have to manually install juliaup before running this script: -# curl -fsSL https://install.julialang.org | sh -# # restart shell -# juliaup add 1.10 -# juliaup default 1.10 -# juliaup remove release - -#!/bin/bash + +# Check if juliaup is installed +if ! command -v juliaup &> /dev/null; then + echo "Could not find julia via juliaup. Please install it by running:" + echo "curl -fsSL https://install.julialang.org | sh" + echo "# Restart your shell" + echo "juliaup add 1.10" + echo "juliaup default 1.10" + echo "juliaup remove release" + exit 1 +fi + +# Check if julia command is available +if ! command -v julia &> /dev/null; then + echo "Julia is not installed. Please install version 1.10 by running:" + echo "juliaup add 1.10" + echo "juliaup default 1.10" + echo "juliaup remove release" + exit 1 +fi + +# Check if Julia version is 1.10 +if ! julia --version | grep -q "1.10"; then + echo "Julia 1.10 is not installed. Current version is $(julia --version)." + echo "Please install Julia 1.10 by running:" + echo "juliaup add 1.10" + echo "juliaup default 1.10" + echo "juliaup remove release" + exit 1 +fi + +# Print the path of the Julia binary +julia_path=$(which julia) +echo "Julia 1.10 binary path: $julia_path" conda install -y conda-forge::pyjuliacall From 4881ea74bd28eca192781f889e7dacd1f94386d4 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 16:17:14 -0400 Subject: [PATCH 35/48] remove dirty makefile hack --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 36c0f7b13db..ce2103a74b7 100644 --- a/Makefile +++ b/Makefile @@ -21,13 +21,14 @@ documentation: clean: @ python utilities.py clean + python -m pip uninstall --yes . clean-solver: @ python utilities.py clean-solver install: @ python utilities.py check-pydas - python -m pip install -vv --no-cache-dir -e . + python -m pip install -vv -e . 
q2dtor: @ echo -e "\nInstalling Q2DTor...\n" From e254b5e37a86dd672d3d1bcab92eed28b7a49951 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 16:19:48 -0400 Subject: [PATCH 36/48] more elegant way to set env vars for julia install fix --- .github/workflows/CI.yml | 36 +++--------------------------------- install_rms.sh | 25 +++++++++---------------- 2 files changed, 12 insertions(+), 49 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 2345852de17..70ceb539d94 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -106,26 +106,12 @@ jobs: with: channel: '1.10' - - name: Set some env vars - if: matrix.include-rms == 'with RMS' - run: | - # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed - echo "JULIA_CONDAPKG_BACKEND=Null" >> $GITHUB_ENV - echo "JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python" >> $GITHUB_ENV - # RMS installation and linking to Julia - name: Install and link Julia dependencies if: matrix.include-rms == 'with RMS' timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). run: . install_rms.sh - - name: Set some other env vars - if: matrix.include-rms == 'with RMS' - run: | - # ensure that juliacall in Python uses the correct julia executable and packages: https://github.com/JuliaPy/PyJuliaPkg?tab=readme-ov-file#which-julia-gets-used - echo "PYTHON_JULIAPKG_EXE=$(which julia)" >> $GITHUB_ENV - echo "PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages" >> $GITHUB_ENV - - name: Install Q2DTor run: echo "" | make q2dtor @@ -176,22 +162,15 @@ jobs: # RMG build step - run: make install - - name: Make separate RMS conda env + - name: Make separate No-RMS conda env run: | - conda create --name rmg_env_with_rms --clone rmg_env + conda create --name rmg_env_without_rms --clone rmg_env - name: Setup Juliaup uses: julia-actions/install-juliaup@v2 with: channel: '1.10' - - name: Set some env vars - run: | - conda activate rmg_env_with_rms - # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed - echo "JULIA_CONDAPKG_BACKEND=Null" >> $GITHUB_ENV - echo "JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python" >> $GITHUB_ENV - # RMS installation and linking to Julia - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). @@ -199,13 +178,6 @@ jobs: conda activate rmg_env_with_rms . 
install_rms.sh - - name: Set some other env vars - run: | - conda activate rmg_env_with_rms - # ensure that juliacall in Python uses the correct julia executable and packages: https://github.com/JuliaPy/PyJuliaPkg?tab=readme-ov-file#which-julia-gets-used - echo "PYTHON_JULIAPKG_EXE=$(which julia)" >> $GITHUB_ENV - echo "PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages" >> $GITHUB_ENV - - name: Install Q2DTor run: echo "" | make q2dtor @@ -213,7 +185,6 @@ jobs: - name: Regression Tests - Execution id: regression-execution run: | - conda activate rmg_env_with_rms for regr_test in aromatics liquid_oxidation nitrogen oxidation sulfur superminimal RMS_constantVIdealGasReactor_superminimal RMS_CSTR_liquid_oxidation RMS_liquidSurface_ch4o2cat fragment RMS_constantVIdealGasReactor_fragment minimal_surface; do if python rmg.py test/regression/"$regr_test"/input.py; then @@ -288,8 +259,7 @@ jobs: env: REFERENCE: stable_regression_results run: | - # we _do not_ run in the with_rms env here - # conda activate rmg_env_with_rms + conda activate rmg_env_without_rms exec 2> >(tee -a regression.stderr >&2) 1> >(tee -a regression.stdout) touch checkModels.log diff --git a/install_rms.sh b/install_rms.sh index a55cf3652f1..aba269bf237 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -37,22 +37,15 @@ fi julia_path=$(which julia) echo "Julia 1.10 binary path: $julia_path" -conda install -y conda-forge::pyjuliacall +conda activate rmg_env +# https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed +conda env config vars set JULIA_CONDAPKG_BACKEND=Null +conda env config vars set JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python +conda env config vars set PYTHON_JULIAPKG_EXE=$(which julia) +conda env config vars set PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages +conda deactivate +conda activate rmg_env -export JULIA_CONDAPKG_BACKEND=Null -export JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python +conda install -y conda-forge::pyjuliacall julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator; Pkg.instantiate()' || echo "RMS install error - continuing anyway ¯\_(ツ)_/¯" - -export PYTHON_JULIAPKG_EXE=$(which julia) -export PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages - -echo "Copy the following text into your terminal profile (.bashrc, .zshrc, etc.): - -export JULIA_CONDAPKG_BACKEND=Null -export JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python -export PYTHON_JULIAPKG_EXE=$(which julia) -export PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages - -or otherwise run these 4 commands when first opening a terminal that will run RMG requiring RMS. -""" From 9639a701547fef60a2c760e922e8d4ec321ea992 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 16:25:01 -0400 Subject: [PATCH 37/48] uninstall requries name and edge case where clean can fail --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index ce2103a74b7..ab26c08e7f6 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,7 @@ documentation: clean: @ python utilities.py clean - python -m pip uninstall --yes . 
+ python -m pip uninstall --yes reactionmechanismgenerator || true # can fail if RMG not installed at all clean-solver: @ python utilities.py clean-solver From d0b1481e6c3e20c56319ade87208d383d5e26b29 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 17:46:23 -0400 Subject: [PATCH 38/48] update dockerfile with new rms install --- Dockerfile | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 5475205334c..253304f5df3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,21 +55,17 @@ RUN conda env create --file environment.yml # in a Dockerfile build script) SHELL ["conda", "run", "--no-capture-output", "-n", "rmg_env", "/bin/bash", "-c"] -RUN conda install -c conda-forge julia=1.9.1 pyjulia>=0.6 && \ - conda install -c rmg pyrms diffeqpy && \ - conda clean --all --yes +RUN conda clean --all --yes # Set environment variables as directed in the RMG installation instructions ENV RUNNER_CWD=/rmg -ENV PYTHONPATH="$RUNNER_CWD/RMG-Py:$PYTHONPATH" ENV PATH="$RUNNER_CWD/RMG-Py:$PATH" # 1. Build RMG # 2. Install and link Julia dependencies for RMS # setting this env variable fixes an issue with Julia precompilation on Windows ENV JULIA_CPU_TARGET="x86-64,haswell,skylake,broadwell,znver1,znver2,znver3,cascadelake,icelake-client,cooperlake,generic" -RUN make && \ - julia -e 'ENV["JULIA_CONDAPKG_BACKEND"] = "Current"; using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator' +RUN source install_rms.sh # RMG-Py should now be installed and ready - trigger precompilation and test run RUN python rmg.py examples/rmg/minimal/input.py From 556467415e1fdb6dfd44af4abe2899512f289ae8 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 17:47:45 -0400 Subject: [PATCH 39/48] developer warning about Julia sharp edge --- .../source/users/rmg/installation/anacondaDeveloper.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 062495e5673..891586a39fd 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -104,6 +104,8 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux Follow the final set of instructions from this installation script with regard to setting additional environment variables. Installing RMS will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. + Note that installing RMS will cause errors when running Cantera simulations. + This should not affect normal RMG use, but if you wish to run Cantera simulations you will need to maintain a separate ``rmg_env`` without RMS in it. #. Finally, you can run RMG from any location by typing the following (given that you have prepared the input file as ``input.py`` in the current folder). 
:: From 4924b08f9212d5214b1449ff5b42c44b703627b7 Mon Sep 17 00:00:00 2001 From: JacksonBurns Date: Sun, 25 May 2025 19:58:11 -0400 Subject: [PATCH 40/48] straggler --- .github/workflows/CI.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index 70ceb539d94..26da29a5a8e 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -175,7 +175,6 @@ jobs: - name: Install and link Julia dependencies timeout-minutes: 120 # this usually takes 20-45 minutes (or hangs for 6+ hours). run: | - conda activate rmg_env_with_rms . install_rms.sh - name: Install Q2DTor From a98ba35e3a1b23da7a003532cb0cc6b2deb2c16a Mon Sep 17 00:00:00 2001 From: Richard West Date: Tue, 27 May 2025 13:48:05 -0400 Subject: [PATCH 41/48] A note about renaming conda environment. --- .../source/users/rmg/installation/anacondaDeveloper.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 891586a39fd..405a26d6811 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -65,6 +65,13 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux conda env create -f environment.yml + To give it a different name (such as ``rmg_env2``), you can use the ``-n`` flag:: + + conda env create -f environment.yml -n rmg_env2 + + but be sure to edit the ``install_rms.sh`` script to use the new name, before + using it in the steps below. + If either of these commands return an error due to being unable to find the ``conda`` command, try to either close and reopen your terminal to refresh your environment variables or type the following command. From 22e153b2e037a939b12415591e2ecba2f80e8238 Mon Sep 17 00:00:00 2001 From: Richard West Date: Tue, 27 May 2025 13:49:18 -0400 Subject: [PATCH 42/48] Use current environment name, in install_rms.sh --- .../users/rmg/installation/anacondaDeveloper.rst | 3 --- install_rms.sh | 10 ++++++++-- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 405a26d6811..ca24181edeb 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -69,9 +69,6 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux conda env create -f environment.yml -n rmg_env2 - but be sure to edit the ``install_rms.sh`` script to use the new name, before - using it in the steps below. - If either of these commands return an error due to being unable to find the ``conda`` command, try to either close and reopen your terminal to refresh your environment variables or type the following command. 
diff --git a/install_rms.sh b/install_rms.sh index aba269bf237..4009874ec90 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -37,14 +37,20 @@ fi julia_path=$(which julia) echo "Julia 1.10 binary path: $julia_path" -conda activate rmg_env +# Get current conda environment name +current_env=$(conda info --envs | grep '\*' | awk '{print $1}') +echo "Current conda environment: $current_env" + +# Set environment variables for the current environment # https://juliapy.github.io/PythonCall.jl/stable/pythoncall/#If-you-already-have-Python-and-required-Python-packages-installed conda env config vars set JULIA_CONDAPKG_BACKEND=Null conda env config vars set JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python conda env config vars set PYTHON_JULIAPKG_EXE=$(which julia) conda env config vars set PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages + +# Reactivate the current environment to apply the new variables conda deactivate -conda activate rmg_env +conda activate $current_env conda install -y conda-forge::pyjuliacall From c46a861a88961acec5ef211b0a38cfc39df54313 Mon Sep 17 00:00:00 2001 From: Richard West Date: Wed, 28 May 2025 08:43:13 -0400 Subject: [PATCH 43/48] Fix install_rms.sh to not kill the calling shell. return 1 exits the current function or sourced script with a non-zero exit status (indicating failure), but it does NOT close the shell. Here's the key difference: exit 1: Terminates the entire shell process (would close the user's terminal) return 1: Only exits the current script/function and returns control to the calling shell When someone runs source install_rms.sh, the script is executed in the current shell context. Using return 1 means: The script stops executing at that point Control returns to the user's shell prompt The shell remains open and usable The return code 1 indicates the script failed (which the user can check with echo $?) This is exactly what you want for a sourced script - it allows the script to abort gracefully without killing the user's shell session. --- install_rms.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/install_rms.sh b/install_rms.sh index 4009874ec90..b45a3d58caa 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -11,7 +11,7 @@ if ! command -v juliaup &> /dev/null; then echo "juliaup add 1.10" echo "juliaup default 1.10" echo "juliaup remove release" - exit 1 + return 1 fi # Check if julia command is available @@ -20,7 +20,7 @@ if ! command -v julia &> /dev/null; then echo "juliaup add 1.10" echo "juliaup default 1.10" echo "juliaup remove release" - exit 1 + return 1 fi # Check if Julia version is 1.10 @@ -30,7 +30,7 @@ if ! julia --version | grep -q "1.10"; then echo "juliaup add 1.10" echo "juliaup default 1.10" echo "juliaup remove release" - exit 1 + return 1 fi # Print the path of the Julia binary From f7ca69d471f1acd9108907ad99eeb7f1c82dcccf Mon Sep 17 00:00:00 2001 From: Richard West Date: Wed, 28 May 2025 10:12:21 -0400 Subject: [PATCH 44/48] Don't set PYTHON_JULIAPKG_PROJECT environment variable. This way it ends up in a julia_env subdirectory of the active conda environment. 
--- install_rms.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/install_rms.sh b/install_rms.sh index b45a3d58caa..1a0912c0589 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -46,7 +46,6 @@ echo "Current conda environment: $current_env" conda env config vars set JULIA_CONDAPKG_BACKEND=Null conda env config vars set JULIA_PYTHONCALL_EXE=$CONDA_PREFIX/bin/python conda env config vars set PYTHON_JULIAPKG_EXE=$(which julia) -conda env config vars set PYTHON_JULIAPKG_PROJECT=$HOME/.julia/packages # Reactivate the current environment to apply the new variables conda deactivate From 774115bf0849873d7ea8cfff7c334a56f981ff00 Mon Sep 17 00:00:00 2001 From: Richard West Date: Wed, 28 May 2025 08:57:45 -0400 Subject: [PATCH 45/48] Make julia version string more precise. 1.10 would have matched 1.11.10, were it ever to exist --- install_rms.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install_rms.sh b/install_rms.sh index 1a0912c0589..35a95582d07 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -24,7 +24,7 @@ if ! command -v julia &> /dev/null; then fi # Check if Julia version is 1.10 -if ! julia --version | grep -q "1.10"; then +if ! julia --version | grep -q " 1\.10"; then echo "Julia 1.10 is not installed. Current version is $(julia --version)." echo "Please install Julia 1.10 by running:" echo "juliaup add 1.10" From b9db18fcb409edbeed5bd73593ac4a65329878d1 Mon Sep 17 00:00:00 2001 From: Richard West Date: Wed, 28 May 2025 10:07:40 -0400 Subject: [PATCH 46/48] Update developer instructions for Julia and RMS steps. Users need to run install_rms.sh, then follow its instructions, then run it again. Also made a one-liner test that RMS is installed (though it carefully avoids the many minutes of pre-compiling). --- .../users/rmg/installation/anacondaDeveloper.rst | 15 ++++++++++----- install_rms.sh | 2 ++ 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index ca24181edeb..5df44793c4c 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -102,14 +102,19 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux Be sure to either close and reopen your terminal to refresh your environment variables (``source ~/.bashrc`` or ``source ~/.zshrc``). -#. **Optional (Recommended)**: Install and Link Julia dependencies using ``install_rms.sh``. Ensure that you have modified your environment variables as described above, and then run the following: :: +#. **Optional (Recommended)**: Install and Link Julia dependencies. + + Installing Julia and ReactionMechanismSimulator.jl (RMS) will enable all the features in RMG that require RMS-based reactors, + as well as using ``method='ode'`` when solving the Master Equation with Arkane. + Note that installing RMS can cause errors when running Cantera simulations; this should not affect normal RMG use, but if you wish to run Cantera simulations you will need to maintain a separate conda environment without RMS in it. + + Ensure that you have modified your environment variables as described above, and then run the following: :: source install_rms.sh - Follow the final set of instructions from this installation script with regard to setting additional environment variables.
- Installing RMS will allow using ``method='ode'`` when solving the Master Equation with Arkane and using ``ReactionMechanismSimulator.jl``-based reactors in RMG. - Note that installing RMS will cause errors when running Cantera simulations. - This should not affect normal RMG use, but if you wish to run Cantera simulations you will need to maintain a separate ``rmg_env`` without RMS in it. + Follow the instructions that it provides for installing Julia and Juliaup, + which can involve several steps including restarting your terminal or shell. + Run the script again, until it finishes installing RMS and all of its dependencies, and reports that ReactionMechanismSimulator is installed. #. Finally, you can run RMG from any location by typing the following (given that you have prepared the input file as ``input.py`` in the current folder). :: diff --git a/install_rms.sh b/install_rms.sh index 35a95582d07..e6d3c6ba666 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -54,3 +54,5 @@ conda activate $current_env conda install -y conda-forge::pyjuliacall julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator; Pkg.instantiate()' || echo "RMS install error - continuing anyway ¯\_(ツ)_/¯" + +python -c "from juliacall import Main; import sys; sys.exit(0 if Main.seval('Base.identify_package(\"ReactionMechanismSimulator\") !== nothing') and print('ReactionMechanismSimulator is installed in $current_env') is None else 1)" From 311e31969a1499c6af7ecfdd55eaa771377d20a2 Mon Sep 17 00:00:00 2001 From: Richard West Date: Wed, 28 May 2025 10:08:03 -0400 Subject: [PATCH 47/48] Tweak to developer instructions. --- .../source/users/rmg/installation/anacondaDeveloper.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/documentation/source/users/rmg/installation/anacondaDeveloper.rst b/documentation/source/users/rmg/installation/anacondaDeveloper.rst index 5df44793c4c..466c09812ec 100644 --- a/documentation/source/users/rmg/installation/anacondaDeveloper.rst +++ b/documentation/source/users/rmg/installation/anacondaDeveloper.rst @@ -6,7 +6,7 @@ Installation by Source Using Anaconda Environment for Unix-based Systems: Linux #. Install the `conda` package manager via `miniforge`, if you do not already have it (or Anaconda), by following the `Miniforge installation instructions `_. -#. If your `conda` version is older than 23.10.0, manually switch the solver backend to `libmamba` :: +#. 
If your `conda` version is older than 23.10.0, manually switch the solver backend to `libmamba` (or update your conda):: conda install -n base conda-libmamba-solver conda config --set solver libmamba From 7d24fce9e5b262ec57da356358c4daa7171f20f9 Mon Sep 17 00:00:00 2001 From: Jackson Burns <33505528+JacksonBurns@users.noreply.github.com> Date: Thu, 29 May 2025 12:46:37 -0400 Subject: [PATCH 48/48] Update install_rms.sh - stable RMG branch for rms --- install_rms.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/install_rms.sh b/install_rms.sh index e6d3c6ba666..2870e263ab3 100644 --- a/install_rms.sh +++ b/install_rms.sh @@ -53,6 +53,6 @@ conda activate $current_env conda install -y conda-forge::pyjuliacall -julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/hwpang/ReactionMechanismSimulator.jl.git", rev="fix_installation")); using ReactionMechanismSimulator; Pkg.instantiate()' || echo "RMS install error - continuing anyway ¯\_(ツ)_/¯" +julia -e 'using Pkg; Pkg.add(Pkg.PackageSpec(name="ReactionMechanismSimulator", url="https://github.com/ReactionMechanismGenerator/ReactionMechanismSimulator.jl.git", rev="for_rmg")); using ReactionMechanismSimulator; Pkg.instantiate()' || echo "RMS install error - continuing anyway ¯\_(ツ)_/¯" python -c "from juliacall import Main; import sys; sys.exit(0 if Main.seval('Base.identify_package(\"ReactionMechanismSimulator\") !== nothing') and print('ReactionMechanismSimulator is installed in $current_env') is None else 1)"
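The verification one-liner above is dense, so the following is an equivalent, more readable sketch of the same check (an illustration only, assuming juliacall is importable in the active environment; the script's $current_env variable is replaced by a plain message)::

    # Sketch of the check performed by the one-liner in install_rms.sh.
    # It asks Julia whether ReactionMechanismSimulator can be identified in the
    # active project, without importing it (which would trigger long precompilation).
    import sys
    from juliacall import Main

    if Main.seval('Base.identify_package("ReactionMechanismSimulator") !== nothing'):
        print("ReactionMechanismSimulator is installed")
        sys.exit(0)
    sys.exit(1)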