diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 040221ad4..9412fe249 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,19 +29,13 @@ jobs: fail-fast: false matrix: os_dist: [ - {os: ubuntu-24.04, dist: cp38-manylinux_x86_64}, - {os: ubuntu-24.04, dist: cp39-manylinux_x86_64}, {os: ubuntu-24.04, dist: cp310-manylinux_x86_64}, {os: ubuntu-24.04, dist: cp311-manylinux_x86_64}, {os: ubuntu-24.04, dist: cp312-manylinux_x86_64}, {os: ubuntu-24.04, dist: cp313-manylinux_x86_64}, {os: ubuntu-24.04, dist: cp314-manylinux_x86_64}, - # cp38-manylinux_i686 disabled because pandas isn't prebuilt and takes 20 minutes to build. - # {os: ubuntu-latest, dist: cp38-manylinux_i686}, - # cp39-manylinux_i686 disabled because pandas isn't prebuilt and takes 20 minutes to build. - # {os: ubuntu-latest, dist: cp39-manylinux_i686}, - # cp310-manylinux_i686 disabled because scipy isn't prebuilt and fails to build. + # manylinux_i686 disabled because scipy isn't prebuilt and fails to build. # # The actual error seen in github actions: # @@ -59,11 +53,7 @@ jobs: # libraries found. To build Scipy from sources, BLAS & LAPACK # libraries need to be installed. # - # {os: ubuntu-latest, dist: pp37-manylinux_x86_64}, - # {os: ubuntu-latest, dist: pp38-manylinux_x86_64}, # {os: ubuntu-latest, dist: pp39-manylinux_x86_64}, - # {os: ubuntu-latest, dist: pp37-manylinux_i686}, - # {os: ubuntu-latest, dist: pp38-manylinux_i686}, # {os: ubuntu-latest, dist: pp39-manylinux_i686}, # musllinux builds disabled because scipy isn't prebuilt and fails to build. @@ -74,27 +64,15 @@ jobs: # libraries found. To build Scipy from sources, BLAS & LAPACK # libraries need to be installed. 
# - # {os: ubuntu-latest, dist: cp36-musllinux_x86_64}, - # {os: ubuntu-latest, dist: cp37-musllinux_x86_64}, - # {os: ubuntu-latest, dist: cp38-musllinux_x86_64}, - # {os: ubuntu-latest, dist: cp39-musllinux_x86_64}, # {os: ubuntu-latest, dist: cp310-musllinux_x86_64}, - # {os: ubuntu-latest, dist: cp36-musllinux_i686}, - # {os: ubuntu-latest, dist: cp37-musllinux_i686}, - # {os: ubuntu-latest, dist: cp38-musllinux_i686}, - # {os: ubuntu-latest, dist: cp39-musllinux_i686}, # {os: ubuntu-latest, dist: cp310-musllinux_i686}, - {os: macos-14, dist: cp38-macosx_x86_64, macosarch: x86_64}, - {os: macos-14, dist: cp39-macosx_x86_64, macosarch: x86_64}, {os: macos-14, dist: cp310-macosx_x86_64, macosarch: x86_64}, {os: macos-14, dist: cp311-macosx_x86_64, macosarch: x86_64}, {os: macos-14, dist: cp312-macosx_x86_64, macosarch: x86_64}, {os: macos-14, dist: cp313-macosx_x86_64, macosarch: x86_64}, {os: macos-14, dist: cp314-macosx_x86_64, macosarch: x86_64}, - {os: macos-14, dist: cp38-macosx_arm64, macosarch: arm64}, - {os: macos-14, dist: cp39-macosx_arm64, macosarch: arm64}, {os: macos-14, dist: cp310-macosx_arm64, macosarch: arm64}, {os: macos-14, dist: cp311-macosx_arm64, macosarch: arm64}, {os: macos-14, dist: cp312-macosx_arm64, macosarch: arm64}, @@ -112,28 +90,14 @@ jobs: # BLAS and LAPACK by setting the environment variables # NPY_BLAS_ORDER="" and NPY_LAPACK_ORDER="" before building NumPy. # - # {os: macOS-10.15, dist: pp37-macosx_x86_64}, - # {os: macOS-10.15, dist: pp38-macosx_x86_64}, # {os: macOS-10.15, dist: pp39-macosx_x86_64}, - {os: windows-2025, dist: cp38-win_amd64}, - {os: windows-2025, dist: cp39-win_amd64}, {os: windows-2025, dist: cp310-win_amd64}, {os: windows-2025, dist: cp311-win_amd64}, {os: windows-2025, dist: cp312-win_amd64}, {os: windows-2025, dist: cp313-win_amd64}, {os: windows-2025, dist: cp314-win_amd64}, - # cp38-win32 and cp39-win32 disabled because scipy fails to build. 
- # - # The actual error seen in github actions: - # - # Need python for 64-bit, but found 32-bit - # ..\..\meson.build:82:0: ERROR: Python dependency not found - # - #{os: windows-2025, dist: cp38-win32}, - #{os: windows-2025, dist: cp39-win32}, - # cp310-win32 disabled because numpy isn't prebuilt and fails to build. # # The actual error seen in github actions: @@ -254,7 +218,7 @@ jobs: steps: - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - run: cmake . - - run: make stim -j 2 + - run: make stim -j $(getconf _NPROCESSORS_ONLN) - run: echo -e "H 0 \n CNOT 0 1 \n M 0 1" | out/stim --sample build_bazel: runs-on: ubuntu-24.04 @@ -324,7 +288,7 @@ jobs: steps: - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - run: cmake . - - run: make libstim -j 2 + - run: make libstim -j $(getconf _NPROCESSORS_ONLN) - run: echo -e '#include "stim.h"\nint main(int argc,const char **argv) {return !stim::find_bool_argument("test", argc, argv);}' > test.cc - run: g++ -std=c++20 test.cc out/libstim.a -I src - run: ./a.out test @@ -334,7 +298,7 @@ jobs: - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - run: mkdir install_dir - run: cmake . -DCMAKE_INSTALL_PREFIX=install_dir - - run: make -j 2 + - run: make -j $(getconf _NPROCESSORS_ONLN) - run: make install - run: echo -e '#include "stim.h"\nint main(int argc,const char **argv) {return !stim::find_bool_argument("test", argc, argv);}' > test.cc - run: g++ -std=c++20 test.cc install_dir/lib/libstim.a -I install_dir/include @@ -356,7 +320,7 @@ jobs: steps: - uses: actions/checkout@50fbc622fc4ef5163becd7fab6573eac35f8462e # v1 - run: cmake . -DSIMD_WIDTH=${{ matrix.simd_width }} - - run: make stim_perf -j 2 + - run: make stim_perf -j $(getconf _NPROCESSORS_ONLN) - run: out/stim_perf test: runs-on: ubuntu-24.04 @@ -373,7 +337,7 @@ jobs: make sudo make install - run: cmake . 
-DSIMD_WIDTH=${{ matrix.simd_width }} - - run: make stim_test -j 2 + - run: make stim_test -j $(getconf _NPROCESSORS_ONLN) - run: out/stim_test test_o3: runs-on: ubuntu-24.04 @@ -387,7 +351,7 @@ jobs: make sudo make install - run: cmake . -DSIMD_WIDTH=256 - - run: make stim_test_o3 -j 2 + - run: make stim_test_o3 -j $(getconf _NPROCESSORS_ONLN) - run: out/stim_test_o3 test_generated_docs_are_fresh: runs-on: ubuntu-24.04 diff --git a/dev/clean_build_files.sh b/dev/clean_build_files.sh index 9f7fcdf86..52f7887e1 100755 --- a/dev/clean_build_files.sh +++ b/dev/clean_build_files.sh @@ -2,7 +2,7 @@ set -e ######################################################################### -# Deletes files created by cmake, python setup.py, and other build steps. +# Deletes files created by cmake, pip install ., and other build steps. ######################################################################### # Get to this script's git repo root. diff --git a/dev/overwrite_dev_versions_with_date.py b/dev/overwrite_dev_versions_with_date.py index e031ffeda..b9c192c5f 100755 --- a/dev/overwrite_dev_versions_with_date.py +++ b/dev/overwrite_dev_versions_with_date.py @@ -23,7 +23,7 @@ def main(): # Generate dev version starting from major.minor version. # (Requires the existing version to have a 'dev' suffix.) # (Uses the timestamp of the HEAD commit, to ensure consistency when run multiple times.) - with open('setup.py') as f: + with open('glue/python/src/stim_custom_setup/__init__.py') as f: maj_min_version_line, = [line for line in f.read().splitlines() if re.match("^__version__ = '[^']+'", line)] maj_version, min_version, patch = maj_min_version_line.split()[-1].strip("'").split('.') if 'dev' not in patch: @@ -33,7 +33,7 @@ def main(): # Overwrite existing versions. 
package_setup_files = [ - "setup.py", + "glue/python/src/stim_custom_setup/__init__.py", "glue/cirq/setup.py", "glue/cirq/stimcirq/__init__.py", "glue/zx/stimzx/__init__.py", diff --git a/glue/python/src/stim_custom_setup/__init__.py b/glue/python/src/stim_custom_setup/__init__.py new file mode 100644 index 000000000..68aed3fad --- /dev/null +++ b/glue/python/src/stim_custom_setup/__init__.py @@ -0,0 +1,332 @@ +"""This file is responsible for creating stim wheels. + +Basically, it plays the role that "setup.py" plays in most packages. + +Why does it exist? Because I got sick and tired of trying to get +setuptools to perform parallel builds and to lay out the wheel +files in the exact way that I wanted. +""" + +import base64 +import glob +import hashlib +import os +import pathlib +import platform +import subprocess +import sys +import sysconfig +import tempfile +import time +import zipfile + +import pybind11 + +__version__ = '1.16.dev0' + + +def _get_wheel_tag() -> str: + python_tag = { + 'cpython': f'cp{sys.version_info.major}{sys.version_info.minor}', + }.get(sys.implementation.name) + if python_tag is None: + raise NotImplementedError(f"Don't know package tag for {sys.implementation.name=}") + + abi_tag = python_tag + if sysconfig.get_config_var('Py_GIL_DISABLED'): + abi_tag += 't' + if sysconfig.get_config_var('Py_DEBUG'): + abi_tag += 'd' + + mac_ver = platform.mac_ver()[0] + if mac_ver: + mac_ver = mac_ver.split('.') + else: + mac_ver = '?', '?' 
+    plat_tag = {
+        (  'linux', 'x86_64' ): 'linux_x86_64',
+        (  'linux', 'amd64'  ): 'linux_x86_64',
+        (  'linux', 'aarch64'): 'linux_aarch64',
+        (  'linux', 'arm64'  ): 'linux_arm64',
+        ( 'darwin', 'x86_64' ): f'macosx_{mac_ver[0]}_{mac_ver[1]}_x86_64',
+        ( 'darwin', 'amd64'  ): f'macosx_{mac_ver[0]}_{mac_ver[1]}_x86_64',
+        ( 'darwin', 'aarch64'): f'macosx_{mac_ver[0]}_{mac_ver[1]}_aarch64',
+        ( 'darwin', 'arm64'  ): f'macosx_{mac_ver[0]}_{mac_ver[1]}_arm64',
+        ('windows', 'x86_64' ): 'win_amd64',
+        ('windows', 'amd64'  ): 'win_amd64',
+        ('windows', 'aarch64'): 'win_arm64',
+        ('windows', 'arm64'  ): 'win_arm64',
+        ('windows', '????'   ): 'win32',
+    }.get((platform.system().lower(), platform.machine().lower()))
+    if plat_tag is None:
+        raise NotImplementedError(f"Don't know platform tag for {platform.system()=} {platform.machine()=}")
+
+    return f"{python_tag}-{abi_tag}-{plat_tag}"
+
+
+def _get_content_hash(content: bytes) -> str:
+    digest = hashlib.sha256(content).digest()
+    hash_str = base64.urlsafe_b64encode(digest).decode().rstrip("=")
+    return f"sha256={hash_str},{len(content)}"
+
+
+def _run_processes_in_parallel(action_name, commands: list[list[str]]):
+    """You're not gonna believe this, but this method...
+
+    ...runs processes in parallel.
+
+    It avoids starting more processes while cpu_count of them
+    are currently running.
+    """
+    running = []
+    try:
+        cpus = os.cpu_count()
+        left = len(commands)
+        for step, cmd in enumerate(commands):
+            # Busy-wait until fewer than n subprocesses are running.
+            while len(running) == cpus:
+                time.sleep(0.001)
+                for k in range(cpus)[::-1]:
+                    return_code = running[k].poll()
+                    if return_code:
+                        raise RuntimeError("A sub-process failed.")
+                    if return_code is not None:
+                        running[k] = running[-1]
+                        running.pop()
+                        left -= 1
+
+            # Go!
+            print(" ".join(cmd), file=sys.stderr)
+            running.append(subprocess.Popen(cmd))
+            print(f"# {action_name} (remaining={left} running={len(running)})", file=sys.stderr)
+
+        # Wait for the remaining processes.
+ while running: + if running[-1].wait(): + raise RuntimeError("A sub-process failed.") + running.pop() + left -= 1 + if left: + print(f"# {action_name} (remaining={left} running={len(running)})", file=sys.stderr) + print(f"# done {action_name}", file=sys.stderr) + finally: + for r in running: + try: + r.kill() + except: + pass + + +def find_cl_exe() -> str: + program_files = os.environ.get("ProgramFiles(x86)") + if program_files is None: + return 'cl.exe' + + vswhere = pathlib.Path(program_files) / "Microsoft Visual Studio" / "Installer" / "vswhere.exe" + if not vswhere.exists(): + return 'cl.exe' + + vs_root = subprocess.check_output([ + str(vswhere), + "-latest", + "-products", "*", + "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + "-property", "installationPath" + ], encoding='utf-8').strip() + if not vs_root: + return 'cl.exe' + + msvc_root = pathlib.Path(vs_root) / "VC" / "Tools" / "MSVC" + version = sorted(msvc_root.iterdir())[-1].name + arch = "x64" if platform.machine().lower() in ("amd64", "x86_64") else "x86" + cl_path = msvc_root / version / "bin" / f"Host{arch}" / arch / "cl.exe" + if not cl_path.exists(): + return 'cl.exe' + + return str(cl_path) + + +def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): + wheel_name = f'stim-{__version__}-{_get_wheel_tag()}.whl' + wheel_path = pathlib.Path(wheel_directory) / wheel_name + + # Collect source files. 
+ ALL_SOURCE_FILES = glob.glob("src/**/*.cc", recursive=True) + MUX_SOURCE_FILES = glob.glob("src/**/march.pybind.cc", recursive=True) + TEST_FILES = glob.glob("src/**/*.test.cc", recursive=True) + PERF_FILES = glob.glob("src/**/*.perf.cc", recursive=True) + MAIN_FILES = glob.glob("src/**/main.cc", recursive=True) + HEADER_FILES = glob.glob("src/**/*.h", recursive=True) + glob.glob("src/**/*.inl", recursive=True) + RELEVANT_SOURCE_FILES = sorted(set(ALL_SOURCE_FILES) - set(TEST_FILES + PERF_FILES + MAIN_FILES + MUX_SOURCE_FILES)) + + is_windows = platform.system().lower().startswith('win') + # Determine the compiler to use. + compiler = None + if compiler is None and config_settings is not None: + compiler = config_settings.get("compiler", None) + if compiler is None: + compiler = os.environ.get('CXX', None) + if compiler is None: + if is_windows: + compiler = find_cl_exe() + else: + compiler = 'g++' + + # Determine the linker to use. + linker = None + if linker is None and config_settings is not None: + linker = config_settings.get("linker", None) + if linker is None: + if is_windows: + linker = find_cl_exe()[:-6] + 'link.exe' + else: + linker = compiler + + # Plan out compiler and linker commands. 
+ configs = { + '_detect_machine_architecture': ((), MUX_SOURCE_FILES), + '_stim_polyfill': ((), RELEVANT_SOURCE_FILES), + '_stim_sse2': ( + ('/arch:SSE2',) if is_windows else ('-msse2', '-mno-avx2',), + RELEVANT_SOURCE_FILES, + ), + # NOTE: disabled until https://github.com/quantumlib/Stim/issues/432 is fixed + # '_stim_avx': (('-msse2', '-mavx2',), RELEVANT_SOURCE_FILES), + } + + if is_windows: + so = 'pyd' + else: + so = 'so' + + with tempfile.TemporaryDirectory() as temp_dir: + link_outputs = [] + temp_dir: pathlib.Path = pathlib.Path(temp_dir) + compile_commands: list[list[str]] = [] + link_commands: list[list[str]] = [] + for name, (flags, files) in configs.items(): + object_paths = [] + for src_path in files: + out_path: str = str(temp_dir / name / src_path) + ".o" + object_paths.append(out_path) + pathlib.Path(out_path).parent.mkdir(parents=True, exist_ok=True) + if is_windows: + compile_commands.append([ + compiler, + "/c", src_path, + f"/Fo{out_path}", + "/Isrc", + f"/I{pybind11.get_include()}", + f"/I{sysconfig.get_path('include')}", + "/W4", + "/std:c++20", + "/O2", + "/MD", + "/EHsc", + "/nologo", + "/DNDEBUG", + f"/DVERSION_INFO={__version__}", + f"/DSTIM_PYBIND11_MODULE_NAME={name}", + *flags, + ]) + else: + compile_commands.append([ + compiler, + "-c", src_path, + "-o", out_path, + "-Isrc", + f"-I{pybind11.get_include()}", + f"-I{sysconfig.get_path('include')}", + "-Wall", + "-fPIC", + "-std=c++20", + "-fno-strict-aliasing", + "-fvisibility=hidden", + "-O3", + "-g0", + "-DNDEBUG", + f"-DVERSION_INFO={__version__}", + f"-DSTIM_PYBIND11_MODULE_NAME={name}", + *flags, + ]) + link_out = str(temp_dir / name / f'{name}.{so}') + if is_windows: + link_commands.append([ + linker, + "/DLL", + f"/OUT:{link_out}", + "/nologo", + f"/LIBPATH:{pathlib.Path(sysconfig.get_config_var('BINDIR')) / 'libs'}", + *object_paths, + ]) + else: + osx_flags = [] + if platform.system().lower() == 'darwin': + osx_flags = ["-undefined", "dynamic_lookup"] + link_commands.append([ 
+                linker,
+                "-shared",
+                *osx_flags,
+                "-o", link_out,
+                *object_paths,
+            ])
+
+        # Perform compilation and linking.
+        _run_processes_in_parallel("compiling", compile_commands)
+        _run_processes_in_parallel("linking", link_commands)
+
+        # Define the files to put into the wheel file.
+        files: dict[str, bytes] = {}
+        dist_info_dir = f"stim-{__version__}.dist-info"
+        files[f'{dist_info_dir}/top_level.txt'] = "stim".encode('UTF-8')
+        files[f'{dist_info_dir}/WHEEL'] = f"""
+Wheel-Version: 1.0
+Generator: stim_custom_setup
+Root-Is-Purelib: false
+Tag: {_get_wheel_tag()}
+""".lstrip().encode('UTF-8')
+        with open('LICENSE', 'rb') as f:
+            files[f'{dist_info_dir}/licenses/LICENSE'] = f.read()
+        for file in pathlib.Path("glue/python/src/stim").iterdir():
+            with open(file, 'rb') as f:
+                files[f'stim/{file.name}'] = f.read()
+        for name in configs.keys():
+            with open(temp_dir / name / f'{name}.{so}', 'rb') as f:
+                files[f'stim/{name}' + sysconfig.get_config_var('EXT_SUFFIX')] = f.read()
+        files[f'{dist_info_dir}/entry_points.txt'] = """
+[console_scripts]
+stim = stim._main_argv:main_argv
+""".strip().encode('UTF-8')
+        with open('glue/python/README.md', encoding='UTF-8') as f:
+            files[f'{dist_info_dir}/METADATA'] = (f"""
+Metadata-Version: 2.4
+Name: stim
+Version: {__version__}
+Summary: A fast library for analyzing with quantum stabilizer circuits.
+Home-page: https://github.com/quantumlib/stim
+Author: Craig Gidney
+Author-email: craig.gidney@gmail.com
+License: Apache 2
+Requires-Python: >=3.10.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: numpy
+
+""".lstrip() + f.read()).encode('UTF-8')
+
+        # Record files and their hashes in the RECORD file.
+        records = [f"{dist_info_dir}/RECORD,,"]
+        for k, v in files.items():
+            records.append(f"{k},{_get_content_hash(v)}")
+        files[f'{dist_info_dir}/RECORD'] = "\n".join(records).encode('UTF-8')
+
+        # Write the wheel file.
+ with zipfile.ZipFile(wheel_path, 'w', compression=zipfile.ZIP_DEFLATED) as f: + for k, v in files.items(): + f.writestr(k, v) + + return wheel_name + + +if __name__ == '__main__': + build_wheel('') diff --git a/pyproject.toml b/pyproject.toml index 408cd00bf..d8325843d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,4 @@ [build-system] requires = ["setuptools", "wheel", "pybind11~=2.11.1"] -build-backend = "setuptools.build_meta" +build-backend = "stim_custom_setup" +backend-path = ["glue/python/src"] diff --git a/setup.py b/setup.py deleted file mode 100644 index 238431d16..000000000 --- a/setup.py +++ /dev/null @@ -1,133 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import platform - -from setuptools import setup, Extension -import glob -import pybind11 - -ALL_SOURCE_FILES = glob.glob("src/**/*.cc", recursive=True) -MUX_SOURCE_FILES = glob.glob("src/**/march.pybind.cc", recursive=True) -TEST_FILES = glob.glob("src/**/*.test.cc", recursive=True) -PERF_FILES = glob.glob("src/**/*.perf.cc", recursive=True) -MAIN_FILES = glob.glob("src/**/main.cc", recursive=True) -HEADER_FILES = glob.glob("src/**/*.h", recursive=True) + glob.glob("src/**/*.inl", recursive=True) -RELEVANT_SOURCE_FILES = sorted(set(ALL_SOURCE_FILES) - set(TEST_FILES + PERF_FILES + MAIN_FILES + MUX_SOURCE_FILES)) - -__version__ = '1.16.dev0' - -if platform.system().startswith('Win'): - common_compile_args = [ - '/std:c++20', - '/O2', - f'/DVERSION_INFO={__version__}', - ] - arch_avx = ['/arch:AVX2'] - arch_sse = ['/arch:SSE2'] - arch_basic = [] -else: - common_compile_args = [ - '-std=c++20', - '-fno-strict-aliasing', - '-O3', - '-g0', - f'-DVERSION_INFO={__version__}', - ] - arch_avx = ['-mavx2'] - arch_sse = ['-msse2', '-mno-avx2'] - arch_basic = [] - -stim_detect_machine_architecture = Extension( - 'stim._detect_machine_architecture', - sources=MUX_SOURCE_FILES, - include_dirs=[pybind11.get_include(), "src"], - language='c++', - extra_compile_args=[ - *common_compile_args, - *arch_basic, - ], -) -stim_polyfill = Extension( - 'stim._stim_polyfill', - sources=RELEVANT_SOURCE_FILES, - include_dirs=[pybind11.get_include(), "src"], - language='c++', - extra_compile_args=[ - *common_compile_args, - *arch_basic, - '-DSTIM_PYBIND11_MODULE_NAME=_stim_polyfill', - ], -) -stim_sse2 = Extension( - 'stim._stim_sse2', - sources=RELEVANT_SOURCE_FILES, - include_dirs=[pybind11.get_include(), "src"], - language='c++', - extra_compile_args=[ - *common_compile_args, - *arch_sse, - '-DSTIM_PYBIND11_MODULE_NAME=_stim_sse2', - ], -) - -# NOTE: disabled until https://github.com/quantumlib/Stim/issues/432 is fixed -# stim_avx2 = Extension( -# 'stim._stim_avx2', -# 
sources=RELEVANT_SOURCE_FILES, -# include_dirs=[pybind11.get_include(), "src"], -# language='c++', -# extra_compile_args=[ -# *common_compile_args, -# *arch_avx, -# '-DSTIM_PYBIND11_MODULE_NAME=_stim_avx2', -# ], -# ) - -with open('glue/python/README.md', encoding='UTF-8') as f: - long_description = f.read() - -def _get_extensions(): - archs=["x86", "i686", "i386", "amd64"] - if any(_ext in platform.processor().lower() for _ext in archs): - # NOTE: disabled until https://github.com/quantumlib/Stim/issues/432 is fixed - # stim_avx2, - return [stim_detect_machine_architecture, stim_polyfill, - # stim_avx2, - stim_sse2] - else: - return [stim_detect_machine_architecture, stim_polyfill] - -setup( - name='stim', - version=__version__, - author='Craig Gidney', - author_email='craig.gidney@gmail.com', - url='https://github.com/quantumlib/stim', - license='Apache 2', - description='A fast library for analyzing with quantum stabilizer circuits.', - long_description=long_description, - long_description_content_type='text/markdown', - ext_modules=_get_extensions(), - python_requires='>=3.6.0', - packages=['stim'], - package_dir={'stim': 'glue/python/src/stim'}, - package_data={'': [*HEADER_FILES, 'glue/python/src/stim/__init__.pyi', 'glue/python/README.md', 'pyproject.toml']}, - include_package_data=True, - install_requires=['numpy'], - entry_points={ - 'console_scripts': ['stim=stim._main_argv:main_argv'], - }, - # Needed on Windows to avoid the default `build` colliding with Bazel's `BUILD`. 
- options={'build': {'build_base': 'python_build_stim'}}, -) diff --git a/src/stim/stabilizers/clifford_string.h b/src/stim/stabilizers/clifford_string.h index 8ca44c7b7..8df3f0a29 100644 --- a/src/stim/stabilizers/clifford_string.h +++ b/src/stim/stabilizers/clifford_string.h @@ -362,7 +362,7 @@ struct CliffordString { for (size_t k = 0; k < x_signs.num_simd_words; k++) { auto delta = word_at(k); CliffordWord> total{}; - for (size_t step = 0; step < power; step++) { + for (size_t step = 0; step < (size_t)power; step++) { total = total * delta; } set_word_at(k, total);