diff --git a/_build/html/.buildinfo b/_build/html/.buildinfo
new file mode 100644
index 000000000..018b64f4d
--- /dev/null
+++ b/_build/html/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 93b73046da2cf168a56c08972ad08e76
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/_build/html/.doctrees/contribute.doctree b/_build/html/.doctrees/contribute.doctree
new file mode 100644
index 000000000..ddd075086
Binary files /dev/null and b/_build/html/.doctrees/contribute.doctree differ
diff --git a/_build/html/.doctrees/discussions/deploying-python-applications.doctree b/_build/html/.doctrees/discussions/deploying-python-applications.doctree
new file mode 100644
index 000000000..efa2f6c2f
Binary files /dev/null and b/_build/html/.doctrees/discussions/deploying-python-applications.doctree differ
diff --git a/_build/html/.doctrees/discussions/distribution-package-vs-import-package.doctree b/_build/html/.doctrees/discussions/distribution-package-vs-import-package.doctree
new file mode 100644
index 000000000..1244f3eeb
Binary files /dev/null and b/_build/html/.doctrees/discussions/distribution-package-vs-import-package.doctree differ
diff --git a/_build/html/.doctrees/discussions/downstream-packaging.doctree b/_build/html/.doctrees/discussions/downstream-packaging.doctree
new file mode 100644
index 000000000..29b9ba6a9
Binary files /dev/null and b/_build/html/.doctrees/discussions/downstream-packaging.doctree differ
diff --git a/_build/html/.doctrees/discussions/index.doctree b/_build/html/.doctrees/discussions/index.doctree
new file mode 100644
index 000000000..e42feb27d
Binary files /dev/null and b/_build/html/.doctrees/discussions/index.doctree differ
diff --git a/_build/html/.doctrees/discussions/install-requires-vs-requirements.doctree b/_build/html/.doctrees/discussions/install-requires-vs-requirements.doctree
new file mode 100644
index 000000000..ce30d0bf1
Binary files /dev/null and b/_build/html/.doctrees/discussions/install-requires-vs-requirements.doctree differ
diff --git a/_build/html/.doctrees/discussions/package-formats.doctree b/_build/html/.doctrees/discussions/package-formats.doctree
new file mode 100644
index 000000000..cd5cac505
Binary files /dev/null and b/_build/html/.doctrees/discussions/package-formats.doctree differ
diff --git a/_build/html/.doctrees/discussions/pip-vs-easy-install.doctree b/_build/html/.doctrees/discussions/pip-vs-easy-install.doctree
new file mode 100644
index 000000000..7c5925bd9
Binary files /dev/null and b/_build/html/.doctrees/discussions/pip-vs-easy-install.doctree differ
diff --git a/_build/html/.doctrees/discussions/setup-py-deprecated.doctree b/_build/html/.doctrees/discussions/setup-py-deprecated.doctree
new file mode 100644
index 000000000..5b4e91dc5
Binary files /dev/null and b/_build/html/.doctrees/discussions/setup-py-deprecated.doctree differ
diff --git a/_build/html/.doctrees/discussions/single-source-version.doctree b/_build/html/.doctrees/discussions/single-source-version.doctree
new file mode 100644
index 000000000..90228ee9c
Binary files /dev/null and b/_build/html/.doctrees/discussions/single-source-version.doctree differ
diff --git a/_build/html/.doctrees/discussions/src-layout-vs-flat-layout.doctree b/_build/html/.doctrees/discussions/src-layout-vs-flat-layout.doctree
new file mode 100644
index 000000000..9ef5c5dcf
Binary files /dev/null and b/_build/html/.doctrees/discussions/src-layout-vs-flat-layout.doctree differ
diff --git a/_build/html/.doctrees/discussions/versioning.doctree b/_build/html/.doctrees/discussions/versioning.doctree
new file mode 100644
index 000000000..eb089fc4d
Binary files /dev/null and b/_build/html/.doctrees/discussions/versioning.doctree differ
diff --git a/_build/html/.doctrees/environment.pickle b/_build/html/.doctrees/environment.pickle
new file mode 100644
index 000000000..8ec91b549
Binary files /dev/null and b/_build/html/.doctrees/environment.pickle differ
diff --git a/_build/html/.doctrees/flow.doctree b/_build/html/.doctrees/flow.doctree
new file mode 100644
index 000000000..4e8c0b478
Binary files /dev/null and b/_build/html/.doctrees/flow.doctree differ
diff --git a/_build/html/.doctrees/glossary.doctree b/_build/html/.doctrees/glossary.doctree
new file mode 100644
index 000000000..92f32b130
Binary files /dev/null and b/_build/html/.doctrees/glossary.doctree differ
diff --git a/_build/html/.doctrees/guides/analyzing-pypi-package-downloads.doctree b/_build/html/.doctrees/guides/analyzing-pypi-package-downloads.doctree
new file mode 100644
index 000000000..18c212726
Binary files /dev/null and b/_build/html/.doctrees/guides/analyzing-pypi-package-downloads.doctree differ
diff --git a/_build/html/.doctrees/guides/creating-and-discovering-plugins.doctree b/_build/html/.doctrees/guides/creating-and-discovering-plugins.doctree
new file mode 100644
index 000000000..9cdd61bc4
Binary files /dev/null and b/_build/html/.doctrees/guides/creating-and-discovering-plugins.doctree differ
diff --git a/_build/html/.doctrees/guides/creating-command-line-tools.doctree b/_build/html/.doctrees/guides/creating-command-line-tools.doctree
new file mode 100644
index 000000000..581b6ffe4
Binary files /dev/null and b/_build/html/.doctrees/guides/creating-command-line-tools.doctree differ
diff --git a/_build/html/.doctrees/guides/distributing-packages-using-setuptools.doctree b/_build/html/.doctrees/guides/distributing-packages-using-setuptools.doctree
new file mode 100644
index 000000000..8e94ca46e
Binary files /dev/null and b/_build/html/.doctrees/guides/distributing-packages-using-setuptools.doctree differ
diff --git a/_build/html/.doctrees/guides/dropping-older-python-versions.doctree b/_build/html/.doctrees/guides/dropping-older-python-versions.doctree
new file mode 100644
index 000000000..7090f3fc7
Binary files /dev/null and b/_build/html/.doctrees/guides/dropping-older-python-versions.doctree differ
diff --git a/_build/html/.doctrees/guides/hosting-your-own-index.doctree b/_build/html/.doctrees/guides/hosting-your-own-index.doctree
new file mode 100644
index 000000000..537cb712c
Binary files /dev/null and b/_build/html/.doctrees/guides/hosting-your-own-index.doctree differ
diff --git a/_build/html/.doctrees/guides/index-mirrors-and-caches.doctree b/_build/html/.doctrees/guides/index-mirrors-and-caches.doctree
new file mode 100644
index 000000000..e28752e2a
Binary files /dev/null and b/_build/html/.doctrees/guides/index-mirrors-and-caches.doctree differ
diff --git a/_build/html/.doctrees/guides/index.doctree b/_build/html/.doctrees/guides/index.doctree
new file mode 100644
index 000000000..ca6bd4761
Binary files /dev/null and b/_build/html/.doctrees/guides/index.doctree differ
diff --git a/_build/html/.doctrees/guides/installing-scientific-packages.doctree b/_build/html/.doctrees/guides/installing-scientific-packages.doctree
new file mode 100644
index 000000000..cf108a2f3
Binary files /dev/null and b/_build/html/.doctrees/guides/installing-scientific-packages.doctree differ
diff --git a/_build/html/.doctrees/guides/installing-stand-alone-command-line-tools.doctree b/_build/html/.doctrees/guides/installing-stand-alone-command-line-tools.doctree
new file mode 100644
index 000000000..92874a7b0
Binary files /dev/null and b/_build/html/.doctrees/guides/installing-stand-alone-command-line-tools.doctree differ
diff --git a/_build/html/.doctrees/guides/installing-using-linux-tools.doctree b/_build/html/.doctrees/guides/installing-using-linux-tools.doctree
new file mode 100644
index 000000000..ceb003aef
Binary files /dev/null and b/_build/html/.doctrees/guides/installing-using-linux-tools.doctree differ
diff --git a/_build/html/.doctrees/guides/installing-using-pip-and-virtual-environments.doctree b/_build/html/.doctrees/guides/installing-using-pip-and-virtual-environments.doctree
new file mode 100644
index 000000000..cf3dbb5c8
Binary files /dev/null and b/_build/html/.doctrees/guides/installing-using-pip-and-virtual-environments.doctree differ
diff --git a/_build/html/.doctrees/guides/installing-using-virtualenv.doctree b/_build/html/.doctrees/guides/installing-using-virtualenv.doctree
new file mode 100644
index 000000000..e5fc7ff58
Binary files /dev/null and b/_build/html/.doctrees/guides/installing-using-virtualenv.doctree differ
diff --git a/_build/html/.doctrees/guides/licensing-examples-and-user-scenarios.doctree b/_build/html/.doctrees/guides/licensing-examples-and-user-scenarios.doctree
new file mode 100644
index 000000000..92d7d0cb7
Binary files /dev/null and b/_build/html/.doctrees/guides/licensing-examples-and-user-scenarios.doctree differ
diff --git a/_build/html/.doctrees/guides/making-a-pypi-friendly-readme.doctree b/_build/html/.doctrees/guides/making-a-pypi-friendly-readme.doctree
new file mode 100644
index 000000000..cefb161ec
Binary files /dev/null and b/_build/html/.doctrees/guides/making-a-pypi-friendly-readme.doctree differ
diff --git a/_build/html/.doctrees/guides/migrating-to-pypi-org.doctree b/_build/html/.doctrees/guides/migrating-to-pypi-org.doctree
new file mode 100644
index 000000000..510376f43
Binary files /dev/null and b/_build/html/.doctrees/guides/migrating-to-pypi-org.doctree differ
diff --git a/_build/html/.doctrees/guides/modernize-setup-py-project.doctree b/_build/html/.doctrees/guides/modernize-setup-py-project.doctree
new file mode 100644
index 000000000..12790bcd0
Binary files /dev/null and b/_build/html/.doctrees/guides/modernize-setup-py-project.doctree differ
diff --git a/_build/html/.doctrees/guides/multi-version-installs.doctree b/_build/html/.doctrees/guides/multi-version-installs.doctree
new file mode 100644
index 000000000..4ef9557e0
Binary files /dev/null and b/_build/html/.doctrees/guides/multi-version-installs.doctree differ
diff --git a/_build/html/.doctrees/guides/packaging-binary-extensions.doctree b/_build/html/.doctrees/guides/packaging-binary-extensions.doctree
new file mode 100644
index 000000000..06d12910b
Binary files /dev/null and b/_build/html/.doctrees/guides/packaging-binary-extensions.doctree differ
diff --git a/_build/html/.doctrees/guides/packaging-namespace-packages.doctree b/_build/html/.doctrees/guides/packaging-namespace-packages.doctree
new file mode 100644
index 000000000..d92a645c9
Binary files /dev/null and b/_build/html/.doctrees/guides/packaging-namespace-packages.doctree differ
diff --git a/_build/html/.doctrees/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.doctree b/_build/html/.doctrees/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.doctree
new file mode 100644
index 000000000..8bfaa1a9f
Binary files /dev/null and b/_build/html/.doctrees/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows.doctree differ
diff --git a/_build/html/.doctrees/guides/section-build-and-publish.doctree b/_build/html/.doctrees/guides/section-build-and-publish.doctree
new file mode 100644
index 000000000..372c51b3f
Binary files /dev/null and b/_build/html/.doctrees/guides/section-build-and-publish.doctree differ
diff --git a/_build/html/.doctrees/guides/section-hosting.doctree b/_build/html/.doctrees/guides/section-hosting.doctree
new file mode 100644
index 000000000..97aae7f91
Binary files /dev/null and b/_build/html/.doctrees/guides/section-hosting.doctree differ
diff --git a/_build/html/.doctrees/guides/section-install.doctree b/_build/html/.doctrees/guides/section-install.doctree
new file mode 100644
index 000000000..ced3ff48c
Binary files /dev/null and b/_build/html/.doctrees/guides/section-install.doctree differ
diff --git a/_build/html/.doctrees/guides/single-sourcing-package-version.doctree b/_build/html/.doctrees/guides/single-sourcing-package-version.doctree
new file mode 100644
index 000000000..1ecf9f1bf
Binary files /dev/null and b/_build/html/.doctrees/guides/single-sourcing-package-version.doctree differ
diff --git a/_build/html/.doctrees/guides/supporting-multiple-python-versions.doctree b/_build/html/.doctrees/guides/supporting-multiple-python-versions.doctree
new file mode 100644
index 000000000..c94663900
Binary files /dev/null and b/_build/html/.doctrees/guides/supporting-multiple-python-versions.doctree differ
diff --git a/_build/html/.doctrees/guides/supporting-windows-using-appveyor.doctree b/_build/html/.doctrees/guides/supporting-windows-using-appveyor.doctree
new file mode 100644
index 000000000..ccbcaf6af
Binary files /dev/null and b/_build/html/.doctrees/guides/supporting-windows-using-appveyor.doctree differ
diff --git a/_build/html/.doctrees/guides/tool-recommendations.doctree b/_build/html/.doctrees/guides/tool-recommendations.doctree
new file mode 100644
index 000000000..d30d6b80b
Binary files /dev/null and b/_build/html/.doctrees/guides/tool-recommendations.doctree differ
diff --git a/_build/html/.doctrees/guides/using-manifest-in.doctree b/_build/html/.doctrees/guides/using-manifest-in.doctree
new file mode 100644
index 000000000..a0cb5e4c7
Binary files /dev/null and b/_build/html/.doctrees/guides/using-manifest-in.doctree differ
diff --git a/_build/html/.doctrees/guides/using-testpypi.doctree b/_build/html/.doctrees/guides/using-testpypi.doctree
new file mode 100644
index 000000000..e8ff766a7
Binary files /dev/null and b/_build/html/.doctrees/guides/using-testpypi.doctree differ
diff --git a/_build/html/.doctrees/guides/writing-pyproject-toml.doctree b/_build/html/.doctrees/guides/writing-pyproject-toml.doctree
new file mode 100644
index 000000000..ead202e50
Binary files /dev/null and b/_build/html/.doctrees/guides/writing-pyproject-toml.doctree differ
diff --git a/_build/html/.doctrees/index.doctree b/_build/html/.doctrees/index.doctree
new file mode 100644
index 000000000..4797a50ba
Binary files /dev/null and b/_build/html/.doctrees/index.doctree differ
diff --git a/_build/html/.doctrees/key_projects.doctree b/_build/html/.doctrees/key_projects.doctree
new file mode 100644
index 000000000..a8b7a536a
Binary files /dev/null and b/_build/html/.doctrees/key_projects.doctree differ
diff --git a/_build/html/.doctrees/news.doctree b/_build/html/.doctrees/news.doctree
new file mode 100644
index 000000000..ae7b2908b
Binary files /dev/null and b/_build/html/.doctrees/news.doctree differ
diff --git a/_build/html/.doctrees/overview.doctree b/_build/html/.doctrees/overview.doctree
new file mode 100644
index 000000000..20822286c
Binary files /dev/null and b/_build/html/.doctrees/overview.doctree differ
diff --git a/_build/html/.doctrees/shared/build-backend-tabs.doctree b/_build/html/.doctrees/shared/build-backend-tabs.doctree
new file mode 100644
index 000000000..18f9b1457
Binary files /dev/null and b/_build/html/.doctrees/shared/build-backend-tabs.doctree differ
diff --git a/_build/html/.doctrees/specifications/binary-distribution-format.doctree b/_build/html/.doctrees/specifications/binary-distribution-format.doctree
new file mode 100644
index 000000000..34012ec87
Binary files /dev/null and b/_build/html/.doctrees/specifications/binary-distribution-format.doctree differ
diff --git a/_build/html/.doctrees/specifications/build-details/index.doctree b/_build/html/.doctrees/specifications/build-details/index.doctree
new file mode 100644
index 000000000..8120ef75f
Binary files /dev/null and b/_build/html/.doctrees/specifications/build-details/index.doctree differ
diff --git a/_build/html/.doctrees/specifications/build-details/v1.0.doctree b/_build/html/.doctrees/specifications/build-details/v1.0.doctree
new file mode 100644
index 000000000..8fa324a60
Binary files /dev/null and b/_build/html/.doctrees/specifications/build-details/v1.0.doctree differ
diff --git a/_build/html/.doctrees/specifications/core-metadata.doctree b/_build/html/.doctrees/specifications/core-metadata.doctree
new file mode 100644
index 000000000..9eb703cc6
Binary files /dev/null and b/_build/html/.doctrees/specifications/core-metadata.doctree differ
diff --git a/_build/html/.doctrees/specifications/dependency-groups.doctree b/_build/html/.doctrees/specifications/dependency-groups.doctree
new file mode 100644
index 000000000..4b1abef77
Binary files /dev/null and b/_build/html/.doctrees/specifications/dependency-groups.doctree differ
diff --git a/_build/html/.doctrees/specifications/dependency-specifiers.doctree b/_build/html/.doctrees/specifications/dependency-specifiers.doctree
new file mode 100644
index 000000000..576ca8b92
Binary files /dev/null and b/_build/html/.doctrees/specifications/dependency-specifiers.doctree differ
diff --git a/_build/html/.doctrees/specifications/direct-url-data-structure.doctree b/_build/html/.doctrees/specifications/direct-url-data-structure.doctree
new file mode 100644
index 000000000..186946fc5
Binary files /dev/null and b/_build/html/.doctrees/specifications/direct-url-data-structure.doctree differ
diff --git a/_build/html/.doctrees/specifications/direct-url.doctree b/_build/html/.doctrees/specifications/direct-url.doctree
new file mode 100644
index 000000000..fc5041b15
Binary files /dev/null and b/_build/html/.doctrees/specifications/direct-url.doctree differ
diff --git a/_build/html/.doctrees/specifications/entry-points.doctree b/_build/html/.doctrees/specifications/entry-points.doctree
new file mode 100644
index 000000000..f1d4ccdf5
Binary files /dev/null and b/_build/html/.doctrees/specifications/entry-points.doctree differ
diff --git a/_build/html/.doctrees/specifications/externally-managed-environments.doctree b/_build/html/.doctrees/specifications/externally-managed-environments.doctree
new file mode 100644
index 000000000..3c2e1e655
Binary files /dev/null and b/_build/html/.doctrees/specifications/externally-managed-environments.doctree differ
diff --git a/_build/html/.doctrees/specifications/file-yanking.doctree b/_build/html/.doctrees/specifications/file-yanking.doctree
new file mode 100644
index 000000000..27c931fe7
Binary files /dev/null and b/_build/html/.doctrees/specifications/file-yanking.doctree differ
diff --git a/_build/html/.doctrees/specifications/glob-patterns.doctree b/_build/html/.doctrees/specifications/glob-patterns.doctree
new file mode 100644
index 000000000..7b8953c75
Binary files /dev/null and b/_build/html/.doctrees/specifications/glob-patterns.doctree differ
diff --git a/_build/html/.doctrees/specifications/index-hosted-attestations.doctree b/_build/html/.doctrees/specifications/index-hosted-attestations.doctree
new file mode 100644
index 000000000..a11b91eba
Binary files /dev/null and b/_build/html/.doctrees/specifications/index-hosted-attestations.doctree differ
diff --git a/_build/html/.doctrees/specifications/index.doctree b/_build/html/.doctrees/specifications/index.doctree
new file mode 100644
index 000000000..5ba2275d4
Binary files /dev/null and b/_build/html/.doctrees/specifications/index.doctree differ
diff --git a/_build/html/.doctrees/specifications/inline-script-metadata.doctree b/_build/html/.doctrees/specifications/inline-script-metadata.doctree
new file mode 100644
index 000000000..4e102c63b
Binary files /dev/null and b/_build/html/.doctrees/specifications/inline-script-metadata.doctree differ
diff --git a/_build/html/.doctrees/specifications/license-expression.doctree b/_build/html/.doctrees/specifications/license-expression.doctree
new file mode 100644
index 000000000..aa48dcd5f
Binary files /dev/null and b/_build/html/.doctrees/specifications/license-expression.doctree differ
diff --git a/_build/html/.doctrees/specifications/name-normalization.doctree b/_build/html/.doctrees/specifications/name-normalization.doctree
new file mode 100644
index 000000000..ebc1fcb2b
Binary files /dev/null and b/_build/html/.doctrees/specifications/name-normalization.doctree differ
diff --git a/_build/html/.doctrees/specifications/platform-compatibility-tags.doctree b/_build/html/.doctrees/specifications/platform-compatibility-tags.doctree
new file mode 100644
index 000000000..32f83cf22
Binary files /dev/null and b/_build/html/.doctrees/specifications/platform-compatibility-tags.doctree differ
diff --git a/_build/html/.doctrees/specifications/project-status-markers.doctree b/_build/html/.doctrees/specifications/project-status-markers.doctree
new file mode 100644
index 000000000..a3f9a499a
Binary files /dev/null and b/_build/html/.doctrees/specifications/project-status-markers.doctree differ
diff --git a/_build/html/.doctrees/specifications/pylock-toml.doctree b/_build/html/.doctrees/specifications/pylock-toml.doctree
new file mode 100644
index 000000000..ad6b5d705
Binary files /dev/null and b/_build/html/.doctrees/specifications/pylock-toml.doctree differ
diff --git a/_build/html/.doctrees/specifications/pypirc.doctree b/_build/html/.doctrees/specifications/pypirc.doctree
new file mode 100644
index 000000000..93fdfcce7
Binary files /dev/null and b/_build/html/.doctrees/specifications/pypirc.doctree differ
diff --git a/_build/html/.doctrees/specifications/pyproject-toml.doctree b/_build/html/.doctrees/specifications/pyproject-toml.doctree
new file mode 100644
index 000000000..3b3b7fefa
Binary files /dev/null and b/_build/html/.doctrees/specifications/pyproject-toml.doctree differ
diff --git a/_build/html/.doctrees/specifications/recording-installed-packages.doctree b/_build/html/.doctrees/specifications/recording-installed-packages.doctree
new file mode 100644
index 000000000..44da5834b
Binary files /dev/null and b/_build/html/.doctrees/specifications/recording-installed-packages.doctree differ
diff --git a/_build/html/.doctrees/specifications/schemas/index.doctree b/_build/html/.doctrees/specifications/schemas/index.doctree
new file mode 100644
index 000000000..9450ce036
Binary files /dev/null and b/_build/html/.doctrees/specifications/schemas/index.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-distribution-formats.doctree b/_build/html/.doctrees/specifications/section-distribution-formats.doctree
new file mode 100644
index 000000000..f5662757c
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-distribution-formats.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-distribution-metadata.doctree b/_build/html/.doctrees/specifications/section-distribution-metadata.doctree
new file mode 100644
index 000000000..bf6ebeebc
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-distribution-metadata.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-installation-metadata.doctree b/_build/html/.doctrees/specifications/section-installation-metadata.doctree
new file mode 100644
index 000000000..df21764fd
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-installation-metadata.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-package-indices.doctree b/_build/html/.doctrees/specifications/section-package-indices.doctree
new file mode 100644
index 000000000..de1894f13
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-package-indices.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-python-description-formats.doctree b/_build/html/.doctrees/specifications/section-python-description-formats.doctree
new file mode 100644
index 000000000..58bc1d7b5
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-python-description-formats.doctree differ
diff --git a/_build/html/.doctrees/specifications/section-reproducible-environments.doctree b/_build/html/.doctrees/specifications/section-reproducible-environments.doctree
new file mode 100644
index 000000000..9b7f650ed
Binary files /dev/null and b/_build/html/.doctrees/specifications/section-reproducible-environments.doctree differ
diff --git a/_build/html/.doctrees/specifications/simple-repository-api.doctree b/_build/html/.doctrees/specifications/simple-repository-api.doctree
new file mode 100644
index 000000000..0ce97e584
Binary files /dev/null and b/_build/html/.doctrees/specifications/simple-repository-api.doctree differ
diff --git a/_build/html/.doctrees/specifications/source-distribution-format.doctree b/_build/html/.doctrees/specifications/source-distribution-format.doctree
new file mode 100644
index 000000000..6d2edf864
Binary files /dev/null and b/_build/html/.doctrees/specifications/source-distribution-format.doctree differ
diff --git a/_build/html/.doctrees/specifications/version-specifiers.doctree b/_build/html/.doctrees/specifications/version-specifiers.doctree
new file mode 100644
index 000000000..5fa54bf33
Binary files /dev/null and b/_build/html/.doctrees/specifications/version-specifiers.doctree differ
diff --git a/_build/html/.doctrees/specifications/virtual-environments.doctree b/_build/html/.doctrees/specifications/virtual-environments.doctree
new file mode 100644
index 000000000..66d96325f
Binary files /dev/null and b/_build/html/.doctrees/specifications/virtual-environments.doctree differ
diff --git a/_build/html/.doctrees/specifications/well-known-project-urls.doctree b/_build/html/.doctrees/specifications/well-known-project-urls.doctree
new file mode 100644
index 000000000..920bb22e6
Binary files /dev/null and b/_build/html/.doctrees/specifications/well-known-project-urls.doctree differ
diff --git a/_build/html/.doctrees/support.doctree b/_build/html/.doctrees/support.doctree
new file mode 100644
index 000000000..b6c369713
Binary files /dev/null and b/_build/html/.doctrees/support.doctree differ
diff --git a/_build/html/.doctrees/tutorials/creating-documentation.doctree b/_build/html/.doctrees/tutorials/creating-documentation.doctree
new file mode 100644
index 000000000..f7a409d4f
Binary files /dev/null and b/_build/html/.doctrees/tutorials/creating-documentation.doctree differ
diff --git a/_build/html/.doctrees/tutorials/index.doctree b/_build/html/.doctrees/tutorials/index.doctree
new file mode 100644
index 000000000..a8abe328e
Binary files /dev/null and b/_build/html/.doctrees/tutorials/index.doctree differ
diff --git a/_build/html/.doctrees/tutorials/installing-packages.doctree b/_build/html/.doctrees/tutorials/installing-packages.doctree
new file mode 100644
index 000000000..efb4c7408
Binary files /dev/null and b/_build/html/.doctrees/tutorials/installing-packages.doctree differ
diff --git a/_build/html/.doctrees/tutorials/managing-dependencies.doctree b/_build/html/.doctrees/tutorials/managing-dependencies.doctree
new file mode 100644
index 000000000..1be8ec411
Binary files /dev/null and b/_build/html/.doctrees/tutorials/managing-dependencies.doctree differ
diff --git a/_build/html/.doctrees/tutorials/packaging-projects.doctree b/_build/html/.doctrees/tutorials/packaging-projects.doctree
new file mode 100644
index 000000000..696596168
Binary files /dev/null and b/_build/html/.doctrees/tutorials/packaging-projects.doctree differ
diff --git a/_build/html/_images/py_pkg_applications.png b/_build/html/_images/py_pkg_applications.png
new file mode 100644
index 000000000..bc3187dbf
Binary files /dev/null and b/_build/html/_images/py_pkg_applications.png differ
diff --git a/_build/html/_images/py_pkg_tools_and_libs.png b/_build/html/_images/py_pkg_tools_and_libs.png
new file mode 100644
index 000000000..707f1081d
Binary files /dev/null and b/_build/html/_images/py_pkg_tools_and_libs.png differ
diff --git a/_build/html/_sources/contribute.rst.txt b/_build/html/_sources/contribute.rst.txt
new file mode 100644
index 000000000..f512dd30d
--- /dev/null
+++ b/_build/html/_sources/contribute.rst.txt
@@ -0,0 +1,291 @@
+.. |PyPUG| replace:: Python Packaging User Guide
+
+************************
+Contribute to this guide
+************************
+
+The |PyPUG| welcomes contributors! There are lots of ways to help out,
+including:
+
+* Reading the guide and giving feedback
+* Reviewing new contributions
+* Revising existing content
+* Writing new content
+* Translating the guide
+
+Most of the work on the |PyPUG| takes place on the
+`project's GitHub repository`__. To get started, check out the list of
+`open issues`__ and `pull requests`__. If you're planning to write or edit
+the guide, please read the :ref:`style guide <contributing_style_guide>`.
+
+.. __: https://github.com/pypa/packaging.python.org/
+.. __: https://github.com/pypa/packaging.python.org/issues
+.. __: https://github.com/pypa/packaging.python.org/pulls
+
+By contributing to the |PyPUG|, you're expected to follow the PSF's
+`Code of Conduct`__.
+
+.. __: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+
+Documentation types
+===================
+
+This project consists of four distinct documentation types with specific
+purposes. The project aspires to follow the `Diátaxis process`_
+for creating quality documentation. When proposing new additions to the project please pick the
+appropriate documentation type.
+
+.. _Diátaxis process: https://diataxis.fr/
+
+Tutorials
+---------
+
+Tutorials are focused on teaching the reader new concepts by accomplishing a
+goal. They are opinionated step-by-step guides. They do not include extraneous
+warnings or information. `example tutorial-style document`_.
+
+.. _example tutorial-style document: https://docs.djangoproject.com/en/dev/intro/
+
+Guides
+------
+
+Guides are focused on accomplishing a specific task and can assume some level of
+pre-requisite knowledge. These are similar to tutorials, but have a narrow and
+clear focus and can provide lots of caveats and additional information as
+needed. They may also discuss multiple approaches to accomplishing the task.
+:doc:`example guide-style document </guides/packaging-namespace-packages>`.
+
+Discussions
+-----------
+
+Discussions are focused on understanding and information. These explore a
+specific topic without a specific goal in mind. :doc:`example discussion-style
+document </discussions/install-requires-vs-requirements>`.
+
+Specifications
+--------------
+
+Specifications are reference documentation focused on comprehensively documenting
+an agreed-upon interface for interoperability between packaging tools.
+:doc:`example specification-style document </specifications/core-metadata>`.
+
+
+Translations
+============
+
+We use `Weblate`_ to manage translations of this project.
+Please visit the `packaging.python.org`_ project on Weblate to contribute.
+
+If you are experiencing issues while you are working on translations,
+please open an issue on `GitHub`_.
+
+.. tip::
+
+ Any translations of this project should follow `reStructuredText syntax`_.
+
+.. _Weblate: https://weblate.org/
+.. _packaging.python.org: https://hosted.weblate.org/projects/pypa/packaging-python-org/
+.. _GitHub: https://github.com/pypa/packaging.python.org/issues
+.. _reStructuredText syntax: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+
+Adding a language
+-----------------
+
+If your language is not listed on `packaging.python.org`_, click the button
+:guilabel:`Start new translation` at the bottom of the language list and add
+the language you want to translate.
+
+Following reStructuredText syntax
+---------------------------------
+
+If you are not familiar with reStructuredText (RST) syntax, please read `this guide`_
+before translating on Weblate.
+
+**Do not translate the text in reference directly**
+ When translating the text in reference, please do not translate them directly.
+
+ | Wrong: Translate the following text directly:
+
+ .. code-block:: rst
+
+ `some ref`_ -> `TRANSLATED TEXT HERE`_
+
+ | Right: Translate the following text with your own language and add the original reference:
+
+ .. code-block:: rst
+
+      `some ref`_ -> `TRANSLATED TEXT HERE <some ref_>`_
+
+.. _this guide: https://docutils.sourceforge.io/docs/user/rst/quickref.html
+
+Building the guide locally
+==========================
+
+Though not required to contribute, it may be useful to build this guide locally
+in order to test your changes. In order to build this guide locally, you'll
+need:
+
+1. :doc:`Nox <nox:index>`. You can install or upgrade
+ nox using ``pip``:
+
+ .. code-block:: bash
+
+ python -m pip install --user nox
+
+2. Python 3.11. Our build scripts are usually tested with Python 3.11 only.
+   See the :doc:`Hitchhiker's Guide to Python installation instructions <python-guide:starting/installation>`
+ to install Python 3.11 on your operating system.
+
+To build the guide, run the following shell command in the project's root folder:
+
+.. code-block:: bash
+
+ nox -s build
+
+After the process has completed you can find the HTML output in the
+``./build/html`` directory. You can open the ``index.html`` file to view the
+guide in web browser, but it's recommended to serve the guide using an HTTP
+server.
+
+You can build the guide and serve it via an HTTP server using the following
+command:
+
+.. code-block:: bash
+
+ nox -s preview
+
+The guide will be browsable via http://localhost:8000.
+
+
+Where the guide is deployed
+===========================
+
+The guide is deployed via ReadTheDocs and the configuration lives at https://readthedocs.org/projects/python-packaging-user-guide/. It's served from a custom domain and fronted by Fast.ly.
+
+
+.. _contributing_style_guide:
+
+Style guide
+===========
+
+This style guide has recommendations for how you should write the |PyPUG|.
+Before you start writing, please review it. By following the style guide, your
+contributions will help add to a cohesive whole and make it easier for your
+contributions to be accepted into the project.
+
+
+Purpose
+-------
+
+The purpose of the |PyPUG| is to be the authoritative resource on how to
+package, publish, and install Python projects using current tools.
+
+
+Scope
+-----
+
+The guide is meant to answer questions and solve problems with accurate and
+focused recommendations.
+
+The guide isn't meant to be comprehensive and it's not meant to replace
+individual projects' documentation. For example, pip has dozens of commands,
+options, and settings. The pip documentation describes each of them in detail,
+while this guide describes only the parts of pip that are needed to complete the
+specific tasks described in this guide.
+
+
+Audience
+--------
+
+The audience of this guide is anyone who uses Python with packages.
+
+Don't forget that the Python community is big and welcoming. Readers may not
+share your age, gender, education, culture, and more, but they deserve to learn
+about packaging just as much as you do.
+
+In particular, keep in mind that not all people who use Python see themselves as
+programmers. The audience of this guide includes astronomers or painters or
+students as well as professional software developers.
+
+
+Voice and tone
+--------------
+
+When writing this guide, strive to write with a voice that's approachable and
+humble, even if you have all the answers.
+
+Imagine you're working on a Python project with someone you know to be smart and
+skilled. You like working with them and they like working with you. That person
+has asked you a question and you know the answer. How do you respond? *That* is
+how you should write this guide.
+
+Here's a quick check: try reading aloud to get a sense for your writing's voice
+and tone. Does it sound like something you would say or does it sound like
+you're acting out a part or giving a speech? Feel free to use contractions and
+don't worry about sticking to fussy grammar rules. You are hereby granted
+permission to end a sentence in a preposition, if that's what you want to end it
+with.
+
+When writing the guide, adjust your tone for the seriousness and difficulty of
+the topic. If you're writing an introductory tutorial, it's OK to make a joke,
+but if you're covering a sensitive security recommendation, you might want to
+avoid jokes altogether.
+
+
+Conventions and mechanics
+-------------------------
+
+**Write to the reader**
+ When giving recommendations or steps to take, address the reader as *you*
+ or use the imperative mood.
+
+ | Wrong: To install it, the user runs…
+ | Right: You can install it by running…
+ | Right: To install it, run…
+
+**State assumptions**
+ Avoid making unstated assumptions. Reading on the web means that any page of
+ the guide may be the first page of the guide that the reader ever sees.
+ If you're going to make assumptions, then say what assumptions that you're
+ going to make.
+
+**Cross-reference generously**
+ The first time you mention a tool or practice, link to the part of the
+ guide that covers it, or link to a relevant document elsewhere. Save the
+ reader a search.
+
+**Respect naming practices**
+ When naming tools, sites, people, and other proper nouns, use their preferred
+ capitalization.
+
+ | Wrong: Pip uses…
+ | Right: pip uses…
+ |
+ | Wrong: …hosted on github.
+ | Right: …hosted on GitHub.
+
+**Use a gender-neutral style**
+ Often, you'll address the reader directly with *you*, *your* and *yours*.
+ Otherwise, use gender-neutral pronouns *they*, *their*, and *theirs* or avoid
+ pronouns entirely.
+
+ | Wrong: A maintainer uploads the file. Then he…
+ | Right: A maintainer uploads the file. Then they…
+ | Right: A maintainer uploads the file. Then the maintainer…
+
+**Headings**
+ Write headings that use words the reader is searching for. A good way to
+ do this is to have your heading complete an implied question. For example, a
+ reader might want to know *How do I install MyLibrary?* so a good heading
+ might be *Install MyLibrary*.
+
+ In section headings, use sentence case. In other words, write headings as you
+ would write a typical sentence.
+
+ | Wrong: Things You Should Know About Python
+ | Right: Things you should know about Python
+
+**Numbers**
+ In body text, write numbers one through nine as words. For other numbers or
+ numbers in tables, use numerals.
diff --git a/_build/html/_sources/discussions/deploying-python-applications.rst.txt b/_build/html/_sources/discussions/deploying-python-applications.rst.txt
new file mode 100644
index 000000000..e10f36f9c
--- /dev/null
+++ b/_build/html/_sources/discussions/deploying-python-applications.rst.txt
@@ -0,0 +1,139 @@
+
+=============================
+Deploying Python applications
+=============================
+
+:Page Status: Incomplete
+:Last Reviewed: 2021-08-24
+
+
+Overview
+========
+
+
+Supporting multiple hardware platforms
+--------------------------------------
+
+::
+
+ FIXME
+
+ Meaning: x86, x64, ARM, others?
+
+ For Python-only distributions, it *should* be straightforward to deploy on all
+ platforms where Python can run.
+
+ For distributions with binary extensions, deployment is a major headache. Not only
+ must the extensions be built on all the combinations of operating system and
+ hardware platform, but they must also be tested, preferably on continuous
+ integration platforms. The issues are similar to the "multiple Python
+ versions" section above, not sure whether this should be a separate section.
+ Even on Windows x64, both the 32 bit and 64 bit versions of Python enjoy
+ significant usage.
+
+
+
+OS packaging & installers
+=========================
+
+::
+
+ FIXME
+
+ - Building rpm/debs for projects
+ - Building rpms/debs for whole virtualenvs
+ - Building macOS installers for Python projects
+ - Building Android APKs with Kivy+P4A or P4A & Buildozer
+
+Windows
+-------
+
+::
+
+ FIXME
+
+ - Building Windows installers for Python projects
+
+Pynsist
+^^^^^^^
+
+`Pynsist <https://pypi.org/project/pynsist/>`__ is a tool that bundles Python
+programs together with the Python-interpreter into a single installer based on
+NSIS. In most cases, packaging only requires the user to choose a version of
+the Python-interpreter and declare the dependencies of the program. The tool
+downloads the specified Python-interpreter for Windows and packages it with all
+the dependencies in a single Windows-executable installer.
+
+The installed program can be started from a shortcut that the installer adds to
+the start-menu. It uses a Python interpreter installed within its application
+directory, independent of any other Python installation on the computer.
+
+A big advantage of Pynsist is that the Windows packages can be built on Linux.
+There are several examples for different kinds of programs (console, GUI) in
+the :any:`documentation <pynsist:index>`. The tool is released
+under the MIT-licence.
+
+Application bundles
+===================
+
+::
+
+ FIXME
+
+ - wheels kinda/sorta
+
+Windows
+-------
+
+py2exe
+^^^^^^
+
+`py2exe <https://pypi.org/project/py2exe/>`__ is a distutils extension which
+allows to build standalone Windows executable programs (32-bit and 64-bit)
+from Python scripts. Python versions included in the official development
+cycle are supported (refers to `Status of Python branches`__). py2exe can
+build console executables and windows (GUI) executables. Building windows
+services, and DLL/EXE COM servers might work but it is not actively supported.
+The distutils extension is released under the MIT-licence and Mozilla
+Public License 2.0.
+
+.. __: https://devguide.python.org/#status-of-python-branches
+
+macOS
+-----
+
+py2app
+^^^^^^
+
+`py2app <https://pypi.org/project/py2app/>`__ is a Python setuptools
+command which will allow you to make standalone macOS application
+bundles and plugins from Python scripts. Note that py2app MUST be used
+on macOS to build applications, it cannot create Mac applications on other
+platforms. py2app is released under the MIT-license.
+
+Unix (including Linux and macOS)
+-----------------------------------
+
+pex
+^^^
+
+`pex <https://pypi.org/project/pex/>`__ is a library for generating .pex
+(Python EXecutable) files which are executable Python environments in the
+spirit of virtualenvs. pex is an expansion upon the ideas outlined in :pep:`441`
+and makes the deployment of Python applications as simple as cp. pex files may
+even include multiple platform-specific Python distributions, meaning that a
+single pex file can be portable across Linux and macOS. pex is released under the
+Apache License 2.0.
+
+Configuration management
+========================
+
+::
+
+ FIXME
+
+ puppet
+ salt
+ chef
+ ansible
+ fabric
diff --git a/_build/html/_sources/discussions/distribution-package-vs-import-package.rst.txt b/_build/html/_sources/discussions/distribution-package-vs-import-package.rst.txt
new file mode 100644
index 000000000..65e7019c6
--- /dev/null
+++ b/_build/html/_sources/discussions/distribution-package-vs-import-package.rst.txt
@@ -0,0 +1,110 @@
+.. _distribution-package-vs-import-package:
+
+=======================================
+Distribution package vs. import package
+=======================================
+
+A number of different concepts are commonly referred to by the word
+"package". This page clarifies the differences between two distinct but
+related meanings in Python packaging, "distribution package" and "import
+package".
+
+What's a distribution package?
+==============================
+
+A distribution package is a piece of software that you can install.
+Most of the time, this is synonymous with "project". When you type ``pip
+install pkg``, or when you write ``dependencies = ["pkg"]`` in your
+``pyproject.toml``, ``pkg`` is the name of a distribution package. When
+you search or browse the PyPI_, the most widely known centralized source for
+installing Python libraries and tools, what you see is a list of distribution
+packages. Alternatively, the term "distribution package" can be used to
+refer to a specific file that contains a certain version of a project.
+
+Note that in the Linux world, a "distribution package",
+most commonly abbreviated as "distro package" or just "package",
+is something provided by the system package manager of the `Linux distribution <distro_>`_,
+which is a different meaning.
+
+
+What's an import package?
+=========================
+
+An import package is a Python module. Thus, when you write ``import
+pkg`` or ``from pkg import func`` in your Python code, ``pkg`` is the
+name of an import package. More precisely, import packages are special
+Python modules that can contain submodules. For example, the ``numpy``
+package contains modules like ``numpy.linalg`` and
+``numpy.fft``. Usually, an import package is a directory on the file
+system, containing modules as ``.py`` files and subpackages as
+subdirectories.
+
+You can use an import package as soon as you have installed a distribution
+package that provides it.
+
+
+What are the links between distribution packages and import packages?
+=====================================================================
+
+Most of the time, a distribution package provides one single import
+package (or non-package module), with a matching name. For example,
+``pip install numpy`` lets you ``import numpy``.
+
+However, this is only a convention. PyPI and other package indices *do not
+enforce any relationship* between the name of a distribution package and the
+import packages it provides. (A consequence of this is that you cannot blindly
+install the PyPI package ``foo`` if you see ``import foo``; this may install an
+unintended, and potentially even malicious package.)
+
+A distribution package could provide an import package with a different
+name. An example of this is the popular Pillow_ library for image
+processing. Its distribution package name is ``Pillow``, but it provides
+the import package ``PIL``. This is for historical reasons: Pillow
+started as a fork of the PIL library, thus it kept the import name
+``PIL`` so that existing PIL users could switch to Pillow with little
+effort. More generally, a fork of an existing library is a common reason
+for differing names between the distribution package and the import
+package.
+
+On a given package index (like PyPI), distribution package names must be
+unique. On the other hand, import packages have no such requirement.
+Import packages with the same name can be provided by several
+distribution packages. Again, forks are a common reason for this.
+
+Conversely, a distribution package can provide several import packages,
+although this is less common. An example is the attrs_ distribution
+package, which provides both an ``attrs`` import package with a newer
+API, and an ``attr`` import package with an older but supported API.
+
+
+How do distribution package names and import package names compare?
+===================================================================
+
+Import packages should have valid Python identifiers as their name (the
+:ref:`exact rules <python:identifiers>` are found in the Python
+documentation) [#non-identifier-mod-name]_. In particular, they use underscores ``_`` as word
+separator and they are case-sensitive.
+
+On the other hand, distribution packages can use hyphens ``-`` or
+underscores ``_``. They can also contain dots ``.``, which is sometimes
+used for packaging a subpackage of a :ref:`namespace package
+<packaging-namespace-packages>`. For most purposes, they are insensitive
+to case and to ``-`` vs. ``_`` differences, e.g., ``pip install
+Awesome_Package`` is the same as ``pip install awesome-package`` (the
+precise rules are given in the :ref:`name normalization specification
+<name-normalization>`).
+
+
+
+---------------------------
+
+.. [#non-identifier-mod-name] Although it is technically possible
+ to import packages/modules that do not have a valid Python identifier as
+   their name, using :doc:`importlib <python:library/importlib>`,
+ this is vanishingly rare and strongly discouraged.
+
+
+.. _distro: https://en.wikipedia.org/wiki/Linux_distribution
+.. _PyPI: https://pypi.org
+.. _Pillow: https://pypi.org/project/Pillow
+.. _attrs: https://pypi.org/project/attrs
diff --git a/_build/html/_sources/discussions/downstream-packaging.rst.txt b/_build/html/_sources/discussions/downstream-packaging.rst.txt
new file mode 100644
index 000000000..3f4795fa8
--- /dev/null
+++ b/_build/html/_sources/discussions/downstream-packaging.rst.txt
@@ -0,0 +1,481 @@
+.. _downstream-packaging:
+
+===============================
+Supporting downstream packaging
+===============================
+
+:Page Status: Draft
+:Last Reviewed: 2025-?
+
+While PyPI and the Python packaging tools such as :ref:`pip` are the primary
+means of distributing Python packages, they are also often made available as part
+of other packaging ecosystems. These repackaging efforts are collectively called
+*downstream* packaging (your own efforts are called *upstream* packaging),
+and include such projects as Linux distributions, Conda, Homebrew and MacPorts.
+They generally aim to provide improved support for use cases that cannot be handled
+via Python packaging tools alone, such as native integration with a specific operating
+system, or assured compatibility with specific versions of non-Python software.
+
+This discussion attempts to explain how downstream packaging is usually done,
+and what additional challenges downstream packagers typically face. It aims
+to provide some optional guidelines that project maintainers may choose to
+follow which help make downstream packaging *significantly* easier
+(without imposing any major maintenance hassles on the upstream project).
+Note that this is not an all-or-nothing proposal — anything that upstream
+maintainers can do is useful, even if it's only a small part. Downstream
+maintainers are also willing to prepare patches to resolve these issues.
+Having these patches merged can be very helpful, since it removes the need
+for different downstreams to carry and keep rebasing the same patches,
+and the risk of applying inconsistent solutions to the same problem.
+
+Establishing a good relationship between software maintainers and downstream
+packagers can bring mutual benefits. Downstreams are often willing to share
+their experience, time and hardware to improve your package. They are
+sometimes in a better position to see how your package is used in practice,
+and to provide information about its relationships with other packages that
+would otherwise require significant effort to obtain.
+Packagers can often find bugs before your users hit them in production,
+provide bug reports of good quality, and supply patches whenever they can.
+For example, they are regularly active in ensuring the packages they redistribute
+are updated for any compatibility issues that arise when a new Python version
+is released.
+
+Please note that downstream builds include not only binary redistribution,
+but also source builds done on user systems (in source-first distributions
+such as Gentoo Linux, for example).
+
+
+.. _provide-complete-source-distributions:
+
+Provide complete source distributions
+-------------------------------------
+
+Why?
+~~~~
+
+The vast majority of downstream packagers prefer to build packages from source,
+rather than use the upstream-provided binary packages. In some cases, using
+sources is actually required for the package to be included in the distribution.
+This is also true of pure Python packages that provide universal wheels.
+The reasons for using source distributions may include:
+
+- Being able to audit the source code of all packages.
+
+- Being able to run the test suite and build documentation.
+
+- Being able to easily apply patches, including backporting commits
+ from the project's repository and sending patches back to the project.
+
+- Being able to build on a specific platform that is not covered
+ by upstream builds.
+
+- Being able to build against specific versions of system libraries.
+
+- Having a consistent build process across all Python packages.
+
+While it is usually possible to build packages from a Git repository, there are
+a few important reasons to provide a static archive file instead:
+
+- Fetching a single file is often more efficient, more reliable and better
+ supported than e.g. using a Git clone. This can help users with poor
+ Internet connectivity.
+
+- Downstreams often use hashes to verify the authenticity of source files
+ on subsequent builds, which require that they remain bitwise identical over
+ time. For example, automatically generated Git archives do not guarantee
+ this, as the compressed data may change if gzip is upgraded on the server.
+
+- Archive files can be mirrored, reducing both upstream and downstream
+ bandwidth use. The actual builds can afterwards be performed in firewalled
+ or offline environments, that can only access source files provided
+ by the local mirror or redistributed earlier.
+
+- Explicitly publishing archive files can ensure that any dependencies on version control
+ system metadata are resolved when creating the source archive. For example, automatically
+ generated Git archives omit all of the commit tag information, potentially resulting in
+ incorrect version details in the resulting builds.
+
+How?
+~~~~
+
+Ideally, **a source distribution archive published on PyPI should include all the files
+from the package's Git repository** that are necessary to build the package
+itself, run its test suite, build and install its documentation, and any other
+files that may be useful to end users, such as shell completions, editor
+support files, and so on.
+
+This point applies only to the files belonging to the package itself.
+The downstream packaging process, much like Python package managers, will
+provision the necessary Python dependencies, system tools and external
+libraries that are needed by your package and its build scripts. However,
+the files listing these dependencies (for example, ``requirements*.txt`` files)
+should also be included, to help downstreams determine the needed dependencies,
+and check for changes in them.
+
+Some projects have concerns related to Python package managers using source
+distributions from PyPI. They do not wish to increase their size with files
+that are not used by these tools, or they do not wish to publish source
+distributions at all, as they enable a problematic or outright nonfunctional
+fallback to building the particular project from source. In these cases, a good
+compromise may be to publish a separate source archive for downstream use
+elsewhere, for example by attaching it to a GitHub release. Alternatively,
+large files, such as test data, can be split into separate archives.
+
+On the other hand, some projects (NumPy_, for instance) decide to include tests
+in their installed packages. This has the added advantage of permitting users to
+run tests after installing them, for example to check for regressions
+after upgrading a dependency. Yet another approach is to split tests or test
+data into a separate Python package. Such an approach was taken by
+the cryptography_ project, with the large test vectors being split
+to cryptography-vectors_ package.
+
+A good idea is to use your source distribution in the release workflow.
+For example, the :ref:`build` tool does exactly that — it first builds a source
+distribution, and then uses it to build a wheel. This ensures that the source
+distribution actually works, and that it won't accidentally install fewer files
+than the official wheels.
+
+Ideally, also use the source distribution to run tests, build documentation,
+and so on, or add specific tests to make sure that all necessary files were
+actually included. Understandably, this requires more effort, so it's fine
+not to do that — downstream packagers will report any missing files promptly.
+
+
+.. _no-internet-access-in-builds:
+
+Do not use the Internet during the build process
+------------------------------------------------
+
+Why?
+~~~~
+
+Downstream builds are frequently done in sandboxed environments that cannot
+access the Internet. The package sources are unpacked into this environment,
+and all the necessary dependencies are installed.
+
+Even if this is not the case, and assuming that you took sufficient care to
+properly authenticate downloads, using the Internet is discouraged for a number
+of reasons:
+
+- The Internet connection may be unstable (e.g. due to poor reception)
+ or suffer from temporary problems that could cause the process to fail
+ or hang.
+
+- The remote resources may become temporarily or even permanently
+ unavailable, making the build no longer possible. This is especially
+ problematic when someone needs to build an old package version.
+
+- The remote resources may change, making the build not reproducible.
+
+- Accessing remote servers poses a privacy issue and a potential
+ security issue, as it exposes information about the system building
+ the package.
+
+- The user may be using a service with a limited data plan, in which
+ uncontrolled Internet access may result in additional charges or other
+ inconveniences.
+
+How?
+~~~~
+
+If the package is implementing any custom build *backend* actions that use
+the Internet, for example by automatically downloading vendored dependencies
+or fetching Git submodules, its source distribution should either include all
+of these files or allow provisioning them externally, and the Internet must not
+be used if the files are already present.
+
+Note that this point does not apply to Python dependencies that are specified
+in the package metadata, and are fetched during the build and installation
+process by *frontends* (such as :ref:`build` or :ref:`pip`). Downstreams use
+frontends that use local provisioning for Python dependencies.
+
+Ideally, custom build scripts should not even attempt to access the Internet
+at all, unless explicitly requested to. If any resources are missing and need
+to be fetched, they should ask the user for permission first. If that is not
+feasible, the next best thing is to provide an opt-out switch to disable
+all Internet access. This could be done e.g. by checking whether
+a ``NO_NETWORK`` environment variable is set to a non-empty value.
+
+Since downstreams frequently also run tests and build documentation, the above
+should ideally extend to these processes as well.
+
+Please also remember that if you are fetching remote resources, you absolutely
+must *verify their authenticity* (usually against a hash), to protect against
+the file being substituted by a malicious party.
+
+
+.. _support-system-dependencies-in-builds:
+
+Support building against system dependencies
+--------------------------------------------
+
+Why?
+~~~~
+
+Some Python projects have non-Python dependencies, such as libraries written
+in C or C++. Trying to use the system versions of these dependencies
+in upstream packaging may cause a number of problems for end users:
+
+- The published wheels require a binary-compatible version of the used
+ library to be present on the user's system. If the library is missing
+ or an incompatible version is installed, the Python package may fail with errors
+ that are not clear to inexperienced users, or even misbehave at runtime.
+
+- Building from a source distribution requires a source-compatible version
+ of the dependency to be present, along with its development headers
+ and other auxiliary files that some systems package separately
+ from the library itself.
+
+- Even for an experienced user, installing a compatible dependency version
+ may be very hard. For example, the used Linux distribution may not provide
+ the required version, or some other package may require an incompatible
+ version.
+
+- The linkage between the Python package and its system dependency is not
+ recorded by the packaging system. The next system update may upgrade
+ the library to a newer version that breaks binary compatibility with
+ the Python package, and requires user intervention to fix.
+
+For these reasons, you may reasonably decide to either statically link
+your dependencies, or to provide local copies in the installed package.
+You may also vendor the dependency in your source distribution. Sometimes
+these dependencies are also repackaged on PyPI, and can be declared as
+project dependencies like any other Python package.
+
+However, none of these issues apply to downstream packaging, and downstreams
+have good reasons to prefer dynamically linking to system dependencies.
+In particular:
+
+- In many cases, reliably sharing dynamic dependencies between components is a large part
+ of the *purpose* of a downstream packaging ecosystem. Helping to support that makes it
+ easier for users of those systems to access upstream projects in their preferred format.
+
+- Static linking and vendoring obscures the use of external dependencies,
+ making source auditing harder.
+
+- Dynamic linking makes it possible to quickly and systematically replace the used
+ libraries across an entire downstream packaging ecosystem, which can be particularly
+ important when they turn out to contain a security vulnerability or critical bug.
+
+- Using system dependencies makes the package benefit from downstream
+ customization that can improve the user experience on a particular platform,
+ without the downstream maintainers having to consistently patch
+ the dependencies vendored in different packages. This can include
+ compatibility improvements and security hardening.
+
+- Static linking and vendoring can result in multiple different versions of the
+ same library being loaded in the same process (for example, attempting to
+ import two Python packages that link to different versions of the same library).
+ This sometimes works without incident, but it can also lead to anything from library
+ loading errors, to subtle runtime bugs, to catastrophic failures (like suddenly
+ crashing and losing data).
+
+- Last but not least, static linking and vendoring results in duplication,
+ and may increase the use of both disk space and memory.
+
+How?
+~~~~
+
+A good compromise between the needs of both parties is to provide a switch
+between using vendored and system dependencies. Ideally, if the package has
+multiple vendored dependencies, it should provide both individual switches
+for each dependency, and a general switch to control the default for them,
+e.g. via a ``USE_SYSTEM_DEPS`` environment variable.
+
+If the user requests using system dependencies, and a particular dependency
+is either missing or incompatible, the build should fail with an explanatory
+message rather than fall back to a vendored version. This gives the packager
+the opportunity to notice their mistake and a chance to consciously decide
+how to solve it.
+
+It is reasonable for upstream projects to leave *testing* of building with
+system dependencies to their downstream repackagers. The goal of these guidelines
+is to facilitate more effective collaboration between upstream projects and downstream
+repackagers, not to suggest upstream projects take on tasks that downstream repackagers
+are better equipped to handle.
+
+.. _support-downstream-testing:
+
+Support downstream testing
+--------------------------
+
+Why?
+~~~~
+
+A variety of downstream projects run some degree of testing on the packaged
+Python projects. Depending on the particular case, this can range from minimal
+smoke testing to comprehensive runs of the complete test suite. There can
+be various reasons for doing this, for example:
+
+- Verifying that the downstream packaging did not introduce any bugs.
+
+- Testing on additional platforms that are not covered by upstream testing.
+
+- Finding subtle bugs that can only be reproduced with particular hardware,
+ system package versions, and so on.
+
+- Testing the released package against newer (or older) dependency versions than
+ the ones present during upstream release testing.
+
+- Testing the package in an environment closely resembling the production
+ setup. This can detect issues caused by non-trivial interactions between
+ different installed packages, including packages that are not dependencies
+ of your package, but nevertheless can cause issues.
+
+- Testing the released package against newer Python versions (including
+ newer point releases), or less tested Python implementations such as PyPy.
+
+Admittedly, sometimes downstream testing may yield false positives or bug
+reports about scenarios the upstream project is not interested in supporting.
+However, perhaps even more often it does provide early notice of problems,
+or find non-trivial bugs that would otherwise cause issues for the upstream
+project's users. While mistakes do happen, the majority of downstream packagers
+are doing their best to double-check their results, and help upstream
+maintainers triage and fix the bugs that they reported.
+
+How?
+~~~~
+
+There are a number of things that upstream projects can do to help downstream
+repackagers test their packages efficiently and effectively, including some of the suggestions
+already mentioned above. These are typically improvements that make the test suite more
+reliable and easier to use for everyone, not just downstream packagers.
+Some specific suggestions are:
+
+- Include the test files and fixtures in the source distribution, or make it
+ possible to easily download them separately.
+
+- Do not write to the package directories during testing. Downstream test
+ setups sometimes run tests on top of the installed package, and modifications
+ performed during testing and temporary test files may end up being part
+ of the installed package!
+
+- Make the test suite work offline. Mock network interactions, using
+ packages such as responses_ or vcrpy_. If that is not possible, make it
+ possible to easily disable the tests using Internet access, e.g. via a pytest_
+ marker. Use pytest-socket_ to verify that your tests work offline. This
+ often makes your own test workflows faster and more reliable as well.
+
+- Make your tests work without a specialized setup, or perform the necessary
+ setup as part of test fixtures. Do not ever assume that you can connect
+ to system services such as databases — in an extreme case, you could crash
+ a production service!
+
+- If your package has optional dependencies, make their tests optional as
+ well. Either skip them if the needed packages are not installed, or add
+ markers to make deselecting easy.
+
+- More generally, add markers to tests with special requirements. These can
+ include e.g. significant space usage, significant memory usage, long runtime,
+ incompatibility with parallel testing.
+
+- Do not assume that the test suite will be run with ``-Werror``. Downstreams
+ often need to disable that, as it causes false positives, e.g. due to newer
+ dependency versions. Assert for warnings using ``pytest.warns()`` rather
+ than ``pytest.raises()``!
+
+- Aim to make your test suite reliable and reproducible. Avoid flaky tests.
+ Avoid depending on specific platform details, don't rely on exact results
+ of floating-point computation, or timing of operations, and so on. Fuzzing
+ has its advantages, but you want to have static test cases for completeness
+ as well.
+
+- Split tests by their purpose, and make it easy to skip categories that are
+ irrelevant or problematic. Since the primary purpose of downstream testing
+ is to ensure that the package itself works, downstreams are not generally interested
+ in tasks such as checking code coverage, code formatting, typechecking or running
+ benchmarks. These tests can fail as dependencies are upgraded or the system
+ is under load, without actually affecting the package itself.
+
+- If your test suite takes significant time to run, support testing
+ in parallel. Downstreams often maintain a large number of packages,
+ and testing them all takes a lot of time. Using pytest-xdist_ can help them
+ avoid bottlenecks.
+
+- Ideally, support running your test suite via ``pytest``. pytest_ has many
+ command-line arguments that are truly helpful to downstreams, such as
+ the ability to conveniently deselect tests, rerun flaky tests
+ (via pytest-rerunfailures_), add a timeout to prevent tests from hanging
+ (via pytest-timeout_) or run tests in parallel (via pytest-xdist_).
+ Note that test suites don't need to be *written* with ``pytest`` to be
+ *executed* with ``pytest``: ``pytest`` is able to find and execute almost
+ all test cases that are compatible with the standard library's ``unittest``
+ test discovery.
+
+
+.. _aim-for-stable-releases:
+
+Aim for stable releases
+-----------------------
+
+Why?
+~~~~
+
+Many downstreams provide stable release channels in addition to the main
+package streams. The goal of these channels is to provide more conservative
+upgrades to users with higher stability needs. These users often prefer
+to trade having the newest features available for lower risk of issues.
+
+While the exact policies differ, an important criterion for including a new
+package version in a stable release channel is for it to be available in testing
+for some time already, and have no known major regressions. For example,
+in Gentoo Linux a package is usually marked stable after being available
+in testing for a month, and being tested against the versions of its
+dependencies that are marked stable at the time.
+
+However, there are circumstances which demand more prompt action. For example,
+if a security vulnerability or a major bug is found in the version that is
+currently available in the stable channel, the downstream is facing a need
+to resolve it. In this case, they need to consider various options, such as:
+
+- putting a new version in the stable channel early,
+
+- adding patches to the version currently published,
+
+- or even downgrading the stable channel to an earlier release.
+
+Each of these options involves certain risks and a certain amount of work,
+and packagers need to weigh them to determine the course of action.
+
+How?
+~~~~
+
+There are some things that upstreams can do to tailor their workflow to stable
+release channels. These actions often are beneficial to the package's users
+as well. Some specific suggestions are:
+
+- Adjust the release frequency to the rate of code changes. Packages that
+ are released rarely often bring significant changes with every release,
+ and a higher risk of accidental regressions.
+
+- Avoid mixing bug fixes and new features, if possible. In particular, if there
+ are known bug fixes merged already, consider making a new release before
+ merging feature branches.
+
+- Consider making prereleases after major changes, to provide more testing
+ opportunities for users and downstreams willing to opt-in.
+
+- If your project is subject to very intense development, consider splitting
+ one or more branches that include a more conservative subset of commits,
+ and are released separately. For example, Django_ currently maintains three
+ release branches in addition to main.
+
+- Even if you don't wish to maintain additional branches permanently, consider
+ making additional patch releases with minimal changes to the previous
+ version, especially when a security vulnerability is discovered.
+
+- Split your changes into focused commits that address one problem at a time,
+ to make it easier to cherry-pick changes to earlier releases when necessary.
+
+
+.. _responses: https://pypi.org/project/responses/
+.. _vcrpy: https://pypi.org/project/vcrpy/
+.. _pytest-socket: https://pypi.org/project/pytest-socket/
+.. _pytest-xdist: https://pypi.org/project/pytest-xdist/
+.. _pytest: https://pytest.org/
+.. _pytest-rerunfailures: https://pypi.org/project/pytest-rerunfailures/
+.. _pytest-timeout: https://pypi.org/project/pytest-timeout/
+.. _Django: https://www.djangoproject.com/
+.. _NumPy: https://numpy.org/
+.. _cryptography: https://pypi.org/project/cryptography/
+.. _cryptography-vectors: https://pypi.org/project/cryptography-vectors/
diff --git a/_build/html/_sources/discussions/index.rst.txt b/_build/html/_sources/discussions/index.rst.txt
new file mode 100644
index 000000000..b1b84f97a
--- /dev/null
+++ b/_build/html/_sources/discussions/index.rst.txt
@@ -0,0 +1,20 @@
+Discussions
+###########
+
+**Discussions** are focused on providing comprehensive information about a
+specific topic. If you're just trying to get stuff done, see
+:doc:`/guides/index`.
+
+.. toctree::
+ :maxdepth: 1
+
+ versioning
+ deploying-python-applications
+ pip-vs-easy-install
+ install-requires-vs-requirements
+ distribution-package-vs-import-package
+ package-formats
+ src-layout-vs-flat-layout
+ setup-py-deprecated
+ single-source-version
+ downstream-packaging
diff --git a/_build/html/_sources/discussions/install-requires-vs-requirements.rst.txt b/_build/html/_sources/discussions/install-requires-vs-requirements.rst.txt
new file mode 100644
index 000000000..99e1552b8
--- /dev/null
+++ b/_build/html/_sources/discussions/install-requires-vs-requirements.rst.txt
@@ -0,0 +1,89 @@
+.. _`install_requires vs requirements files`:
+
+======================================
+install_requires vs requirements files
+======================================
+
+
+install_requires
+----------------
+
+``install_requires`` is a :ref:`setuptools` :file:`setup.py` keyword that
+should be used to specify what a project **minimally** needs to run correctly.
+When the project is installed by :ref:`pip`, this is the specification that is
+used to install its dependencies.
+
+For example, if the project requires A and B, your ``install_requires`` would be
+like so:
+
+::
+
+ install_requires=[
+ 'A',
+ 'B'
+ ]
+
+Additionally, it's best practice to indicate any known lower or upper bounds.
+
+For example, it may be known, that your project requires at least v1 of 'A', and
+v2 of 'B', so it would be like so:
+
+::
+
+ install_requires=[
+ 'A>=1',
+ 'B>=2'
+ ]
+
+It may also be known that project 'A' introduced a change in its v2
+that breaks the compatibility of your project with v2 of 'A' and later,
+so it makes sense to not allow v2:
+
+::
+
+ install_requires=[
+ 'A>=1,<2',
+ 'B>=2'
+ ]
+
+It is not considered best practice to use ``install_requires`` to pin
+dependencies to specific versions, or to specify sub-dependencies
+(i.e. dependencies of your dependencies). This is overly-restrictive, and
+prevents the user from gaining the benefit of dependency upgrades.
+
+Lastly, it's important to understand that ``install_requires`` is a listing of
+"Abstract" requirements, i.e. just names and version restrictions that don't
+determine where the dependencies will be fulfilled from (i.e. from what
+index or source). The where (i.e. how they are to be made "Concrete") is to
+be determined at install time using :ref:`pip` options. [1]_
+
+
+Requirements files
+------------------
+
+:ref:`Requirements Files <pip:Requirements Files>` described most simply, are
+just a list of :ref:`pip:pip install` arguments placed into a file.
+
+Whereas ``install_requires`` defines the dependencies for a single project,
+:ref:`Requirements Files <pip:Requirements Files>` are often used to define
+the requirements for a complete Python environment.
+
+Whereas ``install_requires`` requirements are minimal, requirements files
+often contain an exhaustive listing of pinned versions for the purpose of
+achieving :ref:`repeatable installations <pip:Repeatability>` of a complete
+environment.
+
+Whereas ``install_requires`` requirements are "Abstract", i.e. not associated
+with any particular index, requirements files often contain pip
+options like ``--index-url`` or ``--find-links`` to make requirements
+"Concrete", i.e. associated with a particular index or directory of
+packages. [1]_
+
+Whereas ``install_requires`` metadata is automatically analyzed by pip during an
+install, requirements files are not, and are only used when a user specifically
+installs them using ``python -m pip install -r``.
+
+----
+
+.. [1] For more on "Abstract" vs "Concrete" requirements, see
+ https://caremad.io/posts/2013/07/setup-vs-requirement/.
diff --git a/_build/html/_sources/discussions/package-formats.rst.txt b/_build/html/_sources/discussions/package-formats.rst.txt
new file mode 100644
index 000000000..6d4dee35c
--- /dev/null
+++ b/_build/html/_sources/discussions/package-formats.rst.txt
@@ -0,0 +1,193 @@
+.. _package-formats:
+
+===============
+Package Formats
+===============
+
+This page discusses the file formats that are used to distribute Python packages
+and the differences between them.
+
+You will find files in two formats on package indices such as PyPI_: **source
+distributions**, or **sdists** for short, and **binary distributions**, commonly
+called **wheels**. For example, the `PyPI page for pip 23.3.1 <pip-pypi_>`_
+lets you download two files, ``pip-23.3.1.tar.gz`` and
+``pip-23.3.1-py3-none-any.whl``. The former is an sdist, the latter is a
+wheel. As explained below, these serve different purposes. When publishing a
+package on PyPI (or elsewhere), you should always upload both an sdist and one
+or more wheel.
+
+
+What is a source distribution?
+==============================
+
+Conceptually, a source distribution is an archive of the source code in raw
+form. Concretely, an sdist is a ``.tar.gz`` archive containing the source code
+plus an additional special file called ``PKG-INFO``, which holds the project
+metadata. The presence of this file helps packaging tools to be more efficient
+by not needing to compute the metadata themselves. The ``PKG-INFO`` file follows
+the format specified in :ref:`core-metadata` and is not intended to be written
+by hand [#core-metadata-format]_.
+
+You can thus inspect the contents of an sdist by unpacking it using standard
+tools to work with tar archives, such as ``tar -xvf`` on UNIX platforms (like
+Linux and macOS), or :ref:`the command line interface of Python's tarfile module
+<python:tarfile-commandline>` on any platform.
+
+Sdists serve several purposes in the packaging ecosystem. When :ref:`pip`, the
+standard Python package installer, cannot find a wheel to install, it will fall
+back on downloading a source distribution, compiling a wheel from it, and
+installing the wheel. Furthermore, sdists are often used as the package source
+by downstream packagers (such as Linux distributions, Conda, Homebrew and
+MacPorts on macOS, ...), who, for various reasons, may prefer them over, e.g.,
+pulling from a Git repository.
+
+A source distribution is recognized by its file name, which has the form
+:samp:`{package_name}-{version}.tar.gz`, e.g., ``pip-23.3.1.tar.gz``.
+
+.. TODO: provide clear guidance on whether sdists should contain docs and tests.
+ Discussion: https://discuss.python.org/t/should-sdists-include-docs-and-tests/14578
+
+If you want technical details on the sdist format, read the :ref:`sdist
+specification `.
+
+
+What is a wheel?
+================
+
+Conceptually, a wheel contains exactly the files that need to be copied when
+installing the package.
+
+There is a big difference between sdists and wheels for packages with
+:term:`extension modules <Extension Module>`, written in compiled languages like
+C, C++ and Rust, which need to be compiled into platform-dependent machine code.
+With these packages, wheels do not contain source code (like C source files) but
+compiled, executable code (like ``.so`` files on Linux or DLLs on Windows).
+
+Furthermore, while there is only one sdist per version of a project, there may
+be many wheels. Again, this is most relevant in the context of extension
+modules. The compiled code of an extension module is tied to an operating system
+and processor architecture, and often also to the version of the Python
+interpreter (unless the :ref:`Python stable ABI ` is used).
+
+For pure-Python packages, the difference between sdists and wheels is less
+marked. There is normally one single wheel, for all platforms and Python
+versions. Python is an interpreted language, which does not need ahead-of-time
+compilation, so wheels contain ``.py`` files just like sdists.
+
+If you are wondering about ``.pyc`` bytecode files: they are not included in
+wheels, since they are cheap to generate, and including them would unnecessarily
+force a huge number of packages to distribute one wheel per Python version
+instead of one single wheel. Instead, installers like :ref:`pip` generate them
+while installing the package.
+
+With that being said, there are still important differences between sdists and
+wheels, even for pure Python projects. Wheels are meant to contain exactly what
+is to be installed, and nothing more. In particular, wheels should never include
+tests and documentation, while sdists commonly do. Also, the wheel format is
+more complex than sdist. For example, it includes a special file -- called
+``RECORD`` -- that lists all files in the wheel along with a hash of their
+content, as a safety check of the download's integrity.
+
+At a glance, you might wonder if wheels are really needed for "plain and basic"
+pure Python projects. Keep in mind that due to the flexibility of sdists,
+installers like pip cannot install from sdists directly -- they need to first
+build a wheel, by invoking the :term:`build backend` that the sdist specifies
+(the build backend may do all sorts of transformations while building the wheel,
+such as compiling C extensions). For this reason, even for a pure Python
+project, you should always upload *both* an sdist and a wheel to PyPI or other
+package indices. This makes installation much faster for your users, since a
+wheel is directly installable. By only including files that must be installed,
+wheels also make for smaller downloads.
+
+On the technical level, a wheel is a ZIP archive (unlike sdists which are TAR
+archives). You can inspect its contents by unpacking it as a normal ZIP archive,
+e.g., using ``unzip`` on UNIX platforms like Linux and macOS, ``Expand-Archive``
+in Powershell on Windows, or :ref:`the command line interface of Python's
+zipfile module <python:zipfile-commandline>`. This can be very useful to check
+that the wheel includes all the files you need it to.
+
+Inside a wheel, you will find the package's files, plus an additional directory
+called :samp:`{package_name}-{version}.dist-info`. This directory contains
+various files, including a ``METADATA`` file which is the equivalent of
+``PKG-INFO`` in sdists, as well as ``RECORD``. This can be useful to ensure no
+files are missing from your wheels.
+
+The file name of a wheel (ignoring some rarely used features) looks like this:
+:samp:`{package_name}-{version}-{python_tag}-{abi_tag}-{platform_tag}.whl`.
+This naming convention identifies which platforms and Python versions the wheel
+is compatible with. For example, the name ``pip-23.3.1-py3-none-any.whl`` means
+that:
+
+- (``py3``) This wheel can be installed on any implementation of Python 3,
+ whether CPython, the most widely used Python implementation, or an alternative
+ implementation like PyPy_;
+- (``none``) It does not depend on the Python version;
+- (``any``) It does not depend on the platform.
+
+The pattern ``py3-none-any`` is common for pure Python projects. Packages with
+extension modules typically ship multiple wheels with more complex tags.
+
+All technical details on the wheel format can be found in the :ref:`wheel
+specification `.
+
+
+.. _egg-format:
+.. _`Wheel vs Egg`:
+
+What about eggs?
+================
+
+"Egg" is an old package format that has been replaced with the wheel format. It
+should not be used anymore. Since August 2023, PyPI `rejects egg uploads
+`_.
+
+Here's a breakdown of the important differences between wheel and egg.
+
+* The egg format was introduced by :ref:`setuptools` in 2004, whereas the wheel
+ format was introduced by :pep:`427` in 2012.
+
+* Wheel has an :doc:`official standard specification
+ </specifications/binary-distribution-format>`. Egg did not.
+
+* Wheel is a :term:`distribution <Distribution Package>` format, i.e. a packaging
+ format. [#wheel-importable]_ Egg was both a distribution format and a runtime
+ installation format (if left zipped), and was designed to be importable.
+
+* Wheel archives do not include ``.pyc`` files. Therefore, when the distribution
+ only contains Python files (i.e. no compiled extensions), and is compatible
+ with Python 2 and 3, it's possible for a wheel to be "universal", similar to
+ an :term:`sdist <Source Distribution (or "sdist")>`.
+
+* Wheel uses standard :ref:`.dist-info directories
+ <recording-installed-packages>`. Egg used ``.egg-info``.
+
+* Wheel has a :ref:`richer file naming convention `. A
+ single wheel archive can indicate its compatibility with a number of Python
+ language versions and implementations, ABIs, and system architectures.
+
+* Wheel is versioned. Every wheel file contains the version of the wheel
+ specification and the implementation that packaged it.
+
+* Wheel is internally organized by `sysconfig path type
+ <https://docs.python.org/3/library/sysconfig.html#installation-paths>`_,
+ therefore making it easier to convert to other formats.
+
+--------------------------------------------------------------------------------
+
+.. [#core-metadata-format] This format is email-based. Although this would
+ be unlikely to be chosen today, backwards compatibility considerations lead to
+ it being kept as the canonical format. From the user point of view, this
+ is mostly invisible, since the metadata is specified by the user in a way
+ understood by the build backend, typically ``[project]`` in ``pyproject.toml``,
+ and translated by the build backend into ``PKG-INFO``.
+
+.. [#wheel-importable] Circumstantially, in some cases, wheels can be used
+ as an importable runtime format, although :ref:`this is not officially supported
+ at this time `.
+
+
+
+.. _pip-pypi: https://pypi.org/project/pip/23.3.1/#files
+.. _pypi: https://pypi.org
+.. _pypi-eggs-deprecation: https://blog.pypi.org/posts/2023-06-26-deprecate-egg-uploads/
+.. _pypy: https://pypy.org
diff --git a/_build/html/_sources/discussions/pip-vs-easy-install.rst.txt b/_build/html/_sources/discussions/pip-vs-easy-install.rst.txt
new file mode 100644
index 000000000..2bb75d3be
--- /dev/null
+++ b/_build/html/_sources/discussions/pip-vs-easy-install.rst.txt
@@ -0,0 +1,71 @@
+
+.. _`pip vs easy_install`:
+
+===================
+pip vs easy_install
+===================
+
+
+:ref:`easy_install <easy_install>`, now `deprecated`_, was released in 2004 as part of :ref:`setuptools`.
+It was notable at the time for installing :term:`packages ` from
+:term:`PyPI ` using requirement specifiers, and
+automatically installing dependencies.
+
+:ref:`pip` came later in 2008, as an alternative to :ref:`easy_install <easy_install>`, although still
+largely built on top of :ref:`setuptools` components. It was notable at the
+time for *not* installing packages as :term:`Eggs <Egg>` or from :term:`Eggs <Egg>` (but
+rather simply as 'flat' packages from :term:`sdists <Source Distribution (or "sdist")>`), and introducing the idea of :ref:`Requirements Files
+<pip:Requirements Files>`, which gave users the power to easily replicate
+environments.
+
+Here's a breakdown of the important differences between pip and the deprecated easy_install:
+
++------------------------------+--------------------------------------+-------------------------------+
+| | **pip** | **easy_install** |
++------------------------------+--------------------------------------+-------------------------------+
+|Installs from :term:`Wheels |Yes |No |
+|` | | |
++------------------------------+--------------------------------------+-------------------------------+
+|Uninstall Packages |Yes (``python -m pip uninstall``) |No |
++------------------------------+--------------------------------------+-------------------------------+
+|Dependency Overrides |Yes (:ref:`Requirements Files |No |
+| |`) | |
++------------------------------+--------------------------------------+-------------------------------+
+|List Installed Packages |Yes (``python -m pip list`` and |No |
+| |``python -m pip freeze``) | |
++------------------------------+--------------------------------------+-------------------------------+
+|:pep:`438` |Yes |No |
+|Support | | |
++------------------------------+--------------------------------------+-------------------------------+
+|Installation format |'Flat' packages with :file:`egg-info` | Encapsulated Egg format |
+| |metadata. | |
++------------------------------+--------------------------------------+-------------------------------+
+|sys.path modification |No |Yes |
+| | | |
+| | | |
++------------------------------+--------------------------------------+-------------------------------+
+|Installs from :term:`Eggs |No |Yes |
+|` | | |
++------------------------------+--------------------------------------+-------------------------------+
+|`pylauncher support`_ |No |Yes [1]_ |
+| | | |
++------------------------------+--------------------------------------+-------------------------------+
+|:ref:`Multi-version Installs` |No |Yes |
+| | | |
++------------------------------+--------------------------------------+-------------------------------+
+|Exclude scripts during install|No |Yes |
+| | | |
++------------------------------+--------------------------------------+-------------------------------+
+|per project index |Only in virtualenv |Yes, via setup.cfg |
+| | | |
++------------------------------+--------------------------------------+-------------------------------+
+
+----
+
+.. _deprecated: https://setuptools.readthedocs.io/en/latest/history.html#v42-0-0
+
+.. [1] https://setuptools.readthedocs.io/en/latest/deprecated/easy_install.html#natural-script-launcher
+
+
+.. _pylauncher support: https://bitbucket.org/vinay.sajip/pylauncher
diff --git a/_build/html/_sources/discussions/setup-py-deprecated.rst.txt b/_build/html/_sources/discussions/setup-py-deprecated.rst.txt
new file mode 100644
index 000000000..b13ce190b
--- /dev/null
+++ b/_build/html/_sources/discussions/setup-py-deprecated.rst.txt
@@ -0,0 +1,215 @@
+.. _setup-py-deprecated:
+
+
+===========================
+Is ``setup.py`` deprecated?
+===========================
+
+No, :term:`setup.py` and :ref:`setuptools` are not deprecated.
+
+Setuptools is perfectly usable as a :term:`build backend`
+for packaging Python projects.
+And :file:`setup.py` is a valid configuration file for :ref:`setuptools`
+that happens to be written in Python, instead of in *TOML* for example
+(a similar practice is used by other tools
+like *nox* and its :file:`noxfile.py` configuration file,
+or *pytest* and :file:`conftest.py`).
+
+However, ``python setup.py`` and the use of :file:`setup.py`
+as a command line tool are deprecated.
+
+This means that commands such as the following **MUST NOT** be run anymore:
+
+* ``python setup.py install``
+* ``python setup.py develop``
+* ``python setup.py sdist``
+* ``python setup.py bdist_wheel``
+
+
+What commands should be used instead?
+=====================================
+
++---------------------------------+----------------------------------------+
+| Deprecated | Recommendation |
++=================================+========================================+
+| ``python setup.py install`` | ``python -m pip install .`` |
++---------------------------------+----------------------------------------+
+| ``python setup.py develop`` | ``python -m pip install --editable .`` |
++---------------------------------+----------------------------------------+
+| ``python setup.py sdist`` | ``python -m build`` [#needs-build]_ |
++---------------------------------+ |
+| ``python setup.py bdist_wheel`` | |
++---------------------------------+----------------------------------------+
+
+
+.. [#needs-build] This requires the :ref:`build` dependency.
+ It is recommended to always build and publish both the source distribution
+ and wheel of a project, which is what ``python -m build`` does.
+ If necessary the ``--sdist`` and ``--wheel`` options can be used
+ to generate only one or the other.
+
+
+In order to install a setuptools based project,
+it was common to run :file:`setup.py`'s ``install`` command such as:
+``python setup.py install``.
+Nowadays, the recommended method is to use :ref:`pip` directly
+with a command like this one: ``python -m pip install .``.
+Where the dot ``.`` is actually a file system path,
+it is the path notation for the current directory.
+Indeed, *pip* accepts a path to
+a project's source tree directory on the local filesystem
+as argument to its ``install`` sub-command.
+So this would also be a valid command:
+``python -m pip install path/to/project``.
+
+As for the installation in *develop* mode aka *editable* mode,
+instead of ``python setup.py develop``
+one can use the ``--editable`` option of pip's *install* sub-command:
+``python -m pip install --editable .``.
+
+One recommended, simple, and straightforward method of building
+:term:`source distributions <Source Distribution (or "sdist")>`
+and :term:`wheels <Wheel>`
+is to use the :ref:`build` tool with a command like
+``python -m build``
+which triggers the generation of both distribution formats.
+If necessary the ``--sdist`` and ``--wheel`` options can be used
+to generate only one or the other.
+Note that the build tool needs to be installed separately.
+
+The command ``python setup.py install`` was deprecated
+in setuptools version *58.3.0*.
+
+
+What about other commands?
+==========================
+
+What are some replacements for the other ``python setup.py`` commands?
+
+
+``python setup.py test``
+------------------------
+
+The recommendation is to use a test runner such as pytest_.
+
+.. _pytest: https://docs.pytest.org/
+
+
+``python setup.py check``, ``python setup.py register``, and ``python setup.py upload``
+---------------------------------------------------------------------------------------
+
+A trusted replacement is :ref:`twine`:
+
+* ``python -m twine check --strict dist/*``
+* ``python -m twine register dist/*.whl`` [#not-pypi]_
+* ``python -m twine upload dist/*``
+
+.. [#not-pypi] Not necessary, nor supported on :term:`PyPI <Python Package Index (PyPI)>`.
+   But might be necessary on other :term:`package indexes <Package Index>` (for example :ref:`devpi`).
+
+
+``python setup.py --version``
+-----------------------------
+
+A possible replacement solution (among others) is to rely on setuptools-scm_:
+
+* ``python -m setuptools_scm``
+
+.. _setuptools-scm: https://setuptools-scm.readthedocs.io/en/latest/usage#as-cli-tool
+
+
+Remaining commands
+------------------
+
+This guide does not make suggestions of replacement solutions for those commands:
+
+.. hlist::
+ :columns: 4
+
+ * ``alias``
+ * ``bdist``
+ * ``bdist_dumb``
+ * ``bdist_egg``
+ * ``bdist_rpm``
+ * ``build``
+ * ``build_clib``
+ * ``build_ext``
+ * ``build_py``
+ * ``build_scripts``
+ * ``clean``
+ * ``dist_info``
+ * ``easy_install``
+ * ``editable_wheel``
+ * ``egg_info``
+ * ``install_data``
+ * ``install_egg_info``
+ * ``install_headers``
+ * ``install_lib``
+ * ``install_scripts``
+ * ``rotate``
+ * ``saveopts``
+ * ``setopt``
+ * ``upload_docs``
+
+
+What about custom commands?
+===========================
+
+Likewise, custom :file:`setup.py` commands are deprecated.
+The recommendation is to migrate those custom commands
+to a task runner tool or any other similar tool.
+Some examples of such tools are:
+chuy, make, nox or tox, pydoit, pyinvoke, taskipy, and thx.
+
+
+What about custom build steps?
+==============================
+
+Custom build steps that for example
+either overwrite existing steps such as ``build_py``, ``build_ext``, and ``bdist_wheel``
+or add new build steps are not deprecated.
+Those will be automatically called as expected.
+
+
+Should ``setup.py`` be deleted?
+===============================
+
+Although the usage of :file:`setup.py` as an executable script is deprecated,
+its usage as a configuration file for setuptools is absolutely fine.
+There is likely no modification needed in :file:`setup.py`.
+
+
+Is ``pyproject.toml`` mandatory?
+================================
+
+While it is not technically necessary yet,
+it is **STRONGLY RECOMMENDED** for a project to have a :file:`pyproject.toml` file
+at the root of its source tree with a content like this:
+
+.. code:: toml
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+
+The guide :ref:`modernize-setup-py-project` has more details about this.
+
+The standard fallback behavior for a :term:`build frontend <Build Frontend>`
+in the absence of a :file:`pyproject.toml` file and its ``[build-system]`` table
+is to assume that the :term:`build backend <Build Backend>` is setuptools.
+
+
+Why? What does it all mean?
+===========================
+
+One way to look at it is that the scope of setuptools
+has now been reduced to the role of a build backend.
+
+
+Where to read more about this?
+==============================
+
+* `Why you shouldn't invoke setup.py directly <https://blog.ganssle.io/articles/2021/10/setup-py-deprecated.html>`__ by Paul Ganssle
+
+* :doc:`setuptools:deprecated/commands`
diff --git a/_build/html/_sources/discussions/single-source-version.rst.txt b/_build/html/_sources/discussions/single-source-version.rst.txt
new file mode 100644
index 000000000..c7dc8d1e1
--- /dev/null
+++ b/_build/html/_sources/discussions/single-source-version.rst.txt
@@ -0,0 +1,62 @@
+.. _single-source-version:
+
+===================================
+Single-sourcing the Project Version
+===================================
+
+:Page Status: Complete
+:Last Reviewed: 2024-10-07
+
+Many Python :term:`distribution packages <Distribution Package>` publish a single
+Python :term:`import package <Import Package>` where it is desired that the runtime
+``__version__`` attribute on the import package report the same version specifier
+as :func:`importlib.metadata.version` reports for the distribution package
+(as described in :ref:`runtime-version-access`).
+
+It is also frequently desired that this version information be derived from a version
+control system *tag* (such as ``v1.2.3``) rather than being manually updated in the
+source code.
+
+Some projects may choose to simply live with the data entry duplication, and rely
+on automated testing to ensure the different values do not diverge.
+
+Alternatively, a project's chosen build system may offer a way to define a single
+source of truth for the version number.
+
+In general, the options are:
+
+1) If the code is in a version control system (VCS), such as Git, then the version can be extracted from the VCS.
+
+2) The version can be hard-coded into the :file:`pyproject.toml` file -- and the build system can copy it
+ into other locations it may be required.
+
+3) The version string can be hard-coded into the source code -- either in a special purpose file,
+ such as :file:`_version.txt` (which must then be shipped as part of the project's source distribution
+ package), or as an attribute in a particular module, such as :file:`__init__.py`. The build
+ system can then extract it from the runtime location at build time.
+
+Consult your build system's documentation for their recommended method.
+
+When the intention is that a distribution package and its associated import package
+share the same version, it is recommended that the project include an automated test
+case that ensures ``import_name.__version__`` and ``importlib.metadata.version("dist-name")``
+report the same value (note: for many projects, ``import_name`` and ``dist-name`` will
+be the same name).
+
+
+.. _Build system version handling:
+
+Build System Version Handling
+-----------------------------
+
+The following are links to some build system's documentation for handling version strings.
+
+* `Flit <https://flit.pypa.io/en/stable/>`_
+
+* `Hatchling <https://hatch.pypa.io/latest/version/>`_
+
+* `PDM <https://pdm-project.org/en/latest/reference/pep621/>`_
+
+* `Setuptools <https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html>`_
+
+  - `setuptools_scm <https://setuptools-scm.readthedocs.io/en/latest/>`_
diff --git a/_build/html/_sources/discussions/src-layout-vs-flat-layout.rst.txt b/_build/html/_sources/discussions/src-layout-vs-flat-layout.rst.txt
new file mode 100644
index 000000000..c38968345
--- /dev/null
+++ b/_build/html/_sources/discussions/src-layout-vs-flat-layout.rst.txt
@@ -0,0 +1,105 @@
+.. _src-layout-vs-flat-layout:
+
+=========================
+src layout vs flat layout
+=========================
+
+The "flat layout" refers to organising a project's files in a folder or
+repository, such that the various configuration files and
+:term:`import packages <Import Package>` are all in the top-level directory.
+
+::
+
+ .
+ ├── README.md
+ ├── noxfile.py
+ ├── pyproject.toml
+ ├── setup.py
+ ├── awesome_package/
+ │ ├── __init__.py
+ │ └── module.py
+ └── tools/
+ ├── generate_awesomeness.py
+ └── decrease_world_suck.py
+
+The "src layout" deviates from the flat layout by moving the code that is
+intended to be importable (i.e. ``import awesome_package``, also known as
+:term:`import packages <Import Package>`) into a subdirectory. This
+subdirectory is typically named ``src/``, hence "src layout".
+
+::
+
+ .
+ ├── README.md
+ ├── noxfile.py
+ ├── pyproject.toml
+ ├── setup.py
+ ├── src/
+ │ └── awesome_package/
+ │ ├── __init__.py
+ │ └── module.py
+ └── tools/
+ ├── generate_awesomeness.py
+ └── decrease_world_suck.py
+
+Here's a breakdown of the important behaviour differences between the src
+layout and the flat layout:
+
+* The src layout requires installation of the project to be able to run its
+ code, and the flat layout does not.
+
+ This means that the src layout involves an additional step in the
+ development workflow of a project (typically, an
+  :doc:`editable installation <setuptools:userguide/development_mode>`
+ is used for development and a regular installation is used for testing).
+
+* The src layout helps prevent accidental usage of the in-development copy of
+ the code.
+
+ This is relevant since the Python interpreter includes the current working
+ directory as the first item on the import path. This means that if an import
+ package exists in the current working directory with the same name as an
+ installed import package, the variant from the current working directory will
+ be used. This can lead to subtle misconfiguration of the project's packaging
+ tooling, which could result in files not being included in a distribution.
+
+ The src layout helps avoid this by keeping import packages in a directory
+ separate from the root directory of the project, ensuring that the installed
+ copy is used.
+
+* The src layout helps enforce that an
+  :doc:`editable installation <setuptools:userguide/development_mode>` is only
+ able to import files that were meant to be importable.
+
+ This is especially relevant when the editable installation is implemented
+  using a `path configuration file <https://docs.python.org/3/library/site.html>`_
+ that adds the directory to the import path.
+
+ The flat layout would add the other project files (eg: ``README.md``,
+ ``tox.ini``) and packaging/tooling configuration files (eg: ``setup.py``,
+ ``noxfile.py``) on the import path. This would make certain imports work
+ in editable installations but not regular installations.
+
+.. _running-cli-from-source-src-layout:
+
+Running a command-line interface from source with src-layout
+============================================================
+
+Due to the src layout's requirement (mentioned first above) that the project
+be installed before its code can be run, a command-line
+interface can not be run directly from the :term:`source tree <Project Source Tree>`,
+but requires installation of the package in
+:doc:`Development Mode <setuptools:userguide/development_mode>`
+for testing purposes. Since this can be impractical in some situations,
+a workaround could be to prepend the package folder to Python's
+:py:data:`sys.path` when called via its :file:`__main__.py` file:
+
+.. code-block:: python
+
+ import os
+ import sys
+
+ if not __package__:
+ # Make CLI runnable from source tree with
+ # python src/package
+ package_source_path = os.path.dirname(os.path.dirname(__file__))
+ sys.path.insert(0, package_source_path)
diff --git a/_build/html/_sources/discussions/versioning.rst.txt b/_build/html/_sources/discussions/versioning.rst.txt
new file mode 100644
index 000000000..eeea3578c
--- /dev/null
+++ b/_build/html/_sources/discussions/versioning.rst.txt
@@ -0,0 +1,253 @@
+.. _versioning:
+.. _`Choosing a versioning scheme`:
+
+==========
+Versioning
+==========
+
+This discussion covers all aspects of versioning Python packages.
+
+
+Valid version numbers
+=====================
+
+Different Python projects may use different versioning schemes based on the
+needs of that particular project, but in order to be compatible with tools like
+:ref:`pip`, all of them are required to comply with a flexible format for
+version identifiers, for which the authoritative reference is the
+:ref:`specification of version specifiers <version-specifiers>`. Here are some
+examples of version numbers [#version-examples]_:
+
+- A simple version (final release): ``1.2.0``
+- A development release: ``1.2.0.dev1``
+- An alpha release: ``1.2.0a1``
+- A beta release: ``1.2.0b1``
+- A release candidate: ``1.2.0rc1``
+- A post-release: ``1.2.0.post1``
+- A post-release of an alpha release (possible, but discouraged): ``1.2.0a1.post1``
+- A simple version with only two components: ``23.12``
+- A simple version with just one component: ``42``
+- A version with an epoch: ``1!1.0``
+
+Projects can use a cycle of pre-releases to support testing by their users
+before a final release. In order, the steps are: alpha releases, beta releases,
+release candidates, final release. Pip and other modern Python package
+installers ignore pre-releases by default when deciding which versions of
+dependencies to install, unless explicitly requested (e.g., with
+``pip install pkg==1.1a3`` or ``pip install --pre pkg``).
+
+The purpose of development releases is to support releases made early during a
+development cycle, for example, a nightly build, or a build from the latest
+source in a Linux distribution.
+
+Post-releases are used to address minor errors in a final release that do not
+affect the distributed software, such as correcting an error in the release
+notes. They should not be used for bug fixes; these should be done with a new
+final release (e.g., incrementing the third component when using semantic
+versioning).
+
+Finally, epochs, a rarely used feature, serve to fix the sorting order when
+changing the versioning scheme. For example, if a project is using calendar
+versioning, with versions like 23.12, and switches to semantic versioning, with
+versions like 1.0, the comparison between 1.0 and 23.12 will go the wrong way.
+To correct this, the new version numbers should have an explicit epoch, as in
+"1!1.0", in order to be treated as more recent than the old version numbers.
+
+
+
+Semantic versioning vs. calendar versioning
+===========================================
+
+A versioning scheme is a formalized way to interpret the segments of a version
+number, and to decide which should be the next version number for a new release
+of a package. Two versioning schemes are commonly used for Python packages,
+semantic versioning and calendar versioning.
+
+.. caution::
+
+ The decision which version number to choose is up to a
+ project's maintainer. This effectively means that version
+ bumps reflect the maintainer's view. That view may differ
+ from the end-users' perception of what said formalized
+ versioning scheme promises them.
+
+ There are known exceptions for selecting the next version
+ number. The maintainers may consciously choose to break the
+ assumption that the last version segment only contains
+ backwards-compatible changes.
+ One such case is when a security vulnerability needs to be
+ addressed. Security releases often come in patch versions
+ but contain breaking changes inevitably.
+
+
+Semantic versioning
+-------------------
+
+The idea of *semantic versioning* (or SemVer) is to use 3-part version numbers,
+*major.minor.patch*, where the project author increments:
+
+- *major* when they make incompatible API changes,
+- *minor* when they add functionality in a backwards-compatible manner, and
+- *patch*, when they make backwards-compatible bug fixes.
+
+A majority of Python projects use a scheme that resembles semantic
+versioning. However, most projects, especially larger ones, do not strictly
+adhere to semantic versioning, since many changes are technically breaking
+changes but affect only a small fraction of users. Such projects tend to
+increment the major number when the incompatibility is high, or to signal a
+shift in the project, rather than for any tiny incompatibility
+[#semver-strictness]_. Conversely, a bump of the major version number
+is sometimes used to signal significant but backwards-compatible new
+features.
+
+For those projects that do use strict semantic versioning, this approach allows
+users to make use of :ref:`compatible release version specifiers
+<version-specifiers-compatible-release>`, with the ``~=`` operator. For
+example, ``name ~= X.Y`` is roughly equivalent to ``name >= X.Y, == X.*``, i.e.,
+it requires at least release X.Y, and allows any later release with greater Y as
+long as X is the same. Likewise, ``name ~= X.Y.Z`` is roughly equivalent to
+``name >= X.Y.Z, == X.Y.*``, i.e., it requires at least X.Y.Z and allows a later
+release with same X and Y but higher Z.
+
+Python projects adopting semantic versioning should abide by clauses 1-8 of the
+`Semantic Versioning 2.0.0 specification <https://semver.org/>`_.
+
+The popular :doc:`Sphinx ` documentation generator is an example
+project that uses strict semantic versioning (:doc:`Sphinx versioning policy
+`). The famous :doc:`NumPy `
+scientific computing package explicitly uses "loose" semantic versioning, where
+releases incrementing the minor version can contain backwards-incompatible API
+changes (:doc:`NumPy versioning policy `).
+
+
+Calendar versioning
+-------------------
+
+Semantic versioning is not a suitable choice for all projects, such as those
+with a regular time-based release cadence and a deprecation process that
+provides warnings for a number of releases prior to removal of a feature.
+
+A key advantage of date-based versioning, or `calendar versioning <calver_>`_
+(CalVer), is that it is straightforward to tell how old the base feature set of
+a particular release is given just the version number.
+
+Calendar version numbers typically take the form *year.month* (for example,
+23.12 for December 2023).
+
+:doc:`Pip `, the standard Python package installer, uses calendar
+versioning.
+
+
+Other schemes
+-------------
+
+Serial versioning refers to the simplest possible versioning scheme, which
+consists of a single number incremented every release. While serial versioning
+is very easy to manage as a developer, it is the hardest to track as an end
+user, as serial version numbers convey little or no information regarding API
+backwards compatibility.
+
+Combinations of the above schemes are possible. For example, a project may
+combine date-based versioning with serial versioning to create a *year.serial*
+numbering scheme that readily conveys the approximate age of a release, but
+doesn't otherwise commit to a particular release cadence within the year.
+
+
+Local version identifiers
+=========================
+
+Public version identifiers are designed to support distribution via :term:`PyPI
+<Python Package Index (PyPI)>`. Python packaging tools also support the notion
+of a :ref:`local version identifier <local-version-identifiers>`, which can be
+used to identify local development builds not intended for publication, or
+modified variants of a release maintained by a redistributor.
+
+A local version identifier takes the form of a public version identifier,
+followed by "+" and a local version label. For example, a package with
+Fedora-specific patches applied could have the version "1.2.1+fedora.4".
+Another example is versions computed by setuptools-scm_, a setuptools plugin
+that reads the version from Git data. In a Git repository with some commits
+since the latest release, setuptools-scm generates a version like
+"0.5.dev1+gd00980f", or if the repository has untracked changes, like
+"0.5.dev1+gd00980f.d20231217".
+
+.. _runtime-version-access:
+
+Accessing version information at runtime
+========================================
+
+Version information for all :term:`distribution packages <Distribution Package>`
+that are locally available in the current environment can be obtained at runtime
+using the standard library's :func:`importlib.metadata.version` function::
+
+ >>> importlib.metadata.version("cryptography")
+ '41.0.7'
+
+Many projects also choose to version their top level
+:term:`import packages <Import Package>` by providing a package level
+``__version__`` attribute::
+
+ >>> import cryptography
+ >>> cryptography.__version__
+ '41.0.7'
+
+This technique can be particularly valuable for CLI applications which want
+to ensure that version query invocations (such as ``pip -V``) run as quickly
+as possible.
+
+Package publishers wishing to ensure their reported distribution package and
+import package versions are consistent with each other can review the
+:ref:`single-source-version` discussion for potential approaches to doing so.
+
+As import packages and modules are not *required* to publish runtime
+version information in this way (see the withdrawn proposal in
+:pep:`PEP 396 <396>`), the ``__version__`` attribute should either only be
+queried with interfaces that are known to provide it (such as a project
+querying its own version or the version of one of its direct dependencies),
+or else the querying code should be designed to handle the case where the
+attribute is missing [#fallback-to-dist-version]_.
+
+Some projects may need to publish version information for external APIs
+that aren't the version of the module itself. Such projects should
+define their own project-specific ways of obtaining the relevant information
+at runtime. For example, the standard library's :mod:`ssl` module offers
+multiple ways to access the underlying OpenSSL library version::
+
+ >>> ssl.OPENSSL_VERSION
+ 'OpenSSL 3.2.2 4 Jun 2024'
+ >>> ssl.OPENSSL_VERSION_INFO
+ (3, 2, 0, 2, 0)
+ >>> hex(ssl.OPENSSL_VERSION_NUMBER)
+ '0x30200020'
+
+--------------------------------------------------------------------------------
+
+.. [#version-examples] Some more examples of unusual version numbers are
+   given in a `blog post <versions-seth-larson_>`_ by Seth Larson.
+
+.. [#semver-strictness] For some personal viewpoints on this issue, see these
+   blog posts: `by Hynek Schlawack <semver-hynek-schlawack_>`_, `by Donald Stufft
+   <semver-donald-stufft_>`_, `by Bernát Gábor <semver-bernat-gabor_>`_, `by
+   Brett Cannon <semver-brett-cannon_>`_. For a humorous take, read about
+   ZeroVer_.
+
+.. [#fallback-to-dist-version] A full list mapping the top level names available
+ for import to the distribution packages that provide those import packages and
+ modules may be obtained through the standard library's
+ :func:`importlib.metadata.packages_distributions` function. This means that
+ even code that is attempting to infer a version to report for all importable
+ top-level names has a means to fall back to reporting the distribution
+ version information if no ``__version__`` attribute is defined. Only standard
+ library modules, and modules added via means other than Python package
+ installation would fail to have version information reported in that case.
+
+
+.. _zerover: https://0ver.org
+.. _calver: https://calver.org
+.. _semver: https://semver.org
+.. _semver-bernat-gabor: https://bernat.tech/posts/version-numbers/
+.. _semver-brett-cannon: https://snarky.ca/why-i-dont-like-semver/
+.. _semver-donald-stufft: https://caremad.io/posts/2016/02/versioning-software/
+.. _semver-hynek-schlawack: https://hynek.me/articles/semver-will-not-save-you/
+.. _setuptools-scm: https://setuptools-scm.readthedocs.io
+.. _versions-seth-larson: https://sethmlarson.dev/pep-440
diff --git a/_build/html/_sources/flow.rst.txt b/_build/html/_sources/flow.rst.txt
new file mode 100644
index 000000000..947c399db
--- /dev/null
+++ b/_build/html/_sources/flow.rst.txt
@@ -0,0 +1,182 @@
+==================
+The Packaging Flow
+==================
+
+The document aims to outline the flow involved in publishing/distributing a
+:term:`distribution package <Distribution Package>`, usually to the `Python
+Package Index (PyPI)`_. It is written for package publishers, who are assumed
+to be the package author.
+
+.. _Python Package Index (PyPI): https://pypi.org/
+
+While the :doc:`tutorial </tutorials/packaging-projects>` walks through the
+process of preparing a simple package for release, it does not fully enumerate
+what steps and files are required, and for what purpose.
+
+Publishing a package requires a flow from the author's source code to an end
+user's Python environment. The steps to achieve this are:
+
+- Have a source tree containing the package. This is typically a checkout from
+ a version control system (VCS).
+
+- Prepare a configuration file describing the package metadata (name, version
+ and so forth) and how to create the build artifacts. For most packages, this
+ will be a :file:`pyproject.toml` file, maintained manually in the source
+ tree.
+
+- Create build artifacts to be sent to the package distribution service
+ (usually PyPI); these will normally be a
+  :term:`source distribution ("sdist") <Source Distribution (or "sdist")>`
+  and one or more :term:`built distributions ("wheels") <Built Distribution>`.
+ These are made by a build tool using the configuration file from the
+ previous step. Often there is just one generic wheel for a pure Python
+ package.
+
+- Upload the build artifacts to the package distribution service.
+
+At that point, the package is present on the package distribution service.
+To use the package, end users must:
+
+- Download one of the package's build artifacts from the package distribution
+ service.
+
+- Install it in their Python environment, usually in its ``site-packages``
+ directory. This step may involve a build/compile step which, if needed, must
+ be described by the package metadata.
+
+These last 2 steps are typically performed by :ref:`pip` when an end user runs
+``pip install``.
+
+The steps above are described in more detail below.
+
+The source tree
+===============
+
+The source tree contains the package source code, usually a checkout from a
+VCS. The particular version of the code used to create the build artifacts
+will typically be a checkout based on a tag associated with the version.
+
+The configuration file
+======================
+
+The configuration file depends on the tool used to create the build artifacts.
+The standard practice is to use a :file:`pyproject.toml` file in the `TOML
+format`_.
+
+.. _TOML format: https://github.com/toml-lang/toml
+
+At a minimum, the :file:`pyproject.toml` file needs a ``[build-system]`` table
+specifying your build tool. There are many build tools available, including
+but not limited to :ref:`flit`, :ref:`hatch`, :ref:`pdm`, :ref:`poetry`,
+:ref:`setuptools`, `trampolim`_, and `whey`_. Each tool's documentation will
+show what to put in the ``[build-system]`` table.
+
+.. _trampolim: https://pypi.org/project/trampolim/
+.. _whey: https://pypi.org/project/whey/
+
+For example, here is a table for using :ref:`hatch`:
+
+.. code-block:: toml
+
+ [build-system]
+ requires = ["hatchling"]
+ build-backend = "hatchling.build"
+
+With such a table in the :file:`pyproject.toml` file,
+a ":term:`frontend <Build Frontend>`" tool like
+:ref:`build` can run your chosen
+build tool's ":term:`backend <Build Backend>`"
+to create the build artifacts.
+Your build tool may also provide its own frontend. An install tool
+like :ref:`pip` also acts as a frontend when it runs your build tool's backend
+to install from a source distribution.
+
+The particular build tool you choose dictates what additional information is
+required in the :file:`pyproject.toml` file. For example, you might specify:
+
+* a ``[project]`` table containing project
+  :doc:`Core Metadata </specifications/core-metadata>`
+ (name, version, author and so forth),
+
+* a ``[tool]`` table containing tool-specific configuration options.
+
+Refer to the :ref:`pyproject.toml guide <writing-pyproject-toml>` for a
+complete guide to ``pyproject.toml`` configuration.
+
+
+Build artifacts
+===============
+
+The source distribution (sdist)
+-------------------------------
+
+A source distribution contains enough to install the package from source in an
+end user's Python environment. As such, it needs the package source, and may
+also include tests and documentation. These are useful for end users wanting
+to develop your sources, and for end user systems where some local compilation
+step is required (such as a C extension).
+
+The :ref:`build` package knows how to invoke your build tool to create one of
+these:
+
+.. code-block:: bash
+
+ python3 -m build --sdist source-tree-directory
+
+Or, your build tool may provide its own interface for creating an sdist.
+
+
+The built distributions (wheels)
+--------------------------------
+
+A built distribution contains only the files needed for an end user's Python
+environment. No compilation steps are required during the install, and the
+wheel file can simply be unpacked into the ``site-packages`` directory. This
+makes the install faster and more convenient for end users.
+
+A pure Python package typically needs only one "generic" wheel. A package with
+compiled binary extensions needs a wheel for each supported combination of
+Python interpreter, operating system, and CPU architecture that it supports.
+If a suitable wheel file is not available, tools like :ref:`pip` will fall
+back to installing the source distribution.
+
+The :ref:`build` package knows how to invoke your build tool to create one of
+these:
+
+.. code-block:: bash
+
+ python3 -m build --wheel source-tree-directory
+
+Or, your build tool may provide its own interface for creating a wheel.
+
+.. note::
+
+ The default behaviour of :ref:`build` is to make both an sdist and a wheel
+ from the source in the current directory; the above examples are
+ deliberately specific.
+
+Upload to the package distribution service
+==========================================
+
+The :ref:`twine` tool can upload build artifacts to PyPI for distribution,
+using a command like:
+
+.. code-block:: bash
+
+ twine upload dist/package-name-version.tar.gz dist/package-name-version-py3-none-any.whl
+
+Or, your build tool may provide its own interface for uploading.
+
+Download and install
+====================
+
+Now that the package is published, end users can download and install the
+package into their Python environment. Typically this is done with :ref:`pip`,
+using a command like:
+
+.. code-block:: bash
+
+ python3 -m pip install package-name
+
+End users may also use other tools like :ref:`pipenv`, :ref:`poetry`, or
+:ref:`pdm`.
diff --git a/_build/html/_sources/glossary.rst.txt b/_build/html/_sources/glossary.rst.txt
new file mode 100644
index 000000000..40c041f4c
--- /dev/null
+++ b/_build/html/_sources/glossary.rst.txt
@@ -0,0 +1,432 @@
+========
+Glossary
+========
+
+
+.. glossary::
+
+
+ Binary Distribution
+
+ A specific kind of :term:`Built Distribution` that contains compiled
+ extensions.
+
+
+ Build Backend
+
+ A library that takes a source tree
+ and builds a :term:`source distribution <Source Distribution (or "sdist")>` or
+ :term:`built distribution <Built Distribution>` from it.
+ The build is delegated to the backend by a
+ :term:`frontend <Build Frontend>`.
+ All backends offer a standardized interface.
+
+ Examples of build backends are
+ :ref:`flit's flit-core <flit>`,
+ :ref:`hatch's hatchling <hatch>`,
+ :ref:`maturin`,
+ :ref:`meson-python`,
+ :ref:`scikit-build-core`,
+ and :ref:`setuptools`.
+
+
+ Build Frontend
+
+ A tool that users might run
+ that takes arbitrary source trees or
+ :term:`source distributions <Source Distribution (or "sdist")>`
+ and builds source distributions or :term:`wheels <Wheel>` from them.
+ The actual building is delegated to each source tree's
+ :term:`build backend <Build Backend>`.
+
+ Examples of build frontends are :ref:`pip` and :ref:`build`.
+
+
+ Built Distribution
+
+ A :term:`Distribution <Distribution Package>` format containing files
+ and metadata that only need to be moved to the correct location on the
+ target system, to be installed. :term:`Wheel` is such a format, whereas
+ :term:`Source Distribution (or "sdist")` is not, in that it requires a
+ build step before it can be installed. This format does not imply that
+ Python files have to be precompiled (:term:`Wheel` intentionally does
+ not include compiled Python files). See :ref:`package-formats` for more
+ information.
+
+
+ Built Metadata
+
+ The concrete form :term:`Core Metadata` takes
+ when included inside an installed :term:`Project` (``METADATA`` file)
+ or a :term:`Distribution Archive`
+ (``PKG-INFO`` in a
+ :term:`Sdist <Source Distribution (or "sdist")>`
+ and ``METADATA`` in a :term:`Wheel`).
+
+
+ Core Metadata
+
+ The :ref:`specification <core-metadata>`
+ and the set of :term:`Core Metadata Field`\s it defines
+ that describe key static attributes of
+ a :term:`Distribution Package` or :term:`Installed Project`.
+
+
+ Core Metadata Field
+
+ A single key-value pair
+ (or sequence of such with the same name, for multiple-use fields)
+ defined in the :term:`Core Metadata` spec
+ and stored in the :term:`Built Metadata`.
+ Notably, distinct from a :term:`Pyproject Metadata Key`.
+
+
+ Distribution Archive
+
+ The physical distribution artifact (i.e. a file on disk)
+ for a :term:`Distribution Package`.
+
+
+ Distribution Package
+
+ A versioned archive file that contains Python :term:`packages <Import Package>`, :term:`modules <Module>`, and other resource files that are
+ used to distribute a :term:`Release`. The archive file is what an
+ end-user will download from the internet and install.
+
+ A distribution package is more commonly referred to with the single
+ words "package" or "distribution", but this guide may use the expanded
+ term when more clarity is needed to prevent confusion with an
+ :term:`Import Package` (which is also commonly called a "package") or
+ another kind of distribution (e.g. a Linux distribution or the Python
+ language distribution), which are often referred to with the single term
+ "distribution". See :ref:`distribution-package-vs-import-package`
+ for a breakdown of the differences.
+
+ Egg
+
+ A :term:`Built Distribution` format introduced by :ref:`setuptools`,
+ which has been replaced by :term:`Wheel`. For details, see
+ :ref:`egg-format`.
+
+ Extension Module
+
+ A :term:`Module` written in the low-level language of the Python implementation:
+ C/C++ for Python, Java for Jython. Typically contained in a single
+ dynamically loadable pre-compiled file, e.g. a shared object (.so) file
+ for Python extensions on Unix, a DLL (given the .pyd extension) for
+ Python extensions on Windows, or a Java class file for Jython
+ extensions.
+
+
+ Import Package
+
+ A Python module which can contain other modules or recursively, other
+ packages.
+
+ An import package is more commonly referred to with the single word
+ "package", but this guide will use the expanded term when more clarity
+ is needed to prevent confusion with a :term:`Distribution Package` which
+ is also commonly called a "package". See :ref:`distribution-package-vs-import-package`
+ for a breakdown of the differences.
+
+
+ Installed Project
+
+ A :term:`Project` that is installed for use with
+ a Python interpreter or :term:`Virtual Environment`,
+ as described in the specification :ref:`recording-installed-packages`.
+
+
+ Known Good Set (KGS)
+
+ A set of distributions at specified versions which are compatible with
+ each other. Typically a test suite will be run which passes all tests
+ before a specific set of packages is declared a known good set. This
+ term is commonly used by frameworks and toolkits which are comprised of
+ multiple individual distributions.
+
+
+ License Classifier
+
+ A PyPI Trove classifier
+ (as :ref:`described <core-metadata-classifier>`
+ in the :term:`Core Metadata` specification)
+ which begins with ``License ::``.
+
+
+ License Expression
+ SPDX Expression
+
+ A string with valid SPDX license expression syntax,
+ including one or more SPDX :term:`License Identifier`\(s),
+ which describes a :term:`Distribution Archive`'s license(s)
+ and how they inter-relate.
+ Examples:
+ ``GPL-3.0-or-later``,
+ ``MIT AND (Apache-2.0 OR BSD-2-Clause)``
+
+
+ License Identifier
+ SPDX Identifier
+
+ A valid SPDX short-form license identifier,
+ originally specified in :pep:`639`.
+ This includes all valid SPDX identifiers and
+ the custom ``LicenseRef-[idstring]`` strings conforming to the
+ SPDX specification.
+ Examples:
+ ``MIT``,
+ ``GPL-3.0-only``,
+ ``LicenseRef-My-Custom-License``
+
+
+ Module
+
+ The basic unit of code reusability in Python, existing in one of two
+ types: :term:`Pure Module`, or :term:`Extension Module`.
+
+
+ Package Index
+
+ A repository of distributions with a web interface to automate
+ :term:`package <Distribution Package>` discovery and consumption.
+
+
+ Per Project Index
+
+ A private or other non-canonical :term:`Package Index` indicated by
+ a specific :term:`Project` as the index preferred or required to
+ resolve dependencies of that project.
+
+
+ Project
+
+ A library, framework, script, plugin, application, or collection of data
+ or other resources, or some combination thereof that is intended to be
+ packaged into a :term:`Distribution <Distribution Package>`.
+
+ Since most projects create :term:`Distributions <Distribution Package>`
+ using either :pep:`518` ``build-system``, :ref:`distutils` or
+ :ref:`setuptools`, another practical way to define projects currently
+ is something that contains a :term:`pyproject.toml`, :term:`setup.py`,
+ or :term:`setup.cfg` file at the root of the project source directory.
+
+ Python projects must have unique names, which are registered on
+ :term:`PyPI <Python Package Index (PyPI)>`. Each project will then
+ contain one or more :term:`Releases <Release>`, and each release may
+ comprise one or more :term:`distributions <Distribution Package>`.
+
+ Note that there is a strong convention to name a project after the name
+ of the package that is imported to run that project. However, this
+ doesn't have to hold true. It's possible to install a distribution from
+ the project 'foo' and have it provide a package importable only as
+ 'bar'.
+
+
+ Project Root Directory
+
+ The filesystem directory in which
+ a :term:`Project`'s :term:`source tree <Project Source Tree>` is located.
+
+
+ Project Source Tree
+
+ The on-disk format of a :term:`Project` used for development,
+ containing its raw source code before being packaged
+ into a
+ :term:`Source Distribution (or "sdist")`
+ or :term:`Built Distribution`.
+
+
+ Project Source Metadata
+
+ Metadata defined by the package author
+ in a :term:`Project`'s :term:`source tree <Project Source Tree>`,
+ to be transformed into :term:`Core Metadata Field`\s
+ in the :term:`Built Metadata`
+ by the project's :term:`build backend <Build Backend>`.
+ Can be written as :term:`Pyproject Metadata`,
+ or in a tool-specific format
+ (under the ``[tool]`` table in ``pyproject.toml``,
+ or in a tool's own configuration file).
+
+
+ Pure Module
+
+ A :term:`Module` written in Python and contained in a single ``.py`` file (and
+ possibly associated ``.pyc`` and/or ``.pyo`` files).
+
+
+ Pyproject Metadata
+
+ The :term:`Project Source Metadata` format
+ defined by the :ref:`declaring-project-metadata` specification
+ and originally introduced in :pep:`621`,
+ stored as :term:`Pyproject Metadata Key`\s
+ under the ``[project]`` table of a :term:`pyproject.toml` file.
+ Notably, *not* a tool-specific source metadata format
+ under the ``[tool]`` table in ``pyproject.toml``.
+
+
+ Pyproject Metadata Key
+
+ A top-level TOML key in the ``[project]`` table in ``pyproject.toml``;
+ part of the :term:`Pyproject Metadata`.
+ Notably, distinct from a :term:`Core Metadata Field`.
+
+
+ Pyproject Metadata Subkey
+
+ A second-level TOML key under a table-valued
+ :term:`Pyproject Metadata Key`.
+
+
+ Python Packaging Authority (PyPA)
+
+ PyPA is a working group that maintains many of the relevant
+ projects in Python packaging. They maintain a site at
+ :doc:`pypa.io <pypa:index>`, host projects on `GitHub
+ <https://github.com/pypa>`_, and discuss issues on the
+ `distutils-sig mailing list
+ <https://mail.python.org/archives/list/distutils-sig@python.org/>`_
+ and `the Python Discourse forum <https://discuss.python.org/c/packaging>`__.
+
+
+ Python Package Index (PyPI)
+
+ `PyPI <https://pypi.org>`_ is the default :term:`Package
+ Index` for the Python community. It is open to all Python developers to
+ consume and distribute their distributions.
+
+ pypi.org
+
+ `pypi.org <https://pypi.org>`_ is the domain name for the
+ :term:`Python Package Index (PyPI)`. It replaced the legacy index
+ domain name, ``pypi.python.org``, in 2017. It is powered by
+ :ref:`warehouse`.
+
+ pyproject.toml
+
+ The tool-agnostic :term:`Project` specification file.
+ Defined in :pep:`518`.
+
+ Release
+
+ A snapshot of a :term:`Project` at a particular point in time, denoted
+ by a version identifier.
+
+ Making a release may entail the publishing of multiple
+ :term:`Distributions `. For example, if version
+ 1.0 of a project was released, it could be available in both a source
+ distribution format and a Windows installer distribution format.
+
+
+ Requirement
+
+ A specification for a :term:`package <Distribution Package>` to be
+ installed. :ref:`pip`, the :term:`PYPA <Python Packaging Authority (PyPA)>`
+ recommended installer, allows various forms of specification
+ that can all be considered a "requirement". For more information, see the
+ :ref:`pip:pip install` reference.
+
+
+ Requirement Specifier
+
+ A format used by :ref:`pip` to install packages from a :term:`Package
+ Index`. For an EBNF diagram of the format, see :ref:`dependency-specifiers`.
+ For example, "foo>=1.3" is a
+ requirement specifier, where "foo" is the project name, and the ">=1.3"
+ portion is the :term:`Version Specifier`
+
+ Requirements File
+
+ A file containing a list of :term:`Requirements <Requirement>` that can
+ be installed using :ref:`pip`. For more information, see the :ref:`pip`
+ docs on :ref:`pip:Requirements Files`.
+
+
+ Root License Directory
+ License Directory
+
+ The directory under which license files are stored in a
+ :term:`Project Source Tree`, :term:`Distribution Archive`
+ or :term:`Installed Project`.
+ For a :term:`Project Source Tree` or
+ :term:`Source Distribution (or "sdist")`, this is the
+ :term:`Project Root Directory`.
+ For a :term:`Built Distribution` or :term:`Installed Project`,
+ this is the :file:`.dist-info/licenses/` directory of
+ the wheel archive or project folder respectively.
+ Also, the root directory that paths
+ recorded in the ``License-File``
+ :term:`Core Metadata Field` are relative to.
+
+
+ setup.py
+ setup.cfg
+
+ The project specification files for :ref:`distutils` and :ref:`setuptools`.
+ See also :term:`pyproject.toml`.
+
+
+ Source Archive
+
+ An archive containing the raw source code for a :term:`Release`, prior
+ to creation of a :term:`Source Distribution (or "sdist")` or
+ :term:`Built Distribution`.
+
+
+ Source Distribution (or "sdist")
+
+ A :term:`distribution <Distribution Package>` format (usually generated
+ using ``python -m build --sdist``) that provides metadata and the
+ essential source files needed for installing by a tool like :ref:`pip`,
+ or for generating a :term:`Built Distribution`. See :ref:`package-formats`
+ for more information.
+
+
+ System Package
+
+ A package provided in a format native to the operating system,
+ e.g. an rpm or dpkg file.
+
+
+ Version Specifier
+
+ The version component of a :term:`Requirement Specifier`. For example,
+ the ">=1.3" portion of "foo>=1.3". Read the
+ :ref:`Version specifier specification <version-specifiers>` for a full
+ description of the specifiers that Python packaging currently supports.
+ Support for this specification was implemented in :ref:`setuptools`
+ v8.0 and :ref:`pip` v6.0.
+
+ Virtual Environment
+
+ An isolated Python environment that allows packages to be installed for
+ use by a particular application, rather than being installed system
+ wide. For more information, see the section on :ref:`Creating and using
+ Virtual Environments`.
+
+
+ Wheel Format
+ Wheel
+
+ The standard :term:`Built Distribution` format
+ originally introduced in :pep:`427`
+ and defined by the :ref:`binary-distribution-format` specification.
+ See :ref:`package-formats` for more information.
+ Not to be confused with its reference implementation,
+ the :term:`Wheel Project`.
+
+
+ Wheel Project
+
+ The PyPA reference implementation of the :term:`Wheel Format`; see :ref:`wheel`.
+
+
+ Working Set
+
+ A collection of :term:`distributions <Distribution Package>` available
+ for importing. These are the distributions that are on the ``sys.path``
+ variable. At most, one :term:`Distribution <Distribution Package>` for a
+ project is possible in a working set.
diff --git a/_build/html/_sources/guides/analyzing-pypi-package-downloads.rst.txt b/_build/html/_sources/guides/analyzing-pypi-package-downloads.rst.txt
new file mode 100644
index 000000000..2ad02fed5
--- /dev/null
+++ b/_build/html/_sources/guides/analyzing-pypi-package-downloads.rst.txt
@@ -0,0 +1,348 @@
+.. _analyzing-pypi-package-downloads:
+
+================================
+Analyzing PyPI package downloads
+================================
+
+This section covers how to use the public PyPI download statistics dataset
+to learn more about downloads of a package (or packages) hosted on PyPI. For
+example, you can use it to discover the distribution of Python versions used to
+download a package.
+
+
+Background
+==========
+
+PyPI does not display download statistics for a number of reasons: [#]_
+
+- **Inefficient to make work with a Content Distribution Network (CDN):**
+ Download statistics change constantly. Including them in project pages, which
+ are heavily cached, would require invalidating the cache more often, and
+ reduce the overall effectiveness of the cache.
+
+- **Highly inaccurate:** A number of things prevent the download counts from
+ being accurate, some of which include:
+
+ - ``pip``'s download cache (lowers download counts)
+ - Internal or unofficial mirrors (can both raise or lower download counts)
+ - Packages not hosted on PyPI (for comparison's sake)
+ - Unofficial scripts or attempts at download count inflation (raises download
+ counts)
+ - Known historical data quality issues (lowers download counts)
+
+- **Not particularly useful:** Just because a project has been downloaded a lot
+ doesn't mean it's good; similarly, just because a project hasn't been
+ downloaded a lot doesn't mean it's bad!
+
+In short, because its value is low for various reasons, and the tradeoffs
+required to make it work are high, it has not been an effective use of
+limited resources.
+
+Public dataset
+==============
+
+As an alternative, the `Linehaul project <https://github.com/pypa/linehaul-cloud-function>`__
+streams download logs from PyPI to `Google BigQuery`_ [#]_, where they are
+stored as a public dataset.
+
+Getting set up
+--------------
+
+In order to use `Google BigQuery`_ to query the `public PyPI download
+statistics dataset`_, you'll need a Google account and to enable the BigQuery
+API on a Google Cloud Platform project. You can run up to 1TB of queries
+per month `using the BigQuery free tier without a credit card
+<https://cloud.google.com/blog/products/data-analytics/query-without-a-credit-card-introducing-bigquery-sandbox>`__
+
+- Navigate to the `BigQuery web UI`_.
+- Create a new project.
+- Enable the `BigQuery API
+ `__.
+
+For more detailed instructions on how to get started with BigQuery, check out
+the `BigQuery quickstart guide
+`__.
+
+
+Data schema
+-----------
+
+Linehaul writes an entry in a ``bigquery-public-data.pypi.file_downloads`` table for each
+download. The table contains information about what file was downloaded and how
+it was downloaded. Some useful columns from the `table schema
+`__
+include:
+
++------------------------+-----------------+-----------------------------+
+| Column | Description | Examples |
++========================+=================+=============================+
+| timestamp | Date and time | ``2020-03-09 00:33:03 UTC`` |
++------------------------+-----------------+-----------------------------+
+| file.project | Project name | ``pipenv``, ``nose`` |
++------------------------+-----------------+-----------------------------+
+| file.version | Package version | ``0.1.6``, ``1.4.2`` |
++------------------------+-----------------+-----------------------------+
+| details.installer.name | Installer | pip, :ref:`bandersnatch` |
++------------------------+-----------------+-----------------------------+
+| details.python | Python version | ``2.7.12``, ``3.6.4`` |
++------------------------+-----------------+-----------------------------+
+
+
+Useful queries
+--------------
+
+Run queries in the `BigQuery web UI`_ by clicking the "Compose query" button.
+
+Note that the rows are stored in a partitioned table, which helps
+limit the cost of queries. These example queries analyze downloads from
+recent history by filtering on the ``timestamp`` column.
+
+Counting package downloads
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following query counts the total number of downloads for the project
+"pytest".
+
+.. code-block:: sql
+
+ #standardSQL
+ SELECT COUNT(*) AS num_downloads
+ FROM `bigquery-public-data.pypi.file_downloads`
+ WHERE file.project = 'pytest'
+ -- Only query the last 30 days of history
+ AND DATE(timestamp)
+ BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY)
+ AND CURRENT_DATE()
+
++---------------+
+| num_downloads |
++===============+
+| 26190085 |
++---------------+
+
+To count downloads from pip only, filter on the ``details.installer.name``
+column.
+
+.. code-block:: sql
+
+ #standardSQL
+ SELECT COUNT(*) AS num_downloads
+ FROM `bigquery-public-data.pypi.file_downloads`
+ WHERE file.project = 'pytest'
+ AND details.installer.name = 'pip'
+ -- Only query the last 30 days of history
+ AND DATE(timestamp)
+ BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY)
+ AND CURRENT_DATE()
+
++---------------+
+| num_downloads |
++===============+
+| 24334215 |
++---------------+
+
+Package downloads over time
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+To group by monthly downloads, use the ``DATE_TRUNC`` function. Filtering
+by this column also reduces the corresponding query costs.
+
+.. code-block:: sql
+
+ #standardSQL
+ SELECT
+ COUNT(*) AS num_downloads,
+ DATE_TRUNC(DATE(timestamp), MONTH) AS `month`
+ FROM `bigquery-public-data.pypi.file_downloads`
+ WHERE
+ file.project = 'pytest'
+ -- Only query the last 6 months of history
+ AND DATE(timestamp)
+ BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 6 MONTH), MONTH)
+ AND CURRENT_DATE()
+ GROUP BY `month`
+ ORDER BY `month` DESC
+
++---------------+------------+
+| num_downloads | month |
++===============+============+
+| 1956741 | 2018-01-01 |
++---------------+------------+
+| 2344692 | 2017-12-01 |
++---------------+------------+
+| 1730398 | 2017-11-01 |
++---------------+------------+
+| 2047310 | 2017-10-01 |
++---------------+------------+
+| 1744443 | 2017-09-01 |
++---------------+------------+
+| 1916952 | 2017-08-01 |
++---------------+------------+
+
+Python versions over time
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Extract the Python version from the ``details.python`` column. Warning: This
+query processes over 500 GB of data.
+
+.. code-block:: sql
+
+ #standardSQL
+ SELECT
+ REGEXP_EXTRACT(details.python, r"[0-9]+\.[0-9]+") AS python_version,
+ COUNT(*) AS num_downloads,
+ FROM `bigquery-public-data.pypi.file_downloads`
+ WHERE
+ -- Only query the last 6 months of history
+ DATE(timestamp)
+ BETWEEN DATE_TRUNC(DATE_SUB(CURRENT_DATE(), INTERVAL 6 MONTH), MONTH)
+ AND CURRENT_DATE()
+ GROUP BY `python_version`
+ ORDER BY `num_downloads` DESC
+
++--------+---------------+
+| python | num_downloads |
++========+===============+
+| 3.7 | 18051328726 |
++--------+---------------+
+| 3.6 | 9635067203 |
++--------+---------------+
+| 3.8 | 7781904681 |
++--------+---------------+
+| 2.7 | 6381252241 |
++--------+---------------+
+| null | 2026630299 |
++--------+---------------+
+| 3.5 | 1894153540 |
++--------+---------------+
+
+
+Getting absolute links to artifacts
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It's sometimes helpful to be able to get the absolute links to download
+artifacts from PyPI based on their hashes, e.g. if a particular project or
+release has been deleted from PyPI. The metadata table includes the ``path``
+column, which includes the hash and artifact filename.
+
+.. note::
+ The URL generated here is not guaranteed to be stable, but currently aligns with the URL where PyPI artifacts are hosted.
+
+.. code-block:: sql
+
+ SELECT
+ CONCAT('https://files.pythonhosted.org/packages', path) as url
+ FROM
+ `bigquery-public-data.pypi.distribution_metadata`
+ WHERE
+ filename LIKE 'sampleproject%'
+
+
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| url |
++===================================================================================================================================================================+
+| https://files.pythonhosted.org/packages/eb/45/79be82bdeafcecb9dca474cad4003e32ef8e4a0dec6abbd4145ccb02abe1/sampleproject-1.2.0.tar.gz |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| https://files.pythonhosted.org/packages/56/0a/178e8bbb585ec5b13af42dae48b1d7425d6575b3ff9b02e5ec475e38e1d6/sampleproject_nomura-1.2.0-py2.py3-none-any.whl |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| https://files.pythonhosted.org/packages/63/88/3200eeaf22571f18d2c41e288862502e33365ccbdc12b892db23f51f8e70/sampleproject_nomura-1.2.0.tar.gz |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| https://files.pythonhosted.org/packages/21/e9/2743311822e71c0756394b6c5ab15cb64ca66c78c6c6a5cd872c9ed33154/sampleproject_doubleyoung18-1.3.0-py2.py3-none-any.whl |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| https://files.pythonhosted.org/packages/6f/5b/2f3fe94e1c02816fe23c7ceee5292fb186912929e1972eee7fb729fa27af/sampleproject-1.3.1.tar.gz |
++-------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+
+Caveats
+=======
+
+In addition to the caveats listed in the background above, Linehaul suffered
+from a bug which caused it to significantly under-report download statistics
+prior to July 26, 2018. Downloads before this date are proportionally accurate
+(e.g. the percentage of Python 2 vs. Python 3 downloads) but total numbers are
+lower than actual by an order of magnitude.
+
+
+Additional tools
+================
+
+Besides using the BigQuery console, there are some additional tools which may
+be useful when analyzing download statistics.
+
+``google-cloud-bigquery``
+-------------------------
+
+You can also access the public PyPI download statistics dataset
+programmatically via the BigQuery API and the `google-cloud-bigquery`_ project,
+the official Python client library for BigQuery.
+
+.. code-block:: python
+
+ from google.cloud import bigquery
+
+ # Note: depending on where this code is being run, you may require
+ # additional authentication. See:
+ # https://cloud.google.com/bigquery/docs/authentication/
+ client = bigquery.Client()
+
+ query_job = client.query("""
+ SELECT COUNT(*) AS num_downloads
+ FROM `bigquery-public-data.pypi.file_downloads`
+ WHERE file.project = 'pytest'
+ -- Only query the last 30 days of history
+ AND DATE(timestamp)
+ BETWEEN DATE_SUB(CURRENT_DATE(), INTERVAL 30 DAY)
+ AND CURRENT_DATE()""")
+
+ results = query_job.result() # Waits for job to complete.
+ for row in results:
+ print("{} downloads".format(row.num_downloads))
+
+
+``pypinfo``
+-----------
+
+`pypinfo`_ is a command-line tool which provides access to the dataset and
+can generate several useful queries. For example, you can query the total
+number of downloads for a package with the command ``pypinfo package_name``.
+
+Install `pypinfo`_ using pip.
+
+.. code-block:: bash
+
+ python3 -m pip install pypinfo
+
+Usage:
+
+.. code-block:: console
+
+ $ pypinfo requests
+ Served from cache: False
+ Data processed: 6.87 GiB
+ Data billed: 6.87 GiB
+ Estimated cost: $0.04
+
+ | download_count |
+ | -------------- |
+ | 9,316,415 |
+
+
+``pandas-gbq``
+--------------
+
+The `pandas-gbq`_ project allows for accessing query results via `Pandas`_.
+
+
+References
+==========
+
+.. [#] `PyPI Download Counts deprecation email `__
+.. [#] `PyPI BigQuery dataset announcement email `__
+
+.. _public PyPI download statistics dataset: https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=pypi&page=dataset
+.. _Google BigQuery: https://cloud.google.com/bigquery
+.. _BigQuery web UI: https://console.cloud.google.com/bigquery
+.. _pypinfo: https://github.com/ofek/pypinfo
+.. _google-cloud-bigquery: https://cloud.google.com/bigquery/docs/reference/libraries
+.. _pandas-gbq: https://pandas-gbq.readthedocs.io/en/latest/
+.. _Pandas: https://pandas.pydata.org/
diff --git a/_build/html/_sources/guides/creating-and-discovering-plugins.rst.txt b/_build/html/_sources/guides/creating-and-discovering-plugins.rst.txt
new file mode 100644
index 000000000..601f2b4a6
--- /dev/null
+++ b/_build/html/_sources/guides/creating-and-discovering-plugins.rst.txt
@@ -0,0 +1,171 @@
+================================
+Creating and discovering plugins
+================================
+
+Often when creating a Python application or library you'll want the ability to
+provide customizations or extra features via **plugins**. Because Python
+packages can be separately distributed, your application or library may want to
+automatically **discover** all of the plugins available.
+
+There are three major approaches to doing automatic plugin discovery:
+
+#. `Using naming convention`_.
+#. `Using namespace packages`_.
+#. `Using package metadata`_.
+
+
+Using naming convention
+=======================
+
+If all of the plugins for your application follow the same naming convention,
+you can use :func:`pkgutil.iter_modules` to discover all of the top-level
+modules that match the naming convention. For example, `Flask`_ uses the
+naming convention ``flask_{plugin_name}``. If you wanted to automatically
+discover all of the Flask plugins installed:
+
+.. code-block:: python
+
+ import importlib
+ import pkgutil
+
+ discovered_plugins = {
+ name: importlib.import_module(name)
+ for finder, name, ispkg
+ in pkgutil.iter_modules()
+ if name.startswith('flask_')
+ }
+
+If you had both the `Flask-SQLAlchemy`_ and `Flask-Talisman`_ plugins installed
+then ``discovered_plugins`` would be:
+
+.. code-block:: python
+
+ {
+ 'flask_sqlalchemy': <module 'flask_sqlalchemy' from '...'>,
+ 'flask_talisman': <module 'flask_talisman' from '...'>,
+ }
+
+Using naming convention for plugins also allows you to query
+the Python Package Index's :ref:`simple repository API <simple-repository-api>`
+for all packages that conform to your naming convention.
+
+.. _Flask: https://pypi.org/project/Flask/
+.. _Flask-SQLAlchemy: https://pypi.org/project/Flask-SQLAlchemy/
+.. _Flask-Talisman: https://pypi.org/project/flask-talisman
+
+
+Using namespace packages
+========================
+
+:doc:`Namespace packages <packaging-namespace-packages>` can be used to provide
+a convention for where to place plugins and also provides a way to perform
+discovery. For example, if you make the sub-package ``myapp.plugins`` a
+namespace package then other :term:`distributions ` can
+provide modules and packages to that namespace. Once installed, you can use
+:func:`pkgutil.iter_modules` to discover all modules and packages installed
+under that namespace:
+
+.. code-block:: python
+
+ import importlib
+ import pkgutil
+
+ import myapp.plugins
+
+ def iter_namespace(ns_pkg):
+ # Specifying the second argument (prefix) to iter_modules makes the
+ # returned name an absolute name instead of a relative one. This allows
+ # import_module to work without having to do additional modification to
+ # the name.
+ return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".")
+
+ discovered_plugins = {
+ name: importlib.import_module(name)
+ for finder, name, ispkg
+ in iter_namespace(myapp.plugins)
+ }
+
+Specifying ``myapp.plugins.__path__`` to :func:`~pkgutil.iter_modules` causes
+it to only look for the modules directly under that namespace. For example,
+if you have installed distributions that provide the modules ``myapp.plugins.a``
+and ``myapp.plugins.b`` then ``discovered_plugins`` in this case would be:
+
+.. code-block:: python
+
+ {
+ 'a': <module 'myapp.plugins.a' from '...'>,
+ 'b': <module 'myapp.plugins.b' from '...'>,
+ }
+
+This sample uses a sub-package as the namespace package (``myapp.plugins``), but
+it's also possible to use a top-level package for this purpose (such as
+``myapp_plugins``). How to pick the namespace to use is a matter of preference,
+but it's not recommended to make your project's main top-level package
+(``myapp`` in this case) a namespace package for the purpose of plugins, as one
+bad plugin could cause the entire namespace to break which would in turn make
+your project unimportable. For the "namespace sub-package" approach to work,
+the plugin packages must omit the :file:`__init__.py` for your top-level
+package directory (``myapp`` in this case) and include the namespace-package
+style :file:`__init__.py` in the namespace sub-package directory
+(``myapp/plugins``). This also means that plugins will need to explicitly pass
+a list of packages to :func:`setup`'s ``packages`` argument instead of using
+:func:`setuptools.find_packages`.
+
+.. warning:: Namespace packages are a complex feature and there are several
+ different ways to create them. It's highly recommended to read the
+ :doc:`packaging-namespace-packages` documentation and clearly document
+ which approach is preferred for plugins to your project.
+
+.. _plugin-entry-points:
+
+Using package metadata
+======================
+
+Packages can have metadata for plugins described in the :ref:`entry-points`.
+By specifying them, a package announces that it contains a specific kind of plugin.
+Another package supporting this kind of plugin can use the metadata to discover that plugin.
+
+For example if you have a package named ``myapp-plugin-a`` and it includes
+the following in its ``pyproject.toml``:
+
+.. code-block:: toml
+
+ [project.entry-points.'myapp.plugins']
+ a = 'myapp_plugin_a'
+
+Then you can discover and load all of the registered entry points by using
+:func:`importlib.metadata.entry_points` (or the backport_
+``importlib_metadata >= 3.6`` for Python 3.6-3.9):
+
+.. code-block:: python
+
+ import sys
+ if sys.version_info < (3, 10):
+ from importlib_metadata import entry_points
+ else:
+ from importlib.metadata import entry_points
+
+ discovered_plugins = entry_points(group='myapp.plugins')
+
+
+In this example, ``discovered_plugins`` would be a collection of type :class:`importlib.metadata.EntryPoint`:
+
+.. code-block:: python
+
+ (
+ EntryPoint(name='a', value='myapp_plugin_a', group='myapp.plugins'),
+ ...
+ )
+
+Now the module of your choice can be imported by executing
+``discovered_plugins['a'].load()``.
+
+.. note:: The ``entry_point`` specification in :file:`setup.py` is fairly
+ flexible and has a lot of options. It's recommended to read over the entire
+ section on :doc:`entry points </specifications/entry-points>`.
+
+.. note:: Since this specification is part of the :doc:`standard library
+ <python:library/importlib.metadata>`, most packaging tools other than setuptools
+ provide support for defining entry points.
+
+.. _backport: https://importlib-metadata.readthedocs.io/en/latest/
diff --git a/_build/html/_sources/guides/creating-command-line-tools.rst.txt b/_build/html/_sources/guides/creating-command-line-tools.rst.txt
new file mode 100644
index 000000000..cbe8b3bb0
--- /dev/null
+++ b/_build/html/_sources/guides/creating-command-line-tools.rst.txt
@@ -0,0 +1,183 @@
+.. _creating-command-line-tools:
+
+=========================================
+Creating and packaging command-line tools
+=========================================
+
+This guide will walk you through creating and packaging a standalone command-line application
+that can be installed with :ref:`pipx`, a tool for creating and managing :term:`Python Virtual Environments `
+and exposing the executable scripts of packages (and available manual pages) for use on the command-line.
+
+Creating the package
+====================
+
+First of all, create a source tree for the :term:`project `. For the sake of an example, we'll
+build a simple tool outputting a greeting (a string) for a person based on arguments given on the command-line.
+
+.. todo:: Advise on the optimal structure of a Python package in another guide or discussion and link to it here.
+
+This project will adhere to :ref:`src-layout <src-layout-vs-flat-layout>` and in the end resemble this file tree,
+with the top-level folder and package name ``greetings``:
+
+::
+
+ .
+ ├── pyproject.toml
+ └── src
+ └── greetings
+ ├── cli.py
+ ├── greet.py
+ ├── __init__.py
+ └── __main__.py
+
+The actual code responsible for the tool's functionality will be stored in the file :file:`greet.py`,
+named after the main module:
+
+.. code-block:: python
+
+ import typer
+ from typing_extensions import Annotated
+
+
+ def greet(
+ name: Annotated[str, typer.Argument(help="The (last, if --title is given) name of the person to greet")] = "",
+ title: Annotated[str, typer.Option(help="The preferred title of the person to greet")] = "",
+ doctor: Annotated[bool, typer.Option(help="Whether the person is a doctor (MD or PhD)")] = False,
+ count: Annotated[int, typer.Option(help="Number of times to greet the person")] = 1
+ ):
+ greeting = "Greetings, "
+ if doctor and not title:
+ title = "Dr."
+ if not name:
+ if title:
+ name = title.lower().rstrip(".")
+ else:
+ name = "friend"
+ if title:
+ greeting += f"{title} "
+ greeting += f"{name}!"
+ for i in range(0, count):
+ print(greeting)
+
+The above function receives several keyword arguments that determine how the greeting to output is constructed.
+Now, construct the command-line interface to provision it with the same, which is done
+in :file:`cli.py`:
+
+.. code-block:: python
+
+ import typer
+
+ from .greet import greet
+
+
+ app = typer.Typer()
+ app.command()(greet)
+
+
+ if __name__ == "__main__":
+ app()
+
+The command-line interface is built with typer_, an easy-to-use CLI parser based on Python type hints. It provides
+auto-completion and nicely styled command-line help out of the box. Another option would be :py:mod:`argparse`,
+a command-line parser which is included in Python's standard library. It is sufficient for most needs, but requires
+a lot of code, usually in ``cli.py``, to function properly. Alternatively, docopt_ makes it possible to create CLI
+interfaces based solely on docstrings; advanced users are encouraged to make use of click_ (on which ``typer`` is based).
+
+Now, add an empty :file:`__init__.py` file, to define the project as a regular :term:`import package `.
+
+The file :file:`__main__.py` marks the main entry point for the application when running it via :mod:`runpy`
+(i.e. ``python -m greetings``, which works immediately with flat layout, but requires installation of the package with src layout),
+so initialize the command-line interface here:
+
+.. code-block:: python
+
+ if __name__ == "__main__":
+ from greetings.cli import app
+ app()
+
+.. note::
+
+ In order to enable calling the command-line interface directly from the :term:`source tree `,
+ i.e. as ``python src/greetings``, a certain hack could be placed in this file; read more at
+ :ref:`running-cli-from-source-src-layout`.
+
+
+``pyproject.toml``
+------------------
+
+The project's :term:`metadata ` is placed in :term:`pyproject.toml`. The :term:`pyproject metadata keys ` and the ``[build-system]`` table may be filled in as described in :ref:`writing-pyproject-toml`, adding a dependency
+on ``typer`` (this tutorial uses version *0.12.3*).
+
+For the project to be recognised as a command-line tool, additionally a ``console_scripts`` :ref:`entry point ` (see :ref:`console_scripts`) needs to be added as a :term:`subkey `:
+
+.. code-block:: toml
+
+ [project.scripts]
+ greet = "greetings.cli:app"
+
+Now, the project's source tree is ready to be transformed into a :term:`distribution package `,
+which makes it installable.
+
+
+Installing the package with ``pipx``
+====================================
+
+After installing ``pipx`` as described in :ref:`installing-stand-alone-command-line-tools`, install your project:
+
+.. code-block:: console
+
+ $ cd path/to/greetings/
+ $ pipx install .
+
+This will expose the executable script we defined as an entry point and make the command ``greet`` available.
+Let's test it:
+
+.. code-block:: console
+
+ $ greet
+ Greetings, friend!
+ $ greet --doctor Brennan
+ Greetings, Dr. Brennan!
+ $ greet --title Ms. Parks
+ Greetings, Ms. Parks!
+ $ greet --title Mr.
+ Greetings, Mr. mr!
+
+Since this example uses ``typer``, you could now also get an overview of the program's usage by calling it with
+the ``--help`` option, or configure completions via the ``--install-completion`` option.
+
+To just run the program without installing it permanently, use ``pipx run``, which will create a temporary
+(but cached) virtual environment for it:
+
+.. code-block:: console
+
+ $ pipx run --spec . greet --doctor
+
+This syntax is a bit impractical, however; as the name of the entry point we defined above does not match the package name,
+we need to state explicitly which executable script to run (even though there is only one in existence).
+
+There is, however, a more practical solution to this problem, in the form of an entry point specific to ``pipx run``.
+The same can be defined as follows in :file:`pyproject.toml`:
+
+.. code-block:: toml
+
+ [project.entry-points."pipx.run"]
+ greetings = "greetings.cli:app"
+
+
+Thanks to this entry point (which *must* match the package name), ``pipx`` will pick up the executable script as the
+default one and run it, which makes this command possible:
+
+.. code-block:: console
+
+ $ pipx run . --doctor
+
+Conclusion
+==========
+
+You know by now how to package a command-line application written in Python. A further step could be to distribute your package,
+meaning uploading it to a :term:`package index `, most commonly :term:`PyPI `. To do that, follow the instructions at :ref:`Packaging your project`. And once you're done, don't forget to :ref:`do some research ` on how your package is received!
+
+.. _click: https://click.palletsprojects.com/
+.. _docopt: https://docopt.readthedocs.io/en/latest/
+.. _typer: https://typer.tiangolo.com/
diff --git a/_build/html/_sources/guides/distributing-packages-using-setuptools.rst.txt b/_build/html/_sources/guides/distributing-packages-using-setuptools.rst.txt
new file mode 100644
index 000000000..bf4227aae
--- /dev/null
+++ b/_build/html/_sources/guides/distributing-packages-using-setuptools.rst.txt
@@ -0,0 +1,592 @@
+.. _distributing-packages:
+
+===================================
+Packaging and distributing projects
+===================================
+
+:Page Status: Outdated
+:Last Reviewed: 2023-12-14
+
+This section covers some additional details on configuring, packaging and
+distributing Python projects with ``setuptools`` that aren't covered by the
+introductory tutorial in :doc:`/tutorials/packaging-projects`. It still assumes
+that you are already familiar with the contents of the
+:doc:`/tutorials/installing-packages` page.
+
+The section does *not* aim to cover best practices for Python project
+development as a whole. For example, it does not provide guidance or tool
+recommendations for version control, documentation, or testing.
+
+For more reference material, see :std:doc:`Building and Distributing
+Packages ` in the :ref:`setuptools` docs, but note
+that some advisory content there may be outdated. In the event of
+conflicts, prefer the advice in the Python Packaging User Guide.
+
+
+
+Requirements for packaging and distributing
+===========================================
+1. First, make sure you have already fulfilled the :ref:`requirements for
+ installing packages `.
+
+2. Install "twine" [1]_:
+
+ .. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install twine
+
+ .. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install twine
+
+ You'll need this to upload your project :term:`distributions ` to :term:`PyPI ` (see :ref:`below
+ `).
+
+
+Configuring your project
+========================
+
+
+Initial files
+-------------
+
+setup.py
+~~~~~~~~
+
+The most important file is :file:`setup.py` which exists at the root of your
+project directory. For an example, see the `setup.py
+`_ in the `PyPA
+sample project `_.
+
+:file:`setup.py` serves two primary functions:
+
+1. It's the file where various aspects of your project are configured. The
+ primary feature of :file:`setup.py` is that it contains a global ``setup()``
+ function. The keyword arguments to this function are how specific details
+ of your project are defined. The most relevant arguments are explained in
+ :ref:`the section below `.
+
+2. It's the command line interface for running various commands that
+ relate to packaging tasks. To get a listing of available commands, run
+ ``python3 setup.py --help-commands``.
+
+
+setup.cfg
+~~~~~~~~~
+
+:file:`setup.cfg` is an ini file that contains option defaults for
+:file:`setup.py` commands. For an example, see the `setup.cfg
+`_ in the `PyPA
+sample project `_.
+
+
+README.rst / README.md
+~~~~~~~~~~~~~~~~~~~~~~
+
+All projects should contain a readme file that covers the goal of the project.
+The most common format is `reStructuredText
+`_ with an "rst" extension, although
+this is not a requirement; multiple variants of `Markdown
+`_ are supported as well (look
+at ``setup()``'s :ref:`long_description_content_type ` argument).
+
+For an example, see `README.md
+`_ from the `PyPA
+sample project `_.
+
+.. note:: Projects using :ref:`setuptools` 0.6.27+ have standard readme files
+ (:file:`README.rst`, :file:`README.txt`, or :file:`README`) included in
+ source distributions by default. The built-in :ref:`distutils` library adopts
+ this behavior beginning in Python 3.7. Additionally, :ref:`setuptools`
+ 36.4.0+ will include a :file:`README.md` if found. If you are using
+ setuptools, you don't need to list your readme file in :file:`MANIFEST.in`.
+ Otherwise, include it to be explicit.
+
+MANIFEST.in
+~~~~~~~~~~~
+
+A :file:`MANIFEST.in` is needed when you need to package additional files that
+are not automatically included in a source distribution. For details on
+writing a :file:`MANIFEST.in` file, including a list of what's included by
+default, see ":ref:`Using MANIFEST.in`".
+
+However, you may not have to use a :file:`MANIFEST.in`. For an example, the `PyPA
+sample project `_ has removed its manifest
+file, since all the necessary files have been included by :ref:`setuptools` 43.0.0
+and newer.
+
+.. note:: :file:`MANIFEST.in` does not affect binary distributions such as wheels.
+
+LICENSE.txt
+~~~~~~~~~~~
+
+Every package should include a license file detailing the terms of
+distribution. In many jurisdictions, packages without an explicit license can
+not be legally used or distributed by anyone other than the copyright holder.
+If you're unsure which license to choose, you can use resources such as
+`GitHub's Choose a License `_ or consult a lawyer.
+
+For an example, see the `LICENSE.txt
+`_ from the `PyPA
+sample project `_.
+
+
+``<your package>``
+~~~~~~~~~~~~~~~~~~
+
+Although it's not required, the most common practice is to include your
+Python modules and packages under a single top-level package that has the same
+:ref:`name ` as your project, or something very close.
+
+For an example, see the `sample
+`_ package that's
+included in the `PyPA sample project `_.
+
+
+.. _`setup() args`:
+
+setup() args
+------------
+
+As mentioned above, the primary feature of :file:`setup.py` is that it contains
+a global ``setup()`` function. The keyword arguments to this function are how
+specific details of your project are defined.
+
+Some are temporarily explained below until their information is moved elsewhere.
+The full list can be found :doc:`in the setuptools documentation
+`.
+
+Most of the snippets given are
+taken from the `setup.py
+`_ contained in the
+`PyPA sample project `_.
+
+
+
+See :ref:`Choosing a versioning scheme` for more information on ways to use versions to convey
+compatibility information to your users.
+
+
+
+
+``packages``
+~~~~~~~~~~~~
+
+::
+
+ packages=find_packages(include=['sample', 'sample.*']),
+
+Set ``packages`` to a list of all :term:`packages ` in your
+project, including their subpackages, sub-subpackages, etc. Although the
+packages can be listed manually, ``setuptools.find_packages()`` finds them
+automatically. Use the ``include`` keyword argument to find only the given
+packages. Use the ``exclude`` keyword argument to omit packages that are not
+intended to be released and installed.
+
+
+``py_modules``
+~~~~~~~~~~~~~~
+
+::
+
+ py_modules=["six"],
+
+If your project contains any single-file Python modules that aren't part of a
+package, set ``py_modules`` to a list of the names of the modules (minus the
+``.py`` extension) in order to make :ref:`setuptools` aware of them.
+
+
+``install_requires``
+~~~~~~~~~~~~~~~~~~~~
+
+::
+
+ install_requires=['peppercorn'],
+
+"install_requires" should be used to specify what dependencies a project
+minimally needs to run. When the project is installed by :ref:`pip`, this is the
+specification that is used to install its dependencies.
+
+For more on using "install_requires" see :ref:`install_requires vs Requirements files`.
+
+
+
+.. _`Package Data`:
+
+``package_data``
+~~~~~~~~~~~~~~~~
+
+::
+
+ package_data={
+ 'sample': ['package_data.dat'],
+ },
+
+
+Often, additional files need to be installed into a :term:`package `. These files are often data that’s closely related to the package’s
+implementation, or text files containing documentation that might be of interest
+to programmers using the package. These files are called "package data".
+
+The value must be a mapping from package name to a list of relative path names
+that should be copied into the package. The paths are interpreted as relative to
+the directory containing the package.
+
+For more information, see :std:doc:`Including Data Files
+` from the
+:std:doc:`setuptools docs `.
+
+
+.. _`Data Files`:
+
+``data_files``
+~~~~~~~~~~~~~~
+
+::
+
+ data_files=[('my_data', ['data/data_file'])],
+
+Although configuring :ref:`Package Data` is sufficient for most needs, in some
+cases you may need to place data files *outside* of your :term:`packages
+`. The ``data_files`` directive allows you to do that.
+It is mostly useful if you need to install files which are used by other
+programs, which may be unaware of Python packages.
+
+Each ``(directory, files)`` pair in the sequence specifies the installation
+directory and the files to install there. The ``directory`` must be a relative
+path (although this may change in the future, see
+`wheel Issue #92 `_),
+and it is interpreted relative to the installation prefix
+(Python’s ``sys.prefix`` for a default installation;
+``site.USER_BASE`` for a user installation).
+Each file name in ``files`` is interpreted relative to the :file:`setup.py`
+script at the top of the project source distribution.
+
+For more information see the distutils section on :ref:`Installing Additional Files
+`.
+
+.. note::
+
+ When installing packages as egg, ``data_files`` is not supported.
+ So, if your project uses :ref:`setuptools`, you must use ``pip``
+ to install it. Alternatively, if you must use ``python setup.py``,
+ then you need to pass the ``--old-and-unmanageable`` option.
+
+
+``scripts``
+~~~~~~~~~~~
+
+Although ``setup()`` supports a :ref:`scripts
+`
+keyword for pointing to pre-made scripts to install, the recommended approach to
+achieve cross-platform compatibility is to use :ref:`console_scripts` entry
+points (see below).
+
+
+Choosing a versioning scheme
+----------------------------
+
+See :ref:`versioning` for information on common version schemes and how to
+choose between them.
+
+
+Working in "development mode"
+=============================
+
+You can install a project in "editable"
+or "develop" mode while you're working on it.
+When installed as editable, a project can be
+edited in-place without reinstallation:
+changes to Python source files in projects installed as editable will be reflected the next time an interpreter process is started.
+
+To install a Python package in "editable"/"development" mode,
+change directory to the root of the project directory and run:
+
+.. code-block:: bash
+
+ python3 -m pip install -e .
+
+
+The pip command-line flag ``-e`` is short for ``--editable``, and ``.`` refers
+to the current working directory, so together, it means to install the current
+directory (i.e. your project) in editable mode. This will also install any
+dependencies declared with ``install_requires`` and any scripts declared with
+``console_scripts``. Dependencies will be installed in the usual, non-editable
+mode.
+
+You may want to install some of your dependencies in editable
+mode as well. For example, supposing your project requires "foo" and "bar", but
+you want "bar" installed from VCS in editable mode, then you could construct a
+requirements file like so::
+
+ -e .
+ -e bar @ git+https://somerepo/bar.git
+
+The first line says to install your project and any dependencies. The second
+line overrides the "bar" dependency, such that it's fulfilled from VCS, not
+PyPI.
+
+If, however, you want "bar" installed from a local directory in editable mode, the requirements file should look like this, with the local paths at the top of the file::
+
+ -e /path/to/project/bar
+ -e .
+
+Otherwise, the dependency will be fulfilled from PyPI, due to the installation order of the requirements file. For more on requirements files, see the :ref:`Requirements File
+` section in the pip docs. For more on VCS installs,
+see the :ref:`VCS Support ` section of the pip docs.
+
+Lastly, if you don't want to install any dependencies at all, you can run:
+
+.. code-block:: bash
+
+ python3 -m pip install -e . --no-deps
+
+
+For more information, see the
+:doc:`Development Mode ` section
+of the :ref:`setuptools` docs.
+
+.. _`Packaging your project`:
+
+Packaging your project
+======================
+
+To have your project installable from a :term:`Package Index` like :term:`PyPI
+`, you'll need to create a :term:`Distribution
+` (aka ":term:`Package `") for your
+project.
+
+Before you can build wheels and sdists for your project, you'll need to install the
+``build`` package:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install build
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install build
+
+
+Source distributions
+--------------------
+
+Minimally, you should create a :term:`Source Distribution `:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m build --sdist
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m build --sdist
+
+
+A "source distribution" is unbuilt (i.e. it's not a :term:`Built
+Distribution`), and requires a build step when installed by pip. Even if the
+distribution is pure Python (i.e. contains no extensions), it still involves a
+build step to build out the installation metadata from :file:`setup.py` and/or
+:file:`setup.cfg`.
+
+
+Wheels
+------
+
+You should also create a wheel for your project. A wheel is a :term:`built
+package ` that can be installed without needing to go
+through the "build" process. Installing wheels is substantially faster for the
+end user than installing from a source distribution.
+
+If your project is pure Python then you'll be creating a
+:ref:`"Pure Python Wheel" (see section below) <Pure Python Wheels>`.
+
+If your project contains compiled extensions, then you'll be creating what's
+called a :ref:`"Platform Wheel" (see section below) <Platform Wheels>`.
+
+.. note:: If your project also supports Python 2 *and* contains no C extensions,
+ then you should create what's called a *Universal Wheel* by adding the
+ following to your :file:`setup.cfg` file:
+
+ .. code-block:: text
+
+ [bdist_wheel]
+ universal=1
+
+ Only use this setting if your project does not have any C extensions *and*
+ supports Python 2 and 3.
+
+
+.. _`Pure Python Wheels`:
+
+Pure Python Wheels
+~~~~~~~~~~~~~~~~~~
+
+*Pure Python Wheels* contain no compiled extensions, and therefore only require a
+single Python wheel.
+
+To build the wheel:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m build --wheel
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m build --wheel
+
+The ``wheel`` package will detect that the code is pure Python, and build a
+wheel that's named such that it's usable on any Python 3 installation. For
+details on the naming of wheel files, see :pep:`425`.
+
+If you run ``build`` without ``--wheel`` or ``--sdist``, it will build both
+files for you; this is useful when you don't need multiple wheels.
+
+.. _`Platform Wheels`:
+
+Platform Wheels
+~~~~~~~~~~~~~~~
+
+*Platform Wheels* are wheels that are specific to a certain platform like Linux,
+macOS, or Windows, usually due to containing compiled extensions.
+
+To build the wheel:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m build --wheel
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m build --wheel
+
+
+The ``wheel`` package will detect that the code is not pure Python, and build
+a wheel that's named such that it's only usable on the platform that it was
+built on. For details on the naming of wheel files, see :pep:`425`.
+
+.. note::
+
+ :term:`PyPI ` currently supports uploads of
+ platform wheels for Windows, macOS, and the multi-distro ``manylinux*`` ABI.
+ Details of the latter are defined in :pep:`513`.
+
+
+.. _`Uploading your Project to PyPI`:
+
+Uploading your Project to PyPI
+==============================
+
+When you ran the command to create your distribution, a new directory ``dist/``
+was created under your project's root directory. That's where you'll find your
+distribution file(s) to upload.
+
+.. note:: These files are only created when you run the command to create your
+ distribution. This means that any time you change the source of your project
+ or the configuration in your :file:`setup.py` file, you will need to rebuild
+ these files again before you can distribute the changes to PyPI.
+
+.. note:: Before releasing on main PyPI repo, you might prefer
+ training with the `PyPI test site `_ which
+ is cleaned on a semi regular basis. See :ref:`using-test-pypi` on
+ how to setup your configuration in order to use it.
+
+.. warning:: In other resources you may encounter references to using
+ ``python setup.py register`` and ``python setup.py upload``. These methods
+ of registering and uploading a package are **strongly discouraged** as it may
+ use a plaintext HTTP or unverified HTTPS connection on some Python versions,
+ allowing your username and password to be intercepted during transmission.
+
+.. tip:: The reStructuredText parser used on PyPI is **not** Sphinx!
+ Furthermore, to ensure safety of all users, certain kinds of URLs and
+ directives are forbidden or stripped out (e.g., the ``.. raw::``
+ directive). **Before** trying to upload your distribution, you should check
+ to see if your brief / long descriptions provided in :file:`setup.py` are
+ valid. You can do this by running :std:doc:`twine check ` on
+ your package files:
+
+ .. code-block:: bash
+
+ twine check dist/*
+
+Create an account
+-----------------
+
+First, you need a :term:`PyPI ` user account. You
+can create an account
+`using the form on the PyPI website `_.
+
+Now you'll create a PyPI `API token`_ so you will be able to securely upload
+your project.
+
+Go to https://pypi.org/manage/account/#api-tokens and create a new
+`API token`_; don't limit its scope to a particular project, since you
+are creating a new project.
+
+**Don't close the page until you have copied and saved the token — you
+won't see that token again.**
+
+.. Note:: To avoid having to copy and paste the token every time you
+ upload, you can create a :file:`$HOME/.pypirc` file:
+
+ .. code-block:: text
+
+ [pypi]
+ username = __token__
+ password = <the token value>
+
+ **Be aware that this stores your token in plaintext.**
+
+ For more details, see the :ref:`specification ` for :file:`.pypirc`.
+
+.. _register-your-project:
+.. _API token: https://pypi.org/help/#apitoken
+
+Upload your distributions
+-------------------------
+
+Once you have an account you can upload your distributions to
+:term:`PyPI ` using :ref:`twine`.
+
+The process for uploading a release is the same regardless of whether
+or not the project already exists on PyPI - if it doesn't exist yet,
+it will be automatically created when the first release is uploaded.
+
+For the second and subsequent releases, PyPI only requires that the
+version number of the new release differ from any previous releases.
+
+.. code-block:: bash
+
+ twine upload dist/*
+
+You can see if your package has successfully uploaded by navigating to the URL
+``https://pypi.org/project/<sampleproject>`` where ``sampleproject`` is
+the name of your project that you uploaded. It may take a minute or two for
+your project to appear on the site.
+
+----
+
+.. [1] Depending on your platform, this may require root or Administrator
+ access. :ref:`pip` is currently considering changing this by `making user
+ installs the default behavior
+ `_.
diff --git a/_build/html/_sources/guides/dropping-older-python-versions.rst.txt b/_build/html/_sources/guides/dropping-older-python-versions.rst.txt
new file mode 100644
index 000000000..267d7b923
--- /dev/null
+++ b/_build/html/_sources/guides/dropping-older-python-versions.rst.txt
@@ -0,0 +1,138 @@
+.. _`Dropping support for older Python versions`:
+
+==========================================
+Dropping support for older Python versions
+==========================================
+
+The ability to drop support for older Python versions is enabled by the standard :ref:`core-metadata` 1.2 specification via the :ref:`"Requires-Python" ` attribute.
+
+Metadata 1.2+ installers, such as Pip, will adhere to this specification by matching the current Python runtime and comparing it with the required version
+in the package metadata. If they do not match, it will attempt to install the last package distribution that supported that Python runtime.
+
+This mechanism can be used to drop support for older Python versions, by amending the ``Requires-Python`` attribute in the package metadata.
+
+Requirements
+------------
+
+This workflow requires that the user installing the package uses Pip [#]_, or another installer that supports the Metadata 1.2 specification.
+
+Dealing with the universal wheels
+---------------------------------
+
+Traditionally, :ref:`setuptools` projects providing Python code that is semantically
+compatible with both Python 2 and Python 3, produce :term:`wheels
+` that have a ``py2.py3`` tag in their names. When dropping
+support for Python 2, it is important not to forget to change this tag
+to just ``py3``. It is often configured within :file:`setup.cfg` under
+the ``[bdist_wheel]`` section by setting ``universal = 1``.
+
+If you use this method, either remove this option or section, or
+explicitly set ``universal`` to ``0``:
+
+.. code-block:: ini
+
+ # setup.cfg
+
+ [bdist_wheel]
+ universal = 0 # Make the generated wheels have "py3" tag
+
+.. hint::
+
+ Regarding :ref:`deprecated ` direct ``setup.py`` invocations,
+ passing the ``--universal`` flag on the command line could override this setting.
+
+Defining the Python version required
+------------------------------------
+
+1. Install twine
+~~~~~~~~~~~~~~~~
+
+Ensure that you have twine available at its latest version.
+Steps:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --upgrade twine
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --upgrade twine
+
+2. Specify the version ranges for supported Python distributions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Set the version ranges declaring which Python distributions are supported
+within your project's :file:`pyproject.toml`. The :ref:`requires-python` configuration field
+corresponds to the :ref:`Requires-Python ` core metadata field:
+
+.. code-block:: toml
+
+ [build-system]
+ ...
+
+ [project]
+ requires-python = ">= 3.8" # At least Python 3.8
+
+You can specify version ranges and exclusion rules (complying with the :ref:`version-specifiers` specification),
+such as at least Python 3.9. Or, at least Python 3.7 and beyond, skipping the 3.7.0 and 3.7.1 point releases:
+
+.. code-block:: toml
+
+ requires-python = ">= 3.9"
+ requires-python = ">= 3.7, != 3.7.0, != 3.7.1"
+
+
+If using the :ref:`setuptools` build backend, consult the `dependency-management`_ documentation for more options.
+
+.. caution::
+ Avoid adding upper bounds to the version ranges, e.g. ``">= 3.8, < 3.10"``. Doing so can cause different errors
+ and version conflicts. See the `discourse-discussion`_ for more information.
+
+3. Validating the Metadata before publishing
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Within a Python source package (the zip or the tar-gz file you download) is a text file called PKG-INFO.
+
+This file is generated by the :term:`build backend ` when it generates the source package.
+The file contains a set of keys and values, the list of keys is part of the PyPA standard metadata format.
+
+You can see the contents of the generated file like this:
+
+.. code-block:: bash
+
+ tar xfO dist/my-package-1.0.0.tar.gz my-package-1.0.0/PKG-INFO
+
+Validate that the following is in place, before publishing the package:
+
+- If you have upgraded correctly, the ``Metadata-Version`` value should be 1.2 or higher.
+- The ``Requires-Python`` field is set and matches your specification in the configuration file.
+
+4. Publishing the package
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Proceed as suggested in :ref:`Uploading your Project to PyPI`.
+
+Dropping a Python version
+-------------------------
+
+In principle, at least metadata support for Python versions should be kept as long as possible, because
+once that has been dropped, people still depending on a version will be forced to downgrade.
+If however supporting a specific version becomes a blocker for a new feature or other issues occur, the metadata
+``Requires-Python`` should be amended. Of course this also depends on whether the project needs to be stable and
+well-covered for a wider range of users.
+
+Each version compatibility change should have its own release.
+
+.. tip::
+
+ When dropping a Python version, it might also be rewarding to upgrade the project's code syntax generally, apart from updating the versions used in visible places (like the testing environment). Tools like pyupgrade_ or `ruff `_ can automate some of this work.
+
+.. _discourse-discussion: https://discuss.python.org/t/requires-python-upper-limits/12663
+.. _pyupgrade: https://pypi.org/project/pyupgrade/
+.. _dependency-management: https://setuptools.pypa.io/en/latest/userguide/dependency_management.html#python-requirement
+
+.. [#] Support for the Metadata 1.2 specification has been added in Pip 9.0.
diff --git a/_build/html/_sources/guides/hosting-your-own-index.rst.txt b/_build/html/_sources/guides/hosting-your-own-index.rst.txt
new file mode 100644
index 000000000..ebcd228ee
--- /dev/null
+++ b/_build/html/_sources/guides/hosting-your-own-index.rst.txt
@@ -0,0 +1,137 @@
+.. _`Hosting your Own Simple Repository`:
+
+==================================
+Hosting your own simple repository
+==================================
+
+
+If you wish to host your own simple repository [1]_, you can either use a
+software package like :doc:`devpi <devpi:index>` or you can simply create the proper
+directory structure and use any web server that can serve static files and
+generate an autoindex.
+
+In either case, since you'll be hosting a repository that is likely not in
+your user's default repositories, you should instruct them in your project's
+description to configure their installer appropriately. For example with pip:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --extra-index-url https://python.example.com/ foobar
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --extra-index-url https://python.example.com/ foobar
+
+In addition, it is **highly** recommended that you serve your repository with
+valid HTTPS. At this time, the security of your user's installations depends on
+all repositories using a valid HTTPS setup.
+
+
+"Manual" repository
+===================
+
+The directory layout is fairly simple, within a root directory you need to
+create a directory for each project. This directory should be the :ref:`normalized name <name-normalization>` of the project. Within each of these directories
+simply place each of the downloadable files. If you have the projects "Foo"
+(with the versions 1.0 and 2.0) and "bar" (with the version 0.1) You should
+end up with a structure that looks like::
+
+ .
+ ├── bar
+ │ └── bar-0.1.tar.gz
+ └── foo
+ ├── Foo-1.0.tar.gz
+ └── Foo-2.0.tar.gz
+
+Once you have this layout, simply configure your webserver to serve the root
+directory with autoindex enabled. For an example using the built in Web server
+in `Twisted`_, you would simply run ``twistd -n web --path .`` and then
+instruct users to add the URL to their installer's configuration.
+
+
+Existing projects
+=================
+
+.. list-table::
+ :header-rows: 1
+
+ * - Project
+ - Package upload
+ - PyPI fall-through [2]_
+ - Additional notes
+
+ * - :ref:`devpi`
+ - ✔
+ - ✔
+ - multiple indexes with inheritance, with syncing, replication, fail-over;
+ mirroring
+
+ * - :ref:`simpleindex`
+ -
+ - ✔
+ -
+
+ * - :ref:`pypiserver`
+ - ✔
+ -
+ -
+
+ * - :ref:`pypiprivate`
+ -
+ -
+ -
+
+ * - :ref:`pypicloud`
+ -
+ -
+ - unmaintained; also cached proxying; authentication, authorisation
+
+ * - :ref:`pywharf`
+ -
+ -
+ - unmaintained; serve files in GitHub
+
+ * - :ref:`pulppython`
+ - ✔
+ -
+ - also mirroring, proxying; plugin for Pulp
+
+ * - :ref:`pip2pi`
+ -
+ -
+ - also mirroring; manual synchronisation
+
+ * - :ref:`dumb-pypi`
+ -
+ -
+ - not a server, but a static file site generator
+
+ * - :ref:`httpserver`
+ -
+ -
+ - standard-library
+
+ * - `Apache <https://httpd.apache.org/>`_
+ -
+ - ✔
+ - using
+ `mod_rewrite
+ <https://httpd.apache.org/docs/current/mod/mod_rewrite.html>`_
+ and
+ `mod_cache_disk
+ <https://httpd.apache.org/docs/current/mod/mod_cache_disk.html>`_,
+ you can cache requests to package indexes through an Apache server
+
+----
+
+.. [1] For complete documentation of the simple repository protocol, see
+ :ref:`simple repository API <simple-repository-api>`.
+
+.. [2] Can be configured to fall back to PyPI (or another package index)
+ if a requested package is missing.
+
+.. _Twisted: https://twistedmatrix.com/
diff --git a/_build/html/_sources/guides/index-mirrors-and-caches.rst.txt b/_build/html/_sources/guides/index-mirrors-and-caches.rst.txt
new file mode 100644
index 000000000..f3b7bd243
--- /dev/null
+++ b/_build/html/_sources/guides/index-mirrors-and-caches.rst.txt
@@ -0,0 +1,117 @@
+.. _`PyPI mirrors and caches`:
+
+================================
+Package index mirrors and caches
+================================
+
+:Page Status: Incomplete
+:Last Reviewed: 2023-11-08
+
+Mirroring or caching of PyPI (and other
+:term:`package indexes <Package Index>`) can be used to speed up local
+package installation,
+allow offline work, handle corporate firewalls or just plain Internet flakiness.
+
+There are multiple classes of options in this area:
+
+1. local/hosted caching of package indexes.
+
+2. local/hosted mirroring of a package index. A mirror is a (whole or
+ partial) copy of a package index, which can be used in place of the
+ original index.
+
+3. private package index with fall-through to public package indexes (for
+ example, to mitigate dependency confusion attacks), also known as a
+ proxy.
+
+
+Caching with pip
+----------------
+
+pip provides a number of facilities for speeding up installation by using local
+cached copies of :term:`packages <Distribution Package>`:
+
+1. :ref:`Fast & local installs <pip:installing from local packages>`
+ by downloading all the requirements for a project and then pointing pip at
+ those downloaded files instead of going to PyPI.
+2. A variation on the above which pre-builds the installation files for
+ the requirements using :ref:`python3 -m pip wheel <pip:pip wheel>`:
+
+ .. code-block:: bash
+
+ python3 -m pip wheel --wheel-dir=/tmp/wheelhouse SomeProject
+ python3 -m pip install --no-index --find-links=/tmp/wheelhouse SomeProject
+
+
+Existing projects
+-----------------
+
+.. list-table::
+ :header-rows: 1
+
+ * - Project
+ - Cache
+ - Mirror
+ - Proxy
+ - Additional notes
+
+ * - :ref:`devpi`
+ - ✔
+ - ✔
+ -
+ - multiple indexes with inheritance; syncing, replication, fail-over;
+ package upload
+
+ * - :ref:`bandersnatch`
+ - ✔
+ - ✔
+ -
+ -
+
+ * - :ref:`simpleindex`
+ -
+ -
+ - ✔
+ - custom plugin enables caching; re-routing to other package indexes
+
+ * - :ref:`pypicloud`
+ - ✔
+ -
+ - ✔
+ - unmaintained; authentication, authorisation
+
+ * - :ref:`pulppython`
+ -
+ - ✔
+ - ✔
+ - plugin for Pulp; multiple proxied indexes; package upload
+
+ * - :ref:`proxpi`
+ - ✔
+ -
+ - ✔
+ - multiple proxied indexes
+
+ * - :ref:`nginx_pypi_cache`
+ - ✔
+ -
+ - ✔
+ - multiple proxied indexes
+
+ * - :ref:`flaskpypiproxy`
+ - ✔
+ -
+ - ✔
+ - unmaintained
+
+ * - `Apache <https://httpd.apache.org/>`_
+ - ✔
+ -
+ - ✔
+ - using
+ `mod_rewrite
+ <https://httpd.apache.org/docs/current/mod/mod_rewrite.html>`_
+ and
+ `mod_cache_disk
+ <https://httpd.apache.org/docs/current/mod/mod_cache_disk.html>`_,
+ you can cache requests to package indexes through an Apache server
diff --git a/_build/html/_sources/guides/index.rst.txt b/_build/html/_sources/guides/index.rst.txt
new file mode 100644
index 000000000..b87d0b1a8
--- /dev/null
+++ b/_build/html/_sources/guides/index.rst.txt
@@ -0,0 +1,15 @@
+Guides
+######
+
+**Guides** are focused on accomplishing a specific task and assume that you are
+already familiar with the basics of Python packaging. If you're looking for an
+introduction to packaging, see :doc:`/tutorials/index`.
+
+.. toctree::
+ :titlesonly:
+
+ section-install
+ section-build-and-publish
+ section-hosting
+ tool-recommendations
+ analyzing-pypi-package-downloads
diff --git a/_build/html/_sources/guides/installing-scientific-packages.rst.txt b/_build/html/_sources/guides/installing-scientific-packages.rst.txt
new file mode 100644
index 000000000..a1aeae567
--- /dev/null
+++ b/_build/html/_sources/guides/installing-scientific-packages.rst.txt
@@ -0,0 +1,139 @@
+.. _`NumPy and the Science Stack`:
+
+==============================
+Installing scientific packages
+==============================
+
+
+Scientific software tends to have more complex dependencies than most, and
+it will often have multiple build options to take advantage of different
+kinds of hardware, or to interoperate with different pieces of external
+software.
+
+In particular, `NumPy <https://numpy.org/>`__, which provides the basis
+for most of the software in the `scientific Python stack
+<https://scientific-python.org/>`_ can be configured
+to interoperate with different FORTRAN libraries, and can take advantage
+of different levels of vectorized instructions available in modern CPUs.
+
+Starting with version 1.10.4 of NumPy and version 1.0.0 of SciPy, pre-built
+32-bit and 64-bit binaries in the ``wheel`` format are available for all major
+operating systems (Windows, macOS, and Linux) on PyPI. Note, however, that on
+Windows, NumPy binaries are linked against the `ATLAS
+<http://math-atlas.sourceforge.net/>`__ BLAS/LAPACK library, restricted to SSE2
+instructions, so they may not provide optimal linear algebra performance.
+
+There are a number of alternative options for obtaining scientific Python
+libraries (or any other Python libraries that require a compilation environment
+to install from source and don't provide pre-built wheel files on PyPI).
+
+
+Building from source
+--------------------
+
+The same complexity which makes it difficult to distribute NumPy (and many
+of the projects that depend on it) as wheel files also make them difficult
+to build from source yourself. However, for intrepid folks that are willing
+to spend the time wrangling compilers and linkers for both C and FORTRAN,
+building from source is always an option.
+
+
+Linux distribution packages
+---------------------------
+
+For Linux users, the system package manager will often have pre-compiled
+versions of various pieces of scientific software, including NumPy and
+other parts of the scientific Python stack.
+
+If using versions which may be several months old is acceptable, then this is
+likely to be a good option (just make sure to allow access to distributions
+installed into the system Python when using virtual environments).
+
+
+Windows installers
+------------------
+
+Many Python projects that don't (or can't) currently publish wheel files at
+least publish Windows installers, either on PyPI or on their project
+download page. Using these installers allows users to avoid the need to set
+up a suitable environment to build extensions locally.
+
+The extensions provided in these installers are typically compatible with
+the CPython Windows installers published on python.org.
+
+As with Linux system packages, the Windows installers will only install into a
+system Python installation - they do not support installation in virtual
+environments. Allowing access to distributions installed into the system Python
+when using virtual environments is a common approach to working around this
+limitation.
+
+The :term:`Wheel` project also provides a :command:`wheel convert` subcommand that can
+convert a Windows :command:`bdist_wininst` installer to a wheel.
+
+.. preserve old links to this heading
+.. _mac-os-x-installers-and-package-managers:
+
+macOS installers and package managers
+-------------------------------------
+
+Similar to the situation on Windows, many projects (including NumPy) publish
+macOS installers that are compatible with the macOS CPython binaries
+published on python.org.
+
+macOS users also have access to Linux distribution style package managers
+such as ``Homebrew``. The SciPy site has more details on using Homebrew to
+`install SciPy on macOS <https://scipy.org/install/>`_.
+
+
+SciPy distributions
+-------------------
+
+The SciPy site lists `several distributions
+<https://scipy.org/install/>`_
+that provide the full SciPy stack to
+end users in an easy to use and update format.
+
+Some of these distributions may not be compatible with the standard ``pip``
+and ``virtualenv`` based toolchain.
+
+Spack
+------
+`Spack <https://github.com/spack/spack>`_ is a flexible package manager
+designed to support multiple versions, configurations, platforms, and compilers.
+It was built to support the needs of large supercomputing centers and scientific
+application teams, who must often build software many different ways.
+Spack is not limited to Python; it can install packages for ``C``, ``C++``,
+``Fortran``, ``R``, and other languages. It is non-destructive; installing
+a new version of one package does not break existing installations, so many
+configurations can coexist on the same system.
+
+Spack offers a simple but powerful syntax that allows users to specify
+versions and configuration options concisely. Package files are written in
+pure Python, and they are templated so that it is easy to swap compilers,
+dependency implementations (like MPI), versions, and build options with a single
+package file. Spack also generates *module* files so that packages can
+be loaded and unloaded from the user's environment.
+
+
+The conda cross-platform package manager
+----------------------------------------
+
+``conda`` is an open source (BSD licensed) package management system and
+environment management system that allows users to install
+multiple versions of binary software packages and their dependencies, and
+easily switch between them. It is a cross-platform tool working on Windows,
+MacOS, and Linux. Conda can be used to package up and distribute all kinds of
+packages, it is not limited to just Python packages. It has full support for
+native virtual environments. Conda makes environments first-class citizens,
+making it easy to create independent environments even for C libraries. It is
+written in Python, but is Python-agnostic. Conda manages Python itself as a
+package, so that :command:`conda update python` is possible, in contrast to
+pip, which only manages Python packages.
+
+`Anaconda <https://www.anaconda.com/download/>`_ is a Python distribution published by Anaconda, Inc. It is a stable collection of Open Source packages for big data and scientific use, and a collection of Graphical Interface utilities for managing conda environments.
+
+In addition to the full distribution provided by Anaconda, the conda package manager itself is available in `miniconda <https://docs.anaconda.com/miniconda/>`_, `miniforge <https://github.com/conda-forge/miniforge>`_, and `pixi <https://pixi.sh/>`_.
+
+
+Conda packages are available on multiple channels on Anaconda.org, including the
+default channel supported by Anaconda, Inc, the community supported conda-forge channel, which provides a wide variety of pre-built packages, and some domain-specific package collections.
diff --git a/_build/html/_sources/guides/installing-stand-alone-command-line-tools.rst.txt b/_build/html/_sources/guides/installing-stand-alone-command-line-tools.rst.txt
new file mode 100644
index 000000000..c078fd1e4
--- /dev/null
+++ b/_build/html/_sources/guides/installing-stand-alone-command-line-tools.rst.txt
@@ -0,0 +1,136 @@
+.. _installing-stand-alone-command-line-tools:
+
+Installing stand alone command line tools
+=========================================
+
+Many packages provide command line applications. Examples of such packages are
+`mypy <https://mypy-lang.org/>`_,
+`flake8 <https://flake8.pycqa.org/>`_,
+`black <https://pypi.org/project/black/>`_, and
+:ref:`pipenv`.
+
+Usually you want to be able to access these applications from anywhere on your
+system, but installing packages and their dependencies to the same global
+environment can cause version conflicts and break dependencies the operating
+system has on Python packages.
+
+:ref:`pipx` solves this by creating a virtual environment for each package,
+while also ensuring that its applications are accessible through a directory
+that is on your ``$PATH``. This allows each package to be upgraded or
+uninstalled without causing conflicts with other packages, and allows you to
+safely run the applications from anywhere.
+
+.. note:: pipx only works with Python 3.6+.
+
+pipx is installed with pip:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --user pipx
+ python3 -m pipx ensurepath
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --user pipx
+ py -m pipx ensurepath
+
+.. note::
+
+ ``ensurepath`` ensures that the application directory is on your ``$PATH``.
+ You may need to restart your terminal for this update to take effect.
+
+Now you can install packages with ``pipx install`` and run the package's
+application(s) from anywhere.
+
+.. code-block:: console
+
+ $ pipx install PACKAGE
+ $ PACKAGE_APPLICATION [ARGS]
+
+For example:
+
+.. code-block:: console
+
+ $ pipx install cowsay
+ installed package cowsay 6.1, installed using Python 3.12.2
+ These apps are now globally available
+ - cowsay
+ done! ✨ 🌟 ✨
+ $ cowsay -t moo
+ ___
+ < moo >
+ ===
+       \
+        \
+          ^__^
+          (oo)\_______
+          (__)\       )\/\
+              ||----w |
+              ||     ||
+
+
+To see a list of packages installed with pipx and which applications are
+available, use ``pipx list``:
+
+.. code-block:: console
+
+ $ pipx list
+ venvs are in /Users/user/Library/Application Support/pipx/venvs
+ apps are exposed on your $PATH at /Users/user/.local/bin
+ manual pages are exposed at /Users/user/.local/share/man
+ package black 24.2.0, installed using Python 3.12.2
+ - black
+ - blackd
+ package cowsay 6.1, installed using Python 3.12.2
+ - cowsay
+ package mypy 1.9.0, installed using Python 3.12.2
+ - dmypy
+ - mypy
+ - mypyc
+ - stubgen
+ - stubtest
+ package nox 2024.3.2, installed using Python 3.12.2
+ - nox
+ - tox-to-nox
+
+To upgrade or uninstall a package:
+
+.. code-block:: bash
+
+ pipx upgrade PACKAGE
+ pipx uninstall PACKAGE
+
+pipx can be upgraded or uninstalled with pip:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --upgrade pipx
+ python3 -m pip uninstall pipx
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --upgrade pipx
+ py -m pip uninstall pipx
+
+pipx also allows you to install and run the latest version of an application
+in a temporary, ephemeral environment. For example:
+
+.. code-block:: bash
+
+ pipx run cowsay -t moooo
+
+To see the full list of commands pipx offers, run:
+
+.. code-block:: bash
+
+ pipx --help
+
+You can learn more about pipx at https://pipx.pypa.io/.
diff --git a/_build/html/_sources/guides/installing-using-linux-tools.rst.txt b/_build/html/_sources/guides/installing-using-linux-tools.rst.txt
new file mode 100644
index 000000000..56647f3e9
--- /dev/null
+++ b/_build/html/_sources/guides/installing-using-linux-tools.rst.txt
@@ -0,0 +1,144 @@
+.. _`Installing pip/setuptools/wheel with Linux Package Managers`:
+
+===========================================================
+Installing pip/setuptools/wheel with Linux Package Managers
+===========================================================
+
+:Page Status: Incomplete
+:Last Reviewed: 2021-07-26
+
+This section covers how to install :ref:`pip`, :ref:`setuptools`, and
+:ref:`wheel` using Linux package managers.
+
+If you're using a Python that was downloaded from `python.org
+<https://www.python.org>`_, then this section does not apply. See the
+:ref:`installing_requirements` section instead.
+
+Note that it's common for the versions of :ref:`pip`, :ref:`setuptools`, and
+:ref:`wheel` supported by a specific Linux Distribution to be outdated by the
+time it's released to the public, and updates generally only occur for security
+reasons, not for feature updates. For certain Distributions, there are
+additional repositories that can be enabled to provide newer versions. The
+repositories we know about are explained below.
+
+Also note that it's somewhat common for Distributions to apply patches for the
+sake of security and normalization to their own standards. In some cases, this
+can lead to bugs or unexpected behaviors that vary from the original unpatched
+versions. When this is known, we will make note of it below.
+
+
+Fedora
+~~~~~~
+
+.. code-block:: bash
+
+ sudo dnf install python3-pip python3-wheel
+
+To learn more about Python in Fedora, please visit the `official Fedora docs`_,
+`Python Classroom`_ or `Fedora Loves Python`_.
+
+.. _official Fedora docs: https://developer.fedoraproject.org/tech/languages/python/python-installation.html
+.. _Python Classroom: https://labs.fedoraproject.org/en/python-classroom/
+.. _Fedora Loves Python: https://fedoralovespython.org
+
+CentOS/RHEL
+~~~~~~~~~~~
+
+CentOS and RHEL don't offer :ref:`pip` or :ref:`wheel` in their core repositories,
+although :ref:`setuptools` is installed by default.
+
+To install pip and wheel for the system Python, there are two options:
+
+1. Enable the `EPEL repository <https://fedoraproject.org/wiki/EPEL>`_ using
+ `these instructions
+ <https://docs.fedoraproject.org/en-US/epel/#_quickstart>`__.
+ On EPEL 7, you can install pip and wheel like so:
+
+ .. code-block:: bash
+
+ sudo dnf install python3-pip python3-wheel
+
+ Since EPEL only offers extra, non-conflicting packages, EPEL does not offer
+ setuptools, since it's in the core repository.
+
+
+2. Enable the `PyPA Copr Repo
+ <https://copr.fedorainfracloud.org/coprs/pypa/pypa/>`_ using `these instructions
+ <https://docs.pagure.org/copr.copr/how_to_enable_repo.html>`__ [1]_. You can install
+ pip and wheel like so:
+
+ .. code-block:: bash
+
+ sudo dnf install python3-pip python3-wheel
+
+ To additionally upgrade setuptools, run:
+
+ .. code-block:: bash
+
+ sudo dnf upgrade python3-setuptools
+
+
+To install pip, wheel, and setuptools, in a parallel, non-system environment
+(using yum) then there are two options:
+
+
+1. Use the "Software Collections" feature to enable a parallel collection that
+ includes pip, setuptools, and wheel.
+
+ * For Redhat, see here:
+ https://developers.redhat.com/products/softwarecollections/overview
+ * For CentOS, see here: https://github.com/sclorg
+
+ Be aware that collections may not contain the most recent versions.
+
+2. Enable the `IUS repository <https://ius.io/>`_ and
+ install one of the `parallel-installable
+ <https://ius.io/usage#parallel-installable-packages>`_
+ Pythons, along with pip, setuptools, and wheel, which are kept fairly up to
+ date.
+
+ For example, for Python 3.4 on CentOS7/RHEL7:
+
+ .. code-block:: bash
+
+ sudo yum install python34u python34u-wheel
+
+
+openSUSE
+~~~~~~~~
+
+.. code-block:: bash
+
+ sudo zypper install python3-pip python3-setuptools python3-wheel
+
+
+.. _debian-ubuntu:
+
+Debian/Ubuntu and derivatives
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Firstly, update and refresh repository lists by running this command:
+
+.. code-block:: bash
+
+ sudo apt update
+ sudo apt install python3-venv python3-pip
+
+.. warning::
+
+ Recent Debian/Ubuntu versions have modified pip to use the `"User Scheme"
+ <https://pip.pypa.io/en/stable/user_guide/#user-installs>`_ by default, which
+ is a significant behavior change that can be surprising to some users.
+
+
+Arch Linux
+~~~~~~~~~~
+
+.. code-block:: bash
+
+ sudo pacman -S python-pip
+
+----
+
+.. [1] Currently, there is no "copr" yum plugin available for CentOS/RHEL, so
+ the only option is to manually place the repo files as described.
diff --git a/_build/html/_sources/guides/installing-using-pip-and-virtual-environments.rst.txt b/_build/html/_sources/guides/installing-using-pip-and-virtual-environments.rst.txt
new file mode 100644
index 000000000..22d1840cc
--- /dev/null
+++ b/_build/html/_sources/guides/installing-using-pip-and-virtual-environments.rst.txt
@@ -0,0 +1,512 @@
+Install packages in a virtual environment using pip and venv
+============================================================
+
+This guide discusses how to create and activate a virtual environment using
+the standard library's virtual environment tool :ref:`venv` and install packages.
+The guide covers how to:
+
+* Create and activate a virtual environment
+* Prepare pip
+* Install packages into a virtual environment using the ``pip`` command
+* Use and create a requirements file
+
+
+.. note:: This guide applies to supported versions of Python, currently 3.8
+ and higher.
+
+
+.. note:: This guide uses the term **package** to refer to a
+ :term:`Distribution Package`, which commonly is installed from an external
+ host. This differs from the term :term:`Import Package` which refers to
+ import modules in your Python source code.
+
+
+.. important::
+ This guide has the prerequisite that you are using an official Python version obtained from
+ `python.org <https://www.python.org/>`_. If you are using your operating
+ system's package manager to install Python, please ensure that Python is
+ installed before proceeding with these steps.
+
+
+Create and Use Virtual Environments
+-----------------------------------
+
+Create a new virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+:ref:`venv` (for Python 3) allows you to manage separate package installations for
+different projects. It creates a "virtual" isolated Python installation. When
+you switch projects, you can create a new virtual environment which is isolated
+from other virtual environments. You benefit from the virtual environment
+since packages can be installed confidently and will not interfere with
+another project's environment.
+
+.. tip::
+ It is recommended to use a virtual environment when working with third
+ party packages.
+
+To create a virtual environment, go to your project's directory and run the
+following command. This will create a new virtual environment in a local folder
+named ``.venv``:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m venv .venv
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m venv .venv
+
+The second argument is the location to create the virtual environment. Generally, you
+can just create this in your project and call it ``.venv``.
+
+``venv`` will create a virtual Python installation in the ``.venv`` folder.
+
+.. Note:: You should exclude your virtual environment directory from your version
+ control system using ``.gitignore`` or similar.
+
+
+Activate a virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Before you can start installing or using packages in your virtual environment you'll
+need to ``activate`` it. Activating a virtual environment will put the
+virtual environment-specific ``python`` and ``pip`` executables into your
+shell's ``PATH``.
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ source .venv/bin/activate
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ .venv\Scripts\activate
+
+To confirm the virtual environment is activated, check the location of your
+Python interpreter:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ which python
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ where python
+
+While the virtual environment is active, the above command will output a
+filepath that includes the ``.venv`` directory, by ending with the following:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ .venv/bin/python
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ .venv\Scripts\python
+
+
+While a virtual environment is activated, pip will install packages into that
+specific environment. This enables you to import and use packages in your
+Python application.
+
+
+Deactivate a virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you want to switch projects or leave your virtual environment,
+``deactivate`` the environment:
+
+.. code-block:: bash
+
+ deactivate
+
+.. note::
+ Closing your shell will deactivate the virtual environment. If
+ you open a new shell window and want to use the virtual environment,
+ reactivate it.
+
+Reactivate a virtual environment
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you want to reactivate an existing virtual environment, follow the same
+instructions about activating a virtual environment. There's no need to create
+a new virtual environment.
+
+
+Prepare pip
+-----------
+
+:ref:`pip` is the reference Python package manager.
+It's used to install and update packages into a virtual environment.
+
+
+.. tab:: Unix/macOS
+
+ The Python installers for macOS include pip. On Linux, you may have to install
+ an additional package such as ``python3-pip``. You can make sure that pip is
+ up-to-date by running:
+
+ .. code-block:: bash
+
+ python3 -m pip install --upgrade pip
+ python3 -m pip --version
+
+ Afterwards, you should have the latest version of pip installed in your
+ user site:
+
+ .. code-block:: text
+
+ pip 23.3.1 from .../.venv/lib/python3.9/site-packages (python 3.9)
+
+.. tab:: Windows
+
+ The Python installers for Windows include pip. You can make sure that pip is
+ up-to-date by running:
+
+ .. code-block:: bat
+
+ py -m pip install --upgrade pip
+ py -m pip --version
+
+ Afterwards, you should have the latest version of pip:
+
+ .. code-block:: text
+
+ pip 23.3.1 from .venv\lib\site-packages (Python 3.9.4)
+
+
+Install packages using pip
+--------------------------
+
+When your virtual environment is activated, you can install packages. Use the
+``pip install`` command to install packages.
+
+Install a package
+~~~~~~~~~~~~~~~~~
+
+For example, let's install the
+`Requests`_ library from the :term:`Python Package Index (PyPI)`:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install requests
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install requests
+
+pip should download requests and all of its dependencies and install them:
+
+.. code-block:: text
+
+ Collecting requests
+ Using cached requests-2.18.4-py2.py3-none-any.whl
+ Collecting chardet<3.1.0,>=3.0.2 (from requests)
+ Using cached chardet-3.0.4-py2.py3-none-any.whl
+ Collecting urllib3<1.23,>=1.21.1 (from requests)
+ Using cached urllib3-1.22-py2.py3-none-any.whl
+ Collecting certifi>=2017.4.17 (from requests)
+ Using cached certifi-2017.7.27.1-py2.py3-none-any.whl
+ Collecting idna<2.7,>=2.5 (from requests)
+ Using cached idna-2.6-py2.py3-none-any.whl
+ Installing collected packages: chardet, urllib3, certifi, idna, requests
+ Successfully installed certifi-2017.7.27.1 chardet-3.0.4 idna-2.6 requests-2.18.4 urllib3-1.22
+
+.. _Requests: https://pypi.org/project/requests/
+
+
+Install a specific package version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+pip allows you to specify which version of a package to install using
+:term:`version specifiers `. For example, to install
+a specific version of ``requests``:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install 'requests==2.18.4'
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install "requests==2.18.4"
+
+To install the latest ``2.x`` release of requests:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install 'requests>=2.0.0,<3.0.0'
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install "requests>=2.0.0,<3.0.0"
+
+To install pre-release versions of packages, use the ``--pre`` flag:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --pre requests
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --pre requests
+
+
+Install extras
+~~~~~~~~~~~~~~
+
+Some packages have optional `extras`_. You can tell pip to install these by
+specifying the extra in brackets:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install 'requests[security]'
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install "requests[security]"
+
+.. _extras:
+ https://setuptools.readthedocs.io/en/latest/userguide/dependency_management.html#optional-dependencies
+
+
+Install a package from source
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+pip can install a package directly from its source code. For example, to install
+the source code in the ``google-auth`` directory:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ cd google-auth
+ python3 -m pip install .
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ cd google-auth
+ py -m pip install .
+
+Additionally, pip can install packages from source in
+:doc:`development mode <setuptools:userguide/development_mode>`,
+meaning that changes to the source directory will immediately affect the
+installed package without needing to re-install:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --editable .
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --editable .
+
+
+Install from version control systems
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+pip can install packages directly from their version control system. For
+example, you can install directly from a git repository:
+
+.. code-block:: bash
+
+ google-auth @ git+https://github.com/GoogleCloudPlatform/google-auth-library-python.git
+
+For more information on supported version control systems and syntax, see pip's
+documentation on :ref:`VCS Support <pip:VCS Support>`.
+
+
+Install from local archives
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you have a local copy of a :term:`Distribution Package`'s archive (a zip,
+wheel, or tar file) you can install it directly with pip:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install requests-2.18.4.tar.gz
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install requests-2.18.4.tar.gz
+
+If you have a directory containing archives of multiple packages, you can tell
+pip to look for packages there and not to use the
+:term:`Python Package Index (PyPI)` at all:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --no-index --find-links=/local/dir/ requests
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --no-index --find-links=/local/dir/ requests
+
+This is useful if you are installing packages on a system with limited
+connectivity or if you want to strictly control the origin of distribution
+packages.
+
+
+Install from other package indexes
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you want to download packages from a different index than the
+:term:`Python Package Index (PyPI)`, you can use the ``--index-url`` flag:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --index-url http://index.example.com/simple/ SomeProject
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --index-url http://index.example.com/simple/ SomeProject
+
+If you want to allow packages from both the :term:`Python Package Index (PyPI)`
+and a separate index, you can use the ``--extra-index-url`` flag instead:
+
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --extra-index-url http://index.example.com/simple/ SomeProject
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --extra-index-url http://index.example.com/simple/ SomeProject
+
+Upgrading packages
+------------------
+
+pip can upgrade packages in-place using the ``--upgrade`` flag. For example, to
+install the latest version of ``requests`` and all of its dependencies:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --upgrade requests
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --upgrade requests
+
+Using a requirements file
+-------------------------
+
+Instead of installing packages individually, pip allows you to declare all
+dependencies in a :ref:`Requirements File <pip:Requirements Files>`. For
+example you could create a :file:`requirements.txt` file containing:
+
+.. code-block:: text
+
+ requests==2.18.4
+ google-auth==1.1.0
+
+And tell pip to install all of the packages in this file using the ``-r`` flag:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install -r requirements.txt
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install -r requirements.txt
+
+Freezing dependencies
+---------------------
+
+Pip can export a list of all installed packages and their versions using the
+``freeze`` command:
+
+.. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip freeze
+
+.. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip freeze
+
+Which will output a list of package specifiers such as:
+
+.. code-block:: text
+
+ cachetools==2.0.1
+ certifi==2017.7.27.1
+ chardet==3.0.4
+ google-auth==1.1.1
+ idna==2.6
+ pyasn1==0.3.6
+ pyasn1-modules==0.1.4
+ requests==2.18.4
+ rsa==3.4.2
+ six==1.11.0
+ urllib3==1.22
+
+The ``pip freeze`` command is useful for creating :ref:`pip:Requirements Files`
+that can re-create the exact versions of all packages installed in an environment.
diff --git a/_build/html/_sources/guides/installing-using-virtualenv.rst.txt b/_build/html/_sources/guides/installing-using-virtualenv.rst.txt
new file mode 100644
index 000000000..a584b89d5
--- /dev/null
+++ b/_build/html/_sources/guides/installing-using-virtualenv.rst.txt
@@ -0,0 +1,15 @@
+Installing packages using virtualenv
+====================================
+
+This guide discusses how to install packages using :ref:`pip` and
+:ref:`virtualenv`, a tool to create isolated Python environments.
+
+.. important::
+ This "how to" guide on installing packages and using :ref:`virtualenv` is
+ under development. Please refer to the :ref:`virtualenv` documentation for
+ details on installation and usage.
+
+
+.. note:: This doc uses the term **package** to refer to a
+ :term:`Distribution Package` which is different from an :term:`Import
+ Package` which is used to import modules in your Python source code.
diff --git a/_build/html/_sources/guides/licensing-examples-and-user-scenarios.rst.txt b/_build/html/_sources/guides/licensing-examples-and-user-scenarios.rst.txt
new file mode 100644
index 000000000..b6cdfe327
--- /dev/null
+++ b/_build/html/_sources/guides/licensing-examples-and-user-scenarios.rst.txt
@@ -0,0 +1,358 @@
+.. _licensing-examples-and-user-scenarios:
+
+
+=====================================
+Licensing examples and user scenarios
+=====================================
+
+
+:pep:`639` has specified the way to declare a :term:`Distribution Archive`'s
+license and paths to license files and other legally required information.
+This document aims to provide clear guidance how to migrate from the legacy
+to the standardized way of declaring licenses.
+Make sure your preferred build backend supports :pep:`639` before
+trying to apply the newer guidelines.
+
+
+Licensing Examples
+==================
+
+.. _licensing-example-basic:
+
+Basic example
+-------------
+
+The Setuptools project itself, as of `version 75.6.0 <setuptools7560_>`__,
+does not use the ``License`` field in its own project source metadata.
+Further, it no longer explicitly specifies ``license_file``/``license_files``
+as it did previously, since Setuptools relies on its own automatic
+inclusion of license-related files matching common patterns,
+such as the :file:`LICENSE` file it uses.
+
+It includes the following license-related metadata in its
+:file:`pyproject.toml`:
+
+.. code-block:: toml
+
+ [project]
+ classifiers = [
+ "License :: OSI Approved :: MIT License"
+ ]
+
+The simplest migration to PEP 639 would consist of using this instead:
+
+.. code-block:: toml
+
+ [project]
+ license = "MIT"
+
+Or, if the project used :file:`setup.cfg`, in its ``[metadata]`` table:
+
+.. code-block:: ini
+
+ [metadata]
+ license = MIT
+
+The output Core Metadata for the :term:`Distribution Package` would then be:
+
+.. code-block:: email
+
+ License-Expression: MIT
+ License-File: LICENSE
+
+The :file:`LICENSE` file would be stored at :file:`/setuptools-{VERSION}/LICENSE`
+in the sdist and :file:`/setuptools-{VERSION}.dist-info/licenses/LICENSE`
+in the wheel, and unpacked from there into the site directory (e.g.
+:file:`site-packages/`) on installation; :file:`/` is the root of the respective
+archive and ``{VERSION}`` the version of the Setuptools release in the Core
+Metadata.
+
+
+.. _licensing-example-advanced:
+
+Advanced example
+----------------
+
+Suppose Setuptools were to include the licenses of the third-party projects
+that are vendored in the :file:`setuptools/_vendor/` and :file:`pkg_resources/_vendor/`
+directories; specifically:
+
+.. code-block:: text
+
+ packaging==21.2
+ pyparsing==2.2.1
+ ordered-set==3.1.1
+ more_itertools==8.8.0
+
+The appropriate license expressions are:
+
+.. code-block:: text
+
+ packaging: Apache-2.0 OR BSD-2-Clause
+ pyparsing: MIT
+ ordered-set: MIT
+ more_itertools: MIT
+
+A comprehensive license expression covering both Setuptools
+proper and its vendored dependencies would contain these metadata,
+combining all the license expressions into one. Such an expression might be:
+
+.. code-block:: text
+
+ MIT AND (Apache-2.0 OR BSD-2-Clause)
+
+In addition, per the requirements of the licenses, the relevant license files
+must be included in the package. Suppose the :file:`LICENSE` file contains the text
+of the MIT license and the copyrights used by Setuptools, ``pyparsing``,
+``more_itertools`` and ``ordered-set``; and the :file:`LICENSE*` files in the
+:file:`setuptools/_vendor/packaging/` directory contain the Apache 2.0 and
+2-clause BSD license text, and the Packaging copyright statement and
+`license choice notice <packaginglicense_>`__.
+
+Specifically, we assume the license files are located at the following
+paths in the project source tree (relative to the project root and
+:file:`pyproject.toml`):
+
+.. code-block:: text
+
+ LICENSE
+ setuptools/_vendor/packaging/LICENSE
+ setuptools/_vendor/packaging/LICENSE.APACHE
+ setuptools/_vendor/packaging/LICENSE.BSD
+
+Putting it all together, our :file:`pyproject.toml` would be:
+
+.. code-block:: toml
+
+ [project]
+ license = "MIT AND (Apache-2.0 OR BSD-2-Clause)"
+ license-files = [
+ "LICENSE*",
+ "setuptools/_vendor/LICENSE*",
+ ]
+
+Or alternatively, the license files can be specified explicitly (paths will be
+interpreted as glob patterns):
+
+.. code-block:: toml
+
+ [project]
+ license = "MIT AND (Apache-2.0 OR BSD-2-Clause)"
+ license-files = [
+ "LICENSE",
+ "setuptools/_vendor/LICENSE",
+ "setuptools/_vendor/LICENSE.APACHE",
+ "setuptools/_vendor/LICENSE.BSD",
+ ]
+
+If our project used :file:`setup.cfg`, we could define this in its ``[metadata]`` table:
+
+.. code-block:: ini
+
+ [metadata]
+ license = MIT AND (Apache-2.0 OR BSD-2-Clause)
+ license_files =
+ LICENSE
+ setuptools/_vendor/packaging/LICENSE
+ setuptools/_vendor/packaging/LICENSE.APACHE
+ setuptools/_vendor/packaging/LICENSE.BSD
+
+With either approach, the output Core Metadata in the distribution
+would be:
+
+.. code-block:: email
+
+ License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause)
+ License-File: LICENSE
+ License-File: setuptools/_vendor/packaging/LICENSE
+ License-File: setuptools/_vendor/packaging/LICENSE.APACHE
+ License-File: setuptools/_vendor/packaging/LICENSE.BSD
+
+In the resulting sdist, with :file:`/` as the root of the archive and ``{VERSION}``
+the version of the Setuptools release specified in the Core Metadata,
+the license files would be located at the paths:
+
+.. code-block:: text
+
+ /setuptools-{VERSION}/LICENSE
+ /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE
+ /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.APACHE
+ /setuptools-{VERSION}/setuptools/_vendor/packaging/LICENSE.BSD
+
+In the built wheel, with :file:`/` being the root of the archive and
+``{VERSION}`` as the previous, the license files would be stored at:
+
+.. code-block:: text
+
+ /setuptools-{VERSION}.dist-info/licenses/LICENSE
+ /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE
+ /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE
+ /setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD
+
+Finally, in the installed project, with :file:`site-packages/` being the site dir
+and ``{VERSION}`` as the previous, the license files would be installed to:
+
+.. code-block:: text
+
+ site-packages/setuptools-{VERSION}.dist-info/licenses/LICENSE
+ site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE
+ site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.APACHE
+ site-packages/setuptools-{VERSION}.dist-info/licenses/setuptools/_vendor/packaging/LICENSE.BSD
+
+
+Expression examples
+'''''''''''''''''''
+
+Some additional examples of valid ``License-Expression`` values:
+
+.. code-block:: email
+
+ License-Expression: MIT
+ License-Expression: BSD-3-Clause
+ License-Expression: MIT AND (Apache-2.0 OR BSD-2-Clause)
+ License-Expression: MIT OR GPL-2.0-or-later OR (FSFUL AND BSD-2-Clause)
+ License-Expression: GPL-3.0-only WITH Classpath-Exception-2.0 OR BSD-3-Clause
+ License-Expression: LicenseRef-Public-Domain OR CC0-1.0 OR Unlicense
+ License-Expression: LicenseRef-Proprietary
+ License-Expression: LicenseRef-Custom-License
+
+
+User Scenarios
+==============
+
+The following covers the range of common use cases from a user perspective,
+providing guidance for each. Do note that the following
+should **not** be considered legal advice, and readers should consult a
+licensed legal practitioner in their jurisdiction if they are unsure about
+the specifics for their situation.
+
+
+I have a private package that won't be distributed
+--------------------------------------------------
+
+If your package isn't shared publicly, i.e. outside your company,
+organization or household, it *usually* isn't strictly necessary to include
+a formal license, so you wouldn't necessarily have to do anything extra here.
+
+However, it is still a good idea to include ``LicenseRef-Proprietary``
+as a license expression in your package configuration, and/or a
+copyright statement and any legal notices in a :file:`LICENSE.txt` file
+in the root of your project directory, which will be automatically
+included by packaging tools.
+
+
+I just want to share my own work without legal restrictions
+-----------------------------------------------------------
+
+While you aren't required to include a license, if you don't, no one has
+`any permission to download, use or improve your work <dontchoosealicense_>`__,
+so that's probably the *opposite* of what you actually want.
+The `MIT license <chooseamitlicense_>`__ is a great choice instead, as it's simple,
+widely used and allows anyone to do whatever they want with your work
+(other than sue you, which you probably also don't want).
+
+To apply it, just paste `the text <chooseamitlicense_>`__ into a file named
+:file:`LICENSE.txt` at the root of your repo, and add the year and your name to
+the copyright line. Then, just add ``license = "MIT"`` under
+``[project]`` in your :file:`pyproject.toml` if your packaging tool supports it,
+or in its config file/section. You're done!
+
+
+I want to distribute my project under a specific license
+--------------------------------------------------------
+
+To use a particular license, simply paste its text into a :file:`LICENSE.txt`
+file at the root of your repo, if you don't have it in a file starting with
+:file:`LICENSE` or :file:`COPYING` already, and add
+``license = "LICENSE-ID"`` under ``[project]`` in your
+:file:`pyproject.toml` if your packaging tool supports it, or else in its
+config file. You can find the ``LICENSE-ID``
+and copyable license text on sites like
+`ChooseALicense <choosealicenselist_>`__ or `SPDX <spdxlist_>`__.
+
+Many popular code hosts, project templates and packaging tools can add the
+license file for you, and may support the expression as well in the future.
+
+
+I maintain an existing package that's already licensed
+------------------------------------------------------
+
+If you already have license files and metadata in your project, you
+should only need to make a couple of tweaks to take advantage of the new
+functionality.
+
+In your project config file, enter your license expression under
+``license`` (``[project]`` table in :file:`pyproject.toml`),
+or the equivalent for your packaging tool,
+and make sure to remove any legacy ``license`` table subkeys or
+``License ::`` classifiers. Your existing ``license`` value may already
+be valid as one (e.g. ``MIT``, ``Apache-2.0 OR BSD-2-Clause``, etc);
+otherwise, check the `SPDX license list <spdxlist_>`__ for the identifier
+that matches the license used.
+
+Make sure to list your license files under ``license-files``
+under ``[project]`` in :file:`pyproject.toml`
+or else in your tool's configuration file.
+
+See the :ref:`licensing-example-basic` for a simple but complete real-world demo
+of how this works in practice.
+See also the best-effort guidance on how to translate license classifiers
+into license expression provided by the :pep:`639` authors:
+`Mapping License Classifiers to SPDX Identifiers <mappingclassifierstospdx_>`__.
+Packaging tools may support automatically converting legacy licensing
+metadata; check your tool's documentation for more information.
+
+
+My package includes other code under different licenses
+-------------------------------------------------------
+
+If your project includes code from others covered by different licenses,
+such as vendored dependencies or files copied from other open source
+software, you can construct a license expression
+to describe the licenses involved and the relationship
+between them.
+
+In short, ``License-1 AND License-2`` means that *both* licenses apply
+(for example, you included a file under another license), and
+``License-1 OR License-2`` means that *either* of the licenses can be used, at
+the user's option (for example, you want to allow users a choice of multiple
+licenses). You can use parentheses (``()``) for grouping to form expressions
+that cover even the most complex situations.
+
+In your project config file, enter your license expression under
+``license`` (``[project]`` table of :file:`pyproject.toml`),
+or the equivalent for your packaging tool,
+and make sure to remove any legacy ``license`` table subkeys
+or ``License ::`` classifiers.
+
+Also, make sure you add the full license text of all the licenses as files
+somewhere in your project repository. List the
+relative path or glob patterns to each of them under ``license-files``
+under ``[project]`` in :file:`pyproject.toml`
+(if your tool supports it), or else in your tool's configuration file.
+
+As an example, if your project was licensed MIT but incorporated
+a vendored dependency (say, ``packaging``) that was licensed under
+either Apache 2.0 or the 2-clause BSD, your license expression would
+be ``MIT AND (Apache-2.0 OR BSD-2-Clause)``. You might have a
+:file:`LICENSE.txt` in your repo root, and a :file:`LICENSE-APACHE.txt` and
+:file:`LICENSE-BSD.txt` in the :file:`_vendor/` subdirectory, so to include
+all of them, you'd specify ``["LICENSE.txt", "_vendor/packaging/LICENSE*"]``
+as glob patterns, or
+``["LICENSE.txt", "_vendor/LICENSE-APACHE.txt", "_vendor/LICENSE-BSD.txt"]``
+as literal file paths.
+
+See a fully worked out :ref:`licensing-example-advanced` for an end-to-end
+application of this to a real-world complex project, with many technical
+details, and consult a `tutorial <spdxtutorial_>`__ for more help and examples
+using SPDX identifiers and expressions.
+
+
+.. _chooseamitlicense: https://choosealicense.com/licenses/mit/
+.. _choosealicenselist: https://choosealicense.com/licenses/
+.. _dontchoosealicense: https://choosealicense.com/no-permission/
+.. _mappingclassifierstospdx: https://peps.python.org/pep-0639/appendix-mapping-classifiers/
+.. _packaginglicense: https://github.com/pypa/packaging/blob/21.2/LICENSE
+.. _setuptools7560: https://github.com/pypa/setuptools/blob/v75.6.0/pyproject.toml
+.. _spdxlist: https://spdx.org/licenses/
+.. _spdxtutorial: https://github.com/david-a-wheeler/spdx-tutorial
diff --git a/_build/html/_sources/guides/making-a-pypi-friendly-readme.rst.txt b/_build/html/_sources/guides/making-a-pypi-friendly-readme.rst.txt
new file mode 100644
index 000000000..4a3a20670
--- /dev/null
+++ b/_build/html/_sources/guides/making-a-pypi-friendly-readme.rst.txt
@@ -0,0 +1,132 @@
+Making a PyPI-friendly README
+=============================
+
+README files can help your users understand your project and can be used to set your project's description on PyPI.
+This guide helps you create a README in a PyPI-friendly format and include your README in your package so it appears on PyPI.
+
+
+Creating a README file
+----------------------
+
+README files for Python projects are often named ``README``, ``README.txt``, ``README.rst``, or ``README.md``.
+
+For your README to display properly on PyPI, choose a markup language supported by PyPI.
+Formats supported by `PyPI's README renderer <https://github.com/pypa/readme_renderer>`_ are:
+
+* plain text
+* `reStructuredText <https://docutils.sourceforge.io/rst.html>`_ (without Sphinx extensions)
+* Markdown (`GitHub Flavored Markdown <https://github.github.com/gfm/>`_ by default,
+ or `CommonMark <https://commonmark.org/>`_)
+
+It's customary to save your README file in the root of your project, in the same directory as your :file:`setup.py` file.
+
+
+Including your README in your package's metadata
+------------------------------------------------
+
+To include your README's contents as your package description,
+set your project's ``Description`` and ``Description-Content-Type`` metadata,
+typically in your project's :file:`setup.py` file.
+
+.. seealso::
+
+ * :ref:`description-optional`
+ * :ref:`description-content-type-optional`
+
+For example, to set these values in a package's :file:`setup.py` file,
+use ``setup()``'s ``long_description`` and ``long_description_content_type``.
+
+Set the value of ``long_description`` to the contents (not the path) of the README file itself.
+Set the ``long_description_content_type`` to an accepted ``Content-Type``-style value for your README file's markup,
+such as ``text/plain``, ``text/x-rst`` (for reStructuredText), or ``text/markdown``.
+
+.. note::
+
+ If you're using GitHub-flavored Markdown to write a project's description, ensure you upgrade
+ the following tools:
+
+ .. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --user --upgrade setuptools wheel twine
+
+ .. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --user --upgrade setuptools wheel twine
+
+ The minimum required versions of the respective tools are:
+
+ - ``setuptools >= 38.6.0``
+ - ``wheel >= 0.31.0``
+ - ``twine >= 1.11.0``
+
+ It's recommended that you use ``twine`` to upload the project's distribution packages:
+
+ .. code-block:: bash
+
+ twine upload dist/*
+
+For example, see this :file:`setup.py` file,
+which reads the contents of :file:`README.md` as ``long_description``
+and identifies the markup as GitHub-flavored Markdown:
+
+.. code-block:: python
+
+ from setuptools import setup
+
+ # read the contents of your README file
+ from pathlib import Path
+ this_directory = Path(__file__).parent
+ long_description = (this_directory / "README.md").read_text()
+
+ setup(
+ name='an_example_package',
+ # other arguments omitted
+ long_description=long_description,
+ long_description_content_type='text/markdown'
+ )
+
+
+Validating reStructuredText markup
+----------------------------------
+
+If your README is written in reStructuredText, any invalid markup will prevent
+it from rendering, causing PyPI to instead just show the README's raw source.
+
+Note that Sphinx extensions used in docstrings, such as
+:doc:`directives <sphinx:usage/restructuredtext/directives>` and :doc:`roles <sphinx:usage/restructuredtext/roles>`
+(e.g., "``:py:func:`getattr```" or "``:ref:`my-reference-label```"), are not allowed here and will result in error
+messages like "``Error: Unknown interpreted text role "py:func".``".
+
+You can check your README for markup errors before uploading as follows:
+
+1. Install the latest version of `twine <https://pypi.org/project/twine/>`_;
+ version 1.12.0 or higher is required:
+
+ .. tab:: Unix/macOS
+
+ .. code-block:: bash
+
+ python3 -m pip install --upgrade twine
+
+ .. tab:: Windows
+
+ .. code-block:: bat
+
+ py -m pip install --upgrade twine
+
+2. Build the sdist and wheel for your project as described under
+ :ref:`Packaging Your Project`.
+
+3. Run ``twine check`` on the sdist and wheel:
+
+ .. code-block:: bash
+
+ twine check dist/*
+
+ This command will report any problems rendering your README. If your markup
+ renders fine, the command will output ``Checking distribution FILENAME:
+ Passed``.
diff --git a/_build/html/_sources/guides/migrating-to-pypi-org.rst.txt b/_build/html/_sources/guides/migrating-to-pypi-org.rst.txt
new file mode 100644
index 000000000..2b565e8ee
--- /dev/null
+++ b/_build/html/_sources/guides/migrating-to-pypi-org.rst.txt
@@ -0,0 +1,142 @@
+:orphan:
+
+.. _`Migrating to PyPI.org`:
+
+Migrating to PyPI.org
+=====================
+
+:Page Status: Obsolete
+
+:term:`pypi.org` is the new, rewritten version of PyPI that has replaced the
+legacy PyPI code base. It is the default version of PyPI that people are
+expected to use. These are the tools and processes that people will need to
+interact with ``PyPI.org``.
+
+Publishing releases
+-------------------
+
+``pypi.org`` is the default upload platform as of September 2016.
+
+Uploads through ``pypi.python.org`` were *switched off* on **July 3, 2017**.
+As of April 13th, 2018, ``pypi.org`` is the URL for PyPI.
+
+The recommended way to migrate to PyPI.org for uploading is to ensure that you
+are using a new enough version of your upload tool.
+
+The default upload settings switched to ``pypi.org`` in the following versions:
+
+* ``twine`` 1.8.0
+* ``setuptools`` 27.0.0
+* Python 2.7.13 (``distutils`` update)
+* Python 3.4.6 (``distutils`` update)
+* Python 3.5.3 (``distutils`` update)
+* Python 3.6.0 (``distutils`` update)
+
+In addition to ensuring you're on a new enough version of the tool for the
+tool's default to have switched, you must also make sure that you have not
+configured the tool to override its default upload URL. Typically this is
+configured in a file located at :file:`$HOME/.pypirc`. If you see a file like:
+
+.. code::
+
+ [distutils]
+ index-servers =
+ pypi
+
+ [pypi]
+ repository = https://pypi.python.org/pypi
+ username =
+ password =
+
+
+Then simply delete the line starting with ``repository`` and you will use
+your upload tool's default URL.
+
+If for some reason you're unable to upgrade the version of your tool
+to a version that defaults to using PyPI.org, then you may edit
+:file:`$HOME/.pypirc` and include the ``repository:`` line, but use the
+value ``https://upload.pypi.org/legacy/`` instead:
+
+.. code::
+
+ [distutils]
+ index-servers =
+ pypi
+
+ [pypi]
+ repository = https://upload.pypi.org/legacy/
+ username =
+ password =
+
+(``legacy`` in this URL refers to the fact that this is the new server
+implementation's emulation of the legacy server implementation's upload API.)
+
+For more details, see the :ref:`specification <pypirc>` for :file:`.pypirc`.
+
+Registering package names & metadata
+------------------------------------
+
+Explicit pre-registration of package names with the ``setup.py register``
+command prior to the first upload is no longer required, and is not
+currently supported by the legacy upload API emulation on PyPI.org.
+
+As a result, attempting explicit registration after switching to using
+PyPI.org for uploads will give the following error message::
+
+ Server response (410): This API is no longer supported, instead simply upload the file.
+
+The solution is to skip the registration step, and proceed directly to
+uploading artifacts.
+
+
+Using TestPyPI
+--------------
+
+Legacy TestPyPI (testpypi.python.org) is no longer available; use
+`test.pypi.org <https://test.pypi.org>`_ instead. If you use TestPyPI,
+you must update your :file:`$HOME/.pypirc` to handle TestPyPI's new
+location, by replacing ``https://testpypi.python.org/pypi`` with
+``https://test.pypi.org/legacy/``, for example:
+
+.. code::
+
+ [distutils]
+ index-servers=
+ pypi
+ testpypi
+
+ [testpypi]
+ repository = https://test.pypi.org/legacy/
+ username =
+ password =
+
+For more details, see the :ref:`specification <pypirc>` for :file:`.pypirc`.
+
+
+Registering new user accounts
+-----------------------------
+
+In order to help mitigate spam attacks against PyPI, new user registration
+through ``pypi.python.org`` was *switched off* on **February 20, 2018**.
+New user registrations at ``pypi.org`` are open.
+
+
+Browsing packages
+-----------------
+
+While ``pypi.python.org`` may still be used in links from other PyPA
+documentation, etc, the default interface for browsing packages is
+``pypi.org``. The domain pypi.python.org now redirects to pypi.org,
+and may be disabled sometime in the future.
+
+
+Downloading packages
+--------------------
+
+``pypi.org`` is the default host for downloading packages.
+
+Managing published packages and releases
+----------------------------------------
+
+``pypi.org`` provides a fully functional interface for logged in users to
+manage their published packages and releases.
diff --git a/_build/html/_sources/guides/modernize-setup-py-project.rst.txt b/_build/html/_sources/guides/modernize-setup-py-project.rst.txt
new file mode 100644
index 000000000..1f71d1973
--- /dev/null
+++ b/_build/html/_sources/guides/modernize-setup-py-project.rst.txt
@@ -0,0 +1,248 @@
+.. _modernize-setup-py-project:
+
+
+==============================================
+How to modernize a ``setup.py`` based project?
+==============================================
+
+
+Should ``pyproject.toml`` be added?
+===================================
+
+A :term:`pyproject.toml` file is strongly recommended.
+The presence of a :file:`pyproject.toml` file itself does not bring much. [#]_
+What is actually strongly recommended is the ``[build-system]`` table in :file:`pyproject.toml`.
+
+.. [#] Note that it has influence on the build isolation feature of pip,
+ see below.
+
+
+Should ``setup.py`` be deleted?
+===============================
+
+No, :file:`setup.py` can exist in a modern :ref:`setuptools` based project.
+The :term:`setup.py` file is a valid configuration file for setuptools
+that happens to be written in Python.
+However, the following commands are deprecated and **MUST NOT** be run anymore,
+and their recommended replacement commands should be used instead:
+
++---------------------------------+----------------------------------------+
+| Deprecated | Recommendation |
++=================================+========================================+
+| ``python setup.py install`` | ``python -m pip install .`` |
++---------------------------------+----------------------------------------+
+| ``python setup.py develop`` | ``python -m pip install --editable .`` |
++---------------------------------+----------------------------------------+
+| ``python setup.py sdist`` | ``python -m build`` |
++---------------------------------+ |
+| ``python setup.py bdist_wheel`` | |
++---------------------------------+----------------------------------------+
+
+
+For more details:
+
+* :ref:`setup-py-deprecated`
+
+
+Where to start?
+===============
+
+The :term:`project` must contain a :file:`pyproject.toml` file at the root of its source tree
+that contains a ``[build-system]`` table like so:
+
+.. code:: toml
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+
+This is the standardized method of letting :term:`build frontends <Build Frontend>` know
+that :ref:`setuptools` is the :term:`build backend <Build Backend>` for this project.
+
+Note that the presence of a :file:`pyproject.toml` file (even if empty)
+triggers :ref:`pip` to change its default behavior to use *build isolation*.
+
+For more details:
+
+* :ref:`distributing-packages`
+* :ref:`pyproject-build-system-table`
+* :doc:`pip:reference/build-system`
+
+
+How to handle additional build-time dependencies?
+=================================================
+
+On top of setuptools itself,
+if :file:`setup.py` depends on other third-party libraries (outside of Python's standard library),
+those must be listed in the ``requires`` list of the ``[build-system]`` table,
+so that the build frontend knows to install them
+when building the :term:`distributions <Distribution Package>`.
+
+For example, a :file:`setup.py` file such as this:
+
+.. code:: python
+
+ import setuptools
+ import some_build_toolkit # comes from the `some-build-toolkit` library
+
+ def get_version():
+ version = some_build_toolkit.compute_version()
+ return version
+
+ setuptools.setup(
+ name="my-project",
+ version=get_version(),
+ )
+
+
+requires a :file:`pyproject.toml` file like this (:file:`setup.py` stays unchanged):
+
+.. code:: toml
+
+ [build-system]
+ requires = [
+ "setuptools",
+ "some-build-toolkit",
+ ]
+ build-backend = "setuptools.build_meta"
+
+
+For more details:
+
+* :ref:`pyproject-build-system-table`
+
+
+What is the build isolation feature?
+====================================
+
+Build frontends typically create an ephemeral virtual environment
+where they install only the build dependencies (and their dependencies)
+that are listed under ``build-system.requires``
+and trigger the build in that environment.
+
+For some projects this isolation is unwanted and it can be deactivated as follows:
+
+* ``python -m build --no-isolation``
+* ``python -m pip install --no-build-isolation``
+
+For more details:
+
+* :doc:`pip:reference/build-system`
+
+
+How to handle packaging metadata?
+=================================
+
+All static metadata can optionally be moved to a ``[project]`` table in :file:`pyproject.toml`.
+
+For example, a :file:`setup.py` file such as this:
+
+.. code:: python
+
+ import setuptools
+
+ setuptools.setup(
+ name="my-project",
+ version="1.2.3",
+ )
+
+
+can be entirely replaced by a :file:`pyproject.toml` file like this:
+
+.. code:: toml
+
+ [build-system]
+ requires = ["setuptools"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "my-project"
+ version = "1.2.3"
+
+
+Read :ref:`pyproject-project-table` for the full specification
+of the content allowed in the ``[project]`` table.
+
+
+How to handle dynamic metadata?
+===============================
+
+If some packaging metadata fields are not static
+they need to be listed as ``dynamic`` in this ``[project]`` table.
+
+For example, a :file:`setup.py` file such as this:
+
+.. code:: python
+
+ import setuptools
+ import some_build_toolkit
+
+ def get_version():
+ version = some_build_toolkit.compute_version()
+ return version
+
+ setuptools.setup(
+ name="my-project",
+ version=get_version(),
+ )
+
+
+can be modernized as follows:
+
+.. code:: toml
+
+ [build-system]
+ requires = [
+ "setuptools",
+ "some-build-toolkit",
+ ]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "my-project"
+ dynamic = ["version"]
+
+
+.. code:: python
+
+ import setuptools
+ import some_build_toolkit
+
+ def get_version():
+ version = some_build_toolkit.compute_version()
+ return version
+
+ setuptools.setup(
+ version=get_version(),
+ )
+
+
+For more details:
+
+* :ref:`declaring-project-metadata-dynamic`
+
+
+What if something that can not be changed expects a ``setup.py`` file?
+======================================================================
+
+For example, a process exists that can not be changed easily
+and it needs to execute a command such as ``python setup.py --name``.
+
+It is perfectly fine to leave a :file:`setup.py` file in the project source tree
+even after all its content has been moved to :file:`pyproject.toml`.
+This file can be as minimalistic as this:
+
+.. code:: python
+
+ import setuptools
+
+ setuptools.setup()
+
+
+Where to read more about this?
+==============================
+
+* :ref:`pyproject-toml-spec`
+* :doc:`pip:reference/build-system`
+* :doc:`setuptools:build_meta`
diff --git a/_build/html/_sources/guides/multi-version-installs.rst.txt b/_build/html/_sources/guides/multi-version-installs.rst.txt
new file mode 100644
index 000000000..a09bc900a
--- /dev/null
+++ b/_build/html/_sources/guides/multi-version-installs.rst.txt
@@ -0,0 +1,43 @@
+:orphan:
+
+.. _`Multi-version installs`:
+
+Multi-version installs
+======================
+
+:Page Status: Obsolete
+
+
+easy_install allows simultaneous installation of different versions of the same
+project into a single environment shared by multiple programs which must
+``require`` the appropriate version of the project at run time (using
+``pkg_resources``).
+
+For many use cases, virtual environments address this need without the
+complication of the ``require`` directive. However, the advantage of
+parallel installations within the same environment is that it works for an
+environment shared by multiple applications, such as the system Python in a
+Linux distribution.
+
+The major limitation of ``pkg_resources`` based parallel installation is
+that as soon as you import ``pkg_resources`` it locks in the *default*
+version of everything which is already available on sys.path. This can
+cause problems, since ``setuptools`` created command line scripts
+use ``pkg_resources`` to find the entry point to execute. This means that,
+for example, you can't use ``require`` tests invoked through ``nose`` or a
+WSGI application invoked through ``gunicorn`` if your application needs a
+non-default version of anything that is available on the standard
+``sys.path`` - the script wrapper for the main application will lock in the
+version that is available by default, so the subsequent ``require`` call
+in your own code fails with a spurious version conflict.
+
+This can be worked around by setting all dependencies in
+``__main__.__requires__`` before importing ``pkg_resources`` for the first
+time, but that approach does mean that standard command line invocations of
+the affected tools can't be used - it's necessary to write a custom
+wrapper script or use ``python3 -c '<commands>'`` to invoke the application's
+main entry point directly.
+
+Refer to the `pkg_resources documentation
+<https://setuptools.pypa.io/en/latest/pkg_resources.html>`__
+for more details.
diff --git a/_build/html/_sources/guides/packaging-binary-extensions.rst.txt b/_build/html/_sources/guides/packaging-binary-extensions.rst.txt
new file mode 100644
index 000000000..de8a9d2d6
--- /dev/null
+++ b/_build/html/_sources/guides/packaging-binary-extensions.rst.txt
@@ -0,0 +1,417 @@
+.. _`Binary Extensions`:
+
+===========================
+Packaging binary extensions
+===========================
+
+:Page Status: Incomplete
+:Last Reviewed: 2013-12-08
+
+One of the features of the CPython reference interpreter is that, in
+addition to allowing the execution of Python code, it also exposes a rich
+C API for use by other software. One of the most common uses of this C API
+is to create importable C extensions that allow things which aren't
+always easy to achieve in pure Python code.
+
+
+An overview of binary extensions
+================================
+
+Use cases
+---------
+
+The typical use cases for binary extensions break down into just three
+conventional categories:
+
+* **accelerator modules**: these modules are completely self-contained, and
+ are created solely to run faster than the equivalent pure Python code
+ runs in CPython. Ideally, accelerator modules will always have a pure
+ Python equivalent to use as a fallback if the accelerated version isn't
+ available on a given system. The CPython standard library makes extensive
+ use of accelerator modules.
+  *Example*: When importing ``datetime``, Python falls back to the
+  `datetime.py <https://github.com/python/cpython/blob/main/Lib/datetime.py>`_
+  module if the C implementation (
+  `_datetimemodule.c <https://github.com/python/cpython/blob/main/Modules/_datetimemodule.c>`_)
+  is not available.
+* **wrapper modules**: these modules are created to expose existing C interfaces
+ to Python code. They may either expose the underlying C interface directly,
+ or else expose a more "Pythonic" API that makes use of Python language
+ features to make the API easier to use. The CPython standard library makes
+ extensive use of wrapper modules.
+  *Example*: `functools.py <https://github.com/python/cpython/blob/main/Lib/functools.py>`_
+  is a Python module wrapper for
+  `_functoolsmodule.c <https://github.com/python/cpython/blob/main/Modules/_functoolsmodule.c>`_.
+* **low-level system access**: these modules are created to access lower level
+ features of the CPython runtime, the operating system, or the underlying
+ hardware. Through platform specific code, extension modules may achieve
+ things that aren't possible in pure Python code. A number of CPython
+ standard library modules are written in C in order to access interpreter
+ internals that aren't exposed at the language level.
+  *Example*: ``sys``, which comes from
+  `sysmodule.c <https://github.com/python/cpython/blob/main/Python/sysmodule.c>`_.
+
+ One particularly notable feature of C extensions is that, when they don't
+ need to call back into the interpreter runtime, they can release CPython's
+ global interpreter lock around long-running operations (regardless of
+ whether those operations are CPU or IO bound).
+
+Not all extension modules will fit neatly into the above categories. The
+extension modules included with NumPy, for example, span all three use cases
+- they move inner loops to C for speed reasons, wrap external libraries
+written in C, FORTRAN and other languages, and use low level system
+interfaces for both CPython and the underlying operating system to support
+concurrent execution of vectorised operations and to tightly control the
+exact memory layout of created objects.
+
+
+Disadvantages
+-------------
+
+The main disadvantage of using binary extensions is the fact that it makes
+subsequent distribution of the software more difficult. One of the
+advantages of using Python is that it is largely cross platform, and the
+languages used to write extension modules (typically C or C++, but really
+any language that can bind to the CPython C API) typically require that
+custom binaries be created for different platforms.
+
+This means that binary extensions:
+
+* require that end users be able to either build them from source, or else
+ that someone publish pre-built binaries for common platforms
+
+* may not be compatible with different builds of the CPython reference
+ interpreter
+
+* often will not work correctly with alternative interpreters such as PyPy,
+ IronPython or Jython
+
+* if handcoded, make maintenance more difficult by requiring that
+ maintainers be familiar not only with Python, but also with the language
+ used to create the binary extension, as well as with the details of the
+ CPython C API.
+
+* if a pure Python fallback implementation is provided, make maintenance
+ more difficult by requiring that changes be implemented in two places,
+ and introducing additional complexity in the test suite to ensure both
+ versions are always executed.
+
+Another disadvantage of relying on binary extensions is that alternative
+import mechanisms (such as the ability to import modules directly from
+zipfiles) often won't work for extension modules (as the dynamic loading
+mechanisms on most platforms can only load libraries from disk).
+
+
+Alternatives to handcoded accelerator modules
+---------------------------------------------
+
+When extension modules are just being used to make code run faster (after
+profiling has identified the code where the speed increase is worth
+additional maintenance effort), a number of other alternatives should
+also be considered:
+
+* look for existing optimised alternatives. The CPython standard library
+ includes a number of optimised data structures and algorithms (especially
+ in the builtins and the ``collections`` and ``itertools`` modules). The
+ Python Package Index also offers additional alternatives. Sometimes, the
+ appropriate choice of standard library or third party module can avoid the
+ need to create your own accelerator module.
+
+* for long running applications, the JIT compiled `PyPy interpreter
+  <https://www.pypy.org/>`__ may offer a suitable alternative to the standard
+ CPython runtime. The main barrier to adopting PyPy is typically reliance
+ on other binary extension modules - while PyPy does emulate the CPython
+ C API, modules that rely on that cause problems for the PyPy JIT, and the
+ emulation layer can often expose latent defects in extension modules that
+ CPython currently tolerates (frequently around reference counting errors -
+ an object having one live reference instead of two often won't break
+ anything, but no references instead of one is a major problem).
+
+* `Cython <https://cython.org/>`__ is a mature static compiler that can
+ compile most Python code to C extension modules. The initial compilation
+ provides some speed increases (by bypassing the CPython interpreter layer),
+ and Cython's optional static typing features can offer additional
+ opportunities for speed increases. Using Cython still carries the
+ `disadvantages`_ associated with using binary extensions,
+ but has the benefit of having a reduced barrier to entry for Python
+ programmers (relative to other languages like C or C++).
+
+* `Numba <https://numba.pydata.org/>`__ is a newer tool, created by members
+ of the scientific Python community, that aims to leverage LLVM to allow
+ selective compilation of pieces of a Python application to native
+ machine code at runtime. It requires that LLVM be available on the
+ system where the code is running, but can provide significant speed
+ increases, especially for operations that are amenable to vectorisation.
+
+
+Alternatives to handcoded wrapper modules
+-----------------------------------------
+
+The C ABI (Application Binary Interface) is a common standard for sharing
+functionality between multiple applications. One of the strengths of the
+CPython C API (Application Programming Interface) is allowing Python users
+to tap into that functionality. However, wrapping modules by hand is quite
+tedious, so a number of other alternative approaches should be considered.
+
+The approaches described below don't simplify the distribution case at all,
+but they *can* significantly reduce the maintenance burden of keeping
+wrapper modules up to date.
+
+* In addition to being useful for the creation of accelerator modules,
+  `Cython <https://cython.org/>`__ is also widely used for creating wrapper
+ modules for C or C++ APIs. It involves wrapping the interfaces by
+ hand, which gives a wide range of freedom in designing and optimising
+ the wrapper code, but may not be a good choice for wrapping very
+ large APIs quickly. See the
+ `list of third-party tools `_
+ for automatic wrapping with Cython. It also supports performance-oriented
+ Python implementations that provide a CPython-like C-API, such as PyPy
+ and Pyston.
+
+* :doc:`pybind11 <pybind11:index>` is a pure C++11 library
+ that provides a clean C++ interface to the CPython (and PyPy) C API. It
+ does not require a pre-processing step; it is written entirely in
+ templated C++. Helpers are included for Setuptools or CMake builds. It
+  was based on `Boost.Python
+  <https://www.boost.org/doc/libs/release/libs/python/doc/html/index.html>`__,
+ but doesn't require the Boost libraries or BJam.
+
+* :doc:`cffi <cffi:index>` is a project created by some of the PyPy
+ developers to make it straightforward for developers that already know
+ both Python and C to expose their C modules to Python applications. It
+ also makes it relatively straightforward to wrap a C module based on its
+ header files, even if you don't know C yourself.
+
+ One of the key advantages of ``cffi`` is that it is compatible with the
+ PyPy JIT, allowing CFFI wrapper modules to participate fully in PyPy's
+ tracing JIT optimisations.
+
+* `SWIG <https://www.swig.org/>`__ is a wrapper interface generator that
+ allows a variety of programming languages, including Python, to interface
+ with C and C++ code.
+
+* The standard library's ``ctypes`` module, while useful for getting access
+ to C level interfaces when header information isn't available, suffers
+ from the fact that it operates solely at the C ABI level, and thus has
+ no automatic consistency checking between the interface actually being
+ exported by the library and the one declared in the Python code. By
+ contrast, the above alternatives are all able to operate at the C *API*
+ level, using C header files to ensure consistency between the interface
+ exported by the library being wrapped and the one expected by the Python
+ wrapper module. While ``cffi`` *can* operate directly at the C ABI level,
+ it suffers from the same interface inconsistency problems as ``ctypes``
+ when it is used that way.
+
+
+Alternatives for low level system access
+----------------------------------------
+
+For applications that need low level system access (regardless of the
+reason), a binary extension module often *is* the best way to go about it.
+This is particularly true for low level access to the CPython runtime
+itself, since some operations (like releasing the Global Interpreter Lock)
+are simply invalid when the interpreter is running code, even if a module
+like ``ctypes`` or ``cffi`` is used to obtain access to the relevant C
+API interfaces.
+
+For cases where the extension module is manipulating the underlying
+operating system or hardware (rather than the CPython runtime), it may
+sometimes be better to just write an ordinary C library (or a library in
+another systems programming language like C++ or Rust that can export a C
+compatible ABI), and then use one of the wrapping techniques described
+above to make the interface available as an importable Python module.
+
+
+Implementing binary extensions
+==============================
+
+The CPython :doc:`Extending and Embedding <python:extending/index>`
+guide includes an introduction to writing a
+:doc:`custom extension module in C <python:extending/extending>`.
+
+FIXME: Elaborate that all this is one of the reasons why you probably
+*don't* want to handcode your extension modules :)
+
+
+Extension module lifecycle
+--------------------------
+
+FIXME: This section needs to be fleshed out.
+
+
+Implications of shared static state and subinterpreters
+-------------------------------------------------------
+
+FIXME: This section needs to be fleshed out.
+
+
+Implications of the GIL
+-----------------------
+
+FIXME: This section needs to be fleshed out.
+
+
+Memory allocation APIs
+----------------------
+
+FIXME: This section needs to be fleshed out.
+
+
+.. _cpython-stable-abi:
+
+ABI Compatibility
+-----------------
+
+The CPython C API does not guarantee ABI stability between minor releases
+(3.2, 3.3, 3.4, etc.). This means that, typically, if you build an
+extension module against one version of Python, it is only guaranteed to
+work with the same minor version of Python and not with any other minor
+versions.
+
+Python 3.2 introduced the Limited API, which is a well-defined subset of
+Python's C API. The symbols needed for the Limited API form the
+"Stable ABI" which is guaranteed to be compatible across all Python 3.x
+versions. Wheels containing extensions built against the stable ABI use
+the ``abi3`` ABI tag, to reflect that they're compatible with all Python
+3.x versions.
+
+CPython's :doc:`C API stability <python:c-api/stable>` page provides
+detailed information about the API / ABI stability guarantees, how to use
+the Limited API and the exact contents of the "Limited API".
+
+
+Building binary extensions
+==========================
+
+FIXME: Cover the build-backends available for building extensions.
+
+Building extensions for multiple platforms
+------------------------------------------
+
+If you plan to distribute your extension, you should provide
+:term:`wheels