From f77f5c38869dfb4fb2e7b30b1956d157993b3378 Mon Sep 17 00:00:00 2001
From: Richard Abrich
Date: Tue, 3 Mar 2026 22:57:18 -0500
Subject: [PATCH] fix: lower PyTorch minimum to 2.8.0 for vLLM compatibility

vLLM 0.11.0 pins torch==2.8.0. The GPU E2E validation (openadapt-evals
PR #87) confirmed the full ML stack works with PyTorch 2.8.0+cu128. The
previous >=2.9.1 constraint prevented installing openadapt-ml alongside
vLLM in the same environment.

Co-Authored-By: Claude Opus 4.6
---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 12fd058..56697bf 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -35,7 +35,7 @@ dependencies = [
     "pydantic-settings>=2.0.0",
     "pytest>=9.0.2",
     "pyyaml>=6.0.3",
-    "torch>=2.9.1",
+    "torch>=2.8.0",
     "torchvision>=0.24.1",
     "transformers>=4.57.3",
 ]