From 7c35dbdc7304b00baaba08c4b1dd0ba234355849 Mon Sep 17 00:00:00 2001
From: Ricardo Vieira
Date: Wed, 22 Oct 2025 12:34:20 +0200
Subject: [PATCH 1/2] Do not coerce gradients to TensorVariable

This could cause spurious disconnected errors, because the tensorified
variable was not in the graph of the cost
---
 pytensor/gradient.py   | 23 ++++++++++++-----------
 tests/test_gradient.py | 12 ++++++++++++
 2 files changed, 24 insertions(+), 11 deletions(-)

diff --git a/pytensor/gradient.py b/pytensor/gradient.py
index 663db99bce..68c1d7da03 100644
--- a/pytensor/gradient.py
+++ b/pytensor/gradient.py
@@ -494,22 +494,25 @@ def Lop(
     coordinates of the tensor elements. If `f` is a list/tuple, then return a
     list/tuple with the results.
     """
+    from pytensor.tensor import as_tensor_variable
+
     if not isinstance(eval_points, list | tuple):
-        _eval_points: list[Variable] = [pytensor.tensor.as_tensor_variable(eval_points)]
-    else:
-        _eval_points = [pytensor.tensor.as_tensor_variable(x) for x in eval_points]
+        eval_points = [eval_points]
+    _eval_points = [
+        x if isinstance(x, Variable) else as_tensor_variable(x) for x in eval_points
+    ]
 
     if not isinstance(f, list | tuple):
-        _f: list[Variable] = [pytensor.tensor.as_tensor_variable(f)]
-    else:
-        _f = [pytensor.tensor.as_tensor_variable(x) for x in f]
+        f = [f]
+    _f = [x if isinstance(x, Variable) else as_tensor_variable(x) for x in f]
 
     grads = list(_eval_points)
 
+    using_list = isinstance(wrt, list)
+    using_tuple = isinstance(wrt, tuple)
     if not isinstance(wrt, list | tuple):
-        _wrt: list[Variable] = [pytensor.tensor.as_tensor_variable(wrt)]
-    else:
-        _wrt = [pytensor.tensor.as_tensor_variable(x) for x in wrt]
+        wrt = [wrt]
+    _wrt = [x if isinstance(x, Variable) else as_tensor_variable(x) for x in wrt]
 
     assert len(_f) == len(grads)
     known = dict(zip(_f, grads, strict=True))
@@ -523,8 +526,6 @@ def Lop(
         return_disconnected=return_disconnected,
     )
 
-    using_list = isinstance(wrt, list)
-    using_tuple = isinstance(wrt, tuple)
     return as_list_or_tuple(using_list, using_tuple, ret)
 
 
diff --git a/tests/test_gradient.py b/tests/test_gradient.py
index a79746da6d..34e5d6b730 100644
--- a/tests/test_gradient.py
+++ b/tests/test_gradient.py
@@ -11,6 +11,7 @@
     DisconnectedType,
     GradClip,
     GradScale,
+    Lop,
     NullTypeGradError,
     Rop,
     UndefinedGrad,
@@ -32,6 +33,7 @@
 from pytensor.graph.null_type import NullType
 from pytensor.graph.op import Op
 from pytensor.graph.traversal import graph_inputs
+from pytensor.scalar import float64
 from pytensor.scan.op import Scan
 from pytensor.tensor.math import add, dot, exp, outer, sigmoid, sqr, sqrt, tanh
 from pytensor.tensor.math import sum as pt_sum
@@ -1207,3 +1209,13 @@ def test_multiple_wrt(self):
         hessp_x_eval, hessp_y_eval = hessp_fn(**test)
         np.testing.assert_allclose(hessp_x_eval, [2, 4, 6])
         np.testing.assert_allclose(hessp_y_eval, [-6, -4, -2])
+
+
+def test_scalar_Lop():
+    xtm1 = float64("xtm1")
+    xt = xtm1**2
+
+    dout_dxt = float64("dout_dxt")
+    dout_dxtm1 = Lop(xt, wrt=xtm1, eval_points=dout_dxt)
+    assert dout_dxtm1.type == dout_dxt.type
+    assert dout_dxtm1.eval({xtm1: 3.0, dout_dxt: 1.5}) == 2 * 3.0 * 1.5
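Note on the first patch: a minimal sketch of the failure mode it fixes, mirroring the test_scalar_Lop test added above (illustrative only, not part of the patch series). Previously, Lop coerced every input with as_tensor_variable; for a scalar Variable this produced a fresh TensorVariable that was not an ancestor of `f`, so the gradient machinery reported it as disconnected:

    from pytensor.gradient import Lop
    from pytensor.scalar import float64

    # Build the cost graph directly from a scalar (non-tensor) Variable.
    xtm1 = float64("xtm1")
    xt = xtm1**2

    dout_dxt = float64("dout_dxt")

    # Before the patch, as_tensor_variable(xtm1) created a tensor wrapper
    # that did not appear in the graph of xt, triggering a spurious
    # disconnected error. With the patch, Variables pass through unchanged.
    dout_dxtm1 = Lop(xt, wrt=xtm1, eval_points=dout_dxt)
    print(dout_dxtm1.eval({xtm1: 3.0, dout_dxt: 1.5}))  # 2 * 3.0 * 1.5 = 9.0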
From 42f13679c1f3fe91332d5b690d9ddd039975674e Mon Sep 17 00:00:00 2001
From: jessegrabowski
Date: Wed, 7 Jan 2026 21:30:46 -0600
Subject: [PATCH 2/2] Type-consistent checks

---
 pytensor/gradient.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pytensor/gradient.py b/pytensor/gradient.py
index 68c1d7da03..f1aa31b9a5 100644
--- a/pytensor/gradient.py
+++ b/pytensor/gradient.py
@@ -496,13 +496,13 @@ def Lop(
     """
     from pytensor.tensor import as_tensor_variable
 
-    if not isinstance(eval_points, list | tuple):
+    if not isinstance(eval_points, Sequence):
         eval_points = [eval_points]
     _eval_points = [
         x if isinstance(x, Variable) else as_tensor_variable(x) for x in eval_points
     ]
 
-    if not isinstance(f, list | tuple):
+    if not isinstance(f, Sequence):
         f = [f]
     _f = [x if isinstance(x, Variable) else as_tensor_variable(x) for x in f]
 
@@ -510,7 +510,7 @@ def Lop(
 
     using_list = isinstance(wrt, list)
     using_tuple = isinstance(wrt, tuple)
-    if not isinstance(wrt, list | tuple):
+    if not isinstance(wrt, Sequence):
         wrt = [wrt]
     _wrt = [x if isinstance(x, Variable) else as_tensor_variable(x) for x in wrt]
 
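Note on the second patch: it widens the list | tuple checks to collections.abc.Sequence (the hunks use Sequence unqualified, so gradient.py presumably already imports it from collections.abc). A short usage sketch (illustrative only, assuming the patched Lop shown above): bare Variables and sequences of Variables are dispatched by one uniform check, while the container type of the result still mirrors the type of wrt:

    from pytensor.gradient import Lop
    from pytensor.tensor import dvector

    x = dvector("x")
    y = x**2
    v = dvector("v")

    # A bare Variable is wrapped into a one-element list internally and
    # unwrapped again on return...
    single = Lop(y, wrt=x, eval_points=v)
    assert not isinstance(single, list)

    # ...while any Sequence takes the multi-input path, and a list wrt
    # returns a list of gradients.
    as_list = Lop([y], wrt=[x], eval_points=[v])
    assert isinstance(as_list, list) and len(as_list) == 1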