1 change: 1 addition & 0 deletions RELEASENOTES.md
@@ -6,6 +6,7 @@ Releases, starting with 9/2/2021, are listed with the most recent release at the
__API Changes__:

Fix `torch.jit.ScriptModule.zero_grad`.<br/>
Add ReadOnlySpan overloads to many methods.<br/>

# NuGet Version 0.105.2

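The release note above describes an overload pattern rather than a single API. The sketch below is illustrative only: `example_sum` is not a real TorchSharp method, and it merely shows how a `ReadOnlySpan<T>` overload can sit beside an existing collection-based one so callers with stack-allocated or sliced buffers avoid building an intermediate collection.

```csharp
// Illustration of the overload pattern only; `example_sum` is hypothetical
// and not part of the PR's actual API surface.
using System;
using System.Collections.Generic;

static class SpanOverloadSketch
{
    // Existing collection-based shape.
    public static double example_sum(IList<double> values)
    {
        double total = 0;
        foreach (var v in values) total += v;
        return total;
    }

    // Added span-based shape: accepts arrays, slices, and stackalloc buffers.
    public static double example_sum(ReadOnlySpan<double> values)
    {
        double total = 0;
        foreach (var v in values) total += v;
        return total;
    }
}

// A span caller picks the new overload without allocating a collection:
//   Span<double> buf = stackalloc double[] { 1.0, 2.0, 3.0 };
//   double s = SpanOverloadSketch.example_sum(buf);
```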
12 changes: 6 additions & 6 deletions src/TorchSharp/Autograd.cs
@@ -135,9 +135,9 @@ public static IList<Tensor> grad(IList<Tensor> outputs, IList<Tensor> inputs, IL
using var grads = new PinnedArray<IntPtr>();
using var results = new PinnedArray<IntPtr>();

IntPtr outsRef = outs.CreateArray(outputs.Select(p => p.Handle).ToArray());
IntPtr insRef = ins.CreateArray(inputs.Select(p => p.Handle).ToArray());
IntPtr gradsRef = grad_outputs == null ? IntPtr.Zero : grads.CreateArray(grad_outputs.Select(p => p.Handle).ToArray());
IntPtr outsRef = outs.CreateArray(outputs.ToHandleArray());
IntPtr insRef = ins.CreateArray(inputs.ToHandleArray());
IntPtr gradsRef = grad_outputs == null ? IntPtr.Zero : grads.CreateArray(grad_outputs.ToHandleArray());
long gradsLength = grad_outputs == null ? 0 : grads.Array.Length;

THSAutograd_grad(outsRef, outs.Array.Length, insRef, ins.Array.Length, gradsRef, gradsLength, retain_graph, create_graph, allow_unused, results.CreateArray);
@@ -178,9 +178,9 @@ public static void backward(IList<Tensor> tensors, IList<Tensor> grad_tensors =
using var ts = new PinnedArray<IntPtr>();
using var gts = new PinnedArray<IntPtr>();
using var ins = new PinnedArray<IntPtr>();
IntPtr tensRef = ts.CreateArray(tensors.Select(p => p.Handle).ToArray());
IntPtr gradsRef = grad_tensors == null ? IntPtr.Zero : gts.CreateArray(grad_tensors.Select(p => p.Handle).ToArray());
IntPtr insRef = inputs == null ? IntPtr.Zero : ins.CreateArray(inputs.Select(p => p.Handle).ToArray());
IntPtr tensRef = ts.CreateArray(tensors.ToHandleArray());
IntPtr gradsRef = grad_tensors == null ? IntPtr.Zero : gts.CreateArray(grad_tensors.ToHandleArray());
IntPtr insRef = inputs == null ? IntPtr.Zero : ins.CreateArray(inputs.ToHandleArray());
long insLength = inputs == null ? 0 : ins.Array.Length;
long gradsLength = grad_tensors == null ? 0 : gts.Array.Length;

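The `ToHandleArray()` helper these hunks switch to is not defined in this excerpt. The following is an assumption about its likely shape, equivalent to the inline `Select(p => p.Handle).ToArray()` expressions it replaces:

```csharp
// Assumed shape of the helper; the real definition is not shown in this diff.
using System;
using System.Collections.Generic;
using System.Linq;
using TorchSharp;

internal static class HandleArraySketch
{
    // Same effect as the removed Select(p => p.Handle).ToArray() calls:
    // project each tensor to its native handle and materialize an array.
    internal static IntPtr[] ToHandleArray(this IEnumerable<torch.Tensor> tensors)
        => tensors.Select(t => t.Handle).ToArray();
}
```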
10 changes: 5 additions & 5 deletions src/TorchSharp/AutogradFunction.cs
@@ -148,7 +148,7 @@ internal List<Tensor> ComputeVariableInput(object[] args)
internal void SetNextEdges(List<Tensor> inputVars, bool isExecutable)
{
using var l = new PinnedArray<IntPtr>();
THSAutograd_CSharpNode_setNextEdges(handle, l.CreateArrayWithSize(inputVars.Select(v => v.Handle).ToArray()), isExecutable);
THSAutograd_CSharpNode_setNextEdges(handle, l.CreateArrayWithSize(inputVars.ToHandleArray()), isExecutable);
CheckForErrors();
}

@@ -166,10 +166,10 @@ internal List<Tensor> WrapOutputs(List<Tensor> inputVars, List<Tensor> outputs,
using var outputArr = new PinnedArray<IntPtr>();
using var resultsArr = new PinnedArray<IntPtr>();

var varsPtr = varsArr.CreateArrayWithSize(inputVars.Select(v => v.Handle).ToArray());
var diffsPtr = diffArr.CreateArrayWithSize(_context.NonDifferentiableTensors.Select(v => v.Handle).ToArray());
var dirtyPtr = diffArr.CreateArrayWithSize(_context.DirtyTensors.Select(v => v.Handle).ToArray());
var outputPtr = outputArr.CreateArrayWithSize(outputs.Select(v => v.Handle).ToArray());
var varsPtr = varsArr.CreateArrayWithSize(inputVars.ToHandleArray());
var diffsPtr = diffArr.CreateArrayWithSize(_context.NonDifferentiableTensors.ToHandleArray());
var dirtyPtr = diffArr.CreateArrayWithSize(_context.DirtyTensors.ToHandleArray());
var outputPtr = outputArr.CreateArrayWithSize(outputs.ToHandleArray());

THSAutograd_Function_wrapOutputs(varsPtr, diffsPtr, dirtyPtr, outputPtr, isExecutable ? handle : new(), resultsArr.CreateArray);
CheckForErrors();
2 changes: 1 addition & 1 deletion src/TorchSharp/LinearAlgebra.cs
@@ -444,7 +444,7 @@ public static Tensor multi_dot(IList<Tensor> tensors)
}

using (var parray = new PinnedArray<IntPtr>()) {
IntPtr tensorsRef = parray.CreateArray(tensors.Select(p => p.Handle).ToArray());
IntPtr tensorsRef = parray.CreateArray(tensors.ToHandleArray());
var res = THSLinalg_multi_dot(tensorsRef, parray.Array.Length);
if (res == IntPtr.Zero)
torch.CheckForErrors();
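For context, a small usage sketch of the `torch.linalg.multi_dot` API touched above (tensor shapes chosen arbitrarily):

```csharp
// Usage sketch: chain three matrix products with multi_dot.
using static TorchSharp.torch;

var A = randn(2, 3);
var B = randn(3, 4);
var C = randn(4, 5);
var result = linalg.multi_dot(new[] { A, B, C });  // result has shape [2, 5]
```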
4 changes: 2 additions & 2 deletions src/TorchSharp/NN/Utils/RNNUtils.cs
@@ -55,7 +55,7 @@ public static (torch.Tensor, torch.Tensor) pad_packed_sequence(PackedSequence se
/// <returns>The padded tensor</returns>
public static torch.Tensor pad_sequence(IEnumerable<torch.Tensor> sequences, bool batch_first = false, double padding_value = 0.0)
{
var sequences_arg = sequences.Select(p => p.Handle).ToArray();
var sequences_arg = sequences.ToHandleArray();
var res = THSNN_pad_sequence(sequences_arg, sequences_arg.Length, batch_first, padding_value);
if (res == IntPtr.Zero) { torch.CheckForErrors(); }
return new torch.Tensor(res);
@@ -69,7 +69,7 @@ public static torch.Tensor pad_sequence(IEnumerable<torch.Tensor> sequences, boo
/// <returns>The packed batch of variable length sequences</returns>
public static PackedSequence pack_sequence(IEnumerable<torch.Tensor> sequences, bool enforce_sorted = true)
{
var sequences_arg = sequences.Select(p => p.Handle).ToArray();
var sequences_arg = sequences.ToHandleArray();
var res = THSNN_pack_sequence(sequences_arg, sequences_arg.Length, enforce_sorted);
if (res.IsInvalid) { torch.CheckForErrors(); }
return new PackedSequence(res);
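Similarly, a brief usage sketch of `pad_sequence`, which pads a batch of variable-length sequences up to the longest one:

```csharp
// Usage sketch: pad two sequences of different lengths into one batch.
using static TorchSharp.torch;

var a = ones(3, 5);   // sequence of length 3
var b = ones(2, 5);   // sequence of length 2
var padded = nn.utils.rnn.pad_sequence(new[] { a, b }, batch_first: true, padding_value: 0.0);
// padded has shape [2, 3, 5]; the shorter sequence is padded with zeros.
```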
2 changes: 1 addition & 1 deletion src/TorchSharp/Optimizers/LBFGS.cs
@@ -47,7 +47,7 @@ public static LBFGS LBFGS(IEnumerable<Parameter> parameters, double lr = 0.01, l
if (!max_eval.HasValue) max_eval = 5 * max_iter / 4;

using var parray = new PinnedArray<IntPtr>();
IntPtr paramsRef = parray.CreateArray(parameters.Select(p => p.Handle).ToArray());
IntPtr paramsRef = parray.CreateArray(parameters.ToHandleArray());

var res = THSNN_LBFGS_ctor(paramsRef, parray.Array.Length, lr, max_iter, max_eval.Value, tolerange_grad, tolerance_change, history_size);
if (res == IntPtr.Zero) { torch.CheckForErrors(); }