From 0d31b919ace5b9a1d9e169f77793616bbb749586 Mon Sep 17 00:00:00 2001
From: Bart van Merriënboer
Date: Fri, 27 May 2016 16:26:55 -0400
Subject: [PATCH] Fix zero gradient for subtensor assignment.

The gradient of the value being assigned is correctly computed as g[k],
but later, when the gradient of the tensor being assigned into is
computed, g[k] is set to 0. That zeroing yields the correct gradient for
the assigned-to tensor, but because both gradients share the same
storage, it also incorrectly overwrites the earlier gradient with zeros.
This patch fixes that by returning a copy of g[k] rather than a view
into g.
---
 src/gradfuns.lua | 7 ++++++-
 test/test.lua    | 5 +++++
 2 files changed, 11 insertions(+), 1 deletion(-)

diff --git a/src/gradfuns.lua b/src/gradfuns.lua
index b9c7b3b..0f20da6 100644
--- a/src/gradfuns.lua
+++ b/src/gradfuns.lua
@@ -152,7 +152,12 @@ functions.set = {
       return nil
    end,
    function(g, ans, x, k, v)
-      return g[k]
+      local gk = getValue(g[k])
+      if type(gk) == 'number' then
+         return gk
+      else
+         return torch.clone(gk)
+      end
    end,
 }
diff --git a/test/test.lua b/test/test.lua
index 54b31c8..1d807db 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -1687,6 +1687,11 @@ local tests = {
          return torch.sum(xc)
       end
       tester:assert(gradcheck(f4,{x=torch.randn(10,10),y=torch.randn(3)}), "Incorrect gradient")
+      local f5 = function(params)
+         params.x[2] = params.y*2.0
+         return torch.sum(params.x)
+      end
+      tester:assert(gradcheck(f5,{x=torch.randn(10,10),y=torch.randn(10)}), "Incorrect gradient")
    end,
    ScalarSigmoid = function()
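
For illustration, a minimal torch7 sketch of the storage-sharing behaviour the
commit message describes. It is not part of the patch and assumes plain torch
semantics, where integer indexing returns a view into the parent tensor:

-- Minimal sketch (assumes torch7; not part of the patch): indexing a
-- tensor returns a view that shares storage with its parent, so zeroing
-- g[k] after returning it as the gradient of v clobbers that gradient.
local torch = require 'torch'

local k = 2
local g = torch.randn(10, 10)      -- stands in for the incoming gradient
local grad_v = g[k]                -- view: shares storage with g
g[k]:zero()                        -- later step: zero the slot for x's gradient
print(grad_v:sum())                -- prints 0: grad_v was overwritten too

-- With the fix, the gradient of v gets its own storage and survives:
local g2 = torch.randn(10, 10)
local grad_v2 = g2[k]:clone()      -- copy instead of view
g2[k]:zero()
print(grad_v2:sum())               -- generally non-zero: unaffected

Cloning g[k] costs one extra copy per subtensor assignment, but it decouples
the two gradients so that zeroing the slot for x no longer destroys the
gradient already returned for v.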