From ff0057f4bf6a26fd0b762d2b9027aa3ea4f66436 Mon Sep 17 00:00:00 2001
From: Alex Wiltschko
Date: Tue, 24 May 2016 10:05:07 -0400
Subject: [PATCH] Remove totem dependency (torch now has its own tester) (#124)

* Remove totem dependency (torch now has its own tester), fix contiguous grad
---
 .travis.yml             |  1 -
 autograd-scm-1.rockspec |  1 -
 src/gradfuns.lua        |  5 ++---
 src/util.lua            |  7 -------
 test/test.lua           | 38 ++++++++++++++++++++++++++++++--------
 5 files changed, 32 insertions(+), 20 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 2ceff8b..79b38b0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -44,7 +44,6 @@ before_script:
 - export CMAKE_LIBRARY_PATH=$HOME/OpenBlasInstall/include:$HOME/OpenBlasInstall/lib:$CMAKE_LIBRARY_PATH
 - cmake .. -DCMAKE_INSTALL_PREFIX="${INSTALL_PREFIX}" -DCMAKE_BUILD_TYPE=Release -DWITH_${TORCH_LUA_VERSION}=ON
 - make && make install
-- ${INSTALL_PREFIX}/bin/luarocks install totem
 - ${INSTALL_PREFIX}/bin/luarocks install trepl
 - ${INSTALL_PREFIX}/bin/luarocks install nn
 - git clone https://github.com/torch/xlua && cd xlua && ${INSTALL_PREFIX}/bin/luarocks make xlua-1.1-0.rockspec
diff --git a/autograd-scm-1.rockspec b/autograd-scm-1.rockspec
index 71f92f3..b98641d 100644
--- a/autograd-scm-1.rockspec
+++ b/autograd-scm-1.rockspec
@@ -13,7 +13,6 @@ description = {
 
 dependencies = {
    "torch >= 7.0",
-   "totem"
 }
 
 build = {
diff --git a/src/gradfuns.lua b/src/gradfuns.lua
index 98f53e1..b9c7b3b 100644
--- a/src/gradfuns.lua
+++ b/src/gradfuns.lua
@@ -228,7 +228,7 @@ operators.pow = {
 -- e.g. torch.view(x,3,3) and x:view(3,3)
 local viewGradients = {
    function(g, ans, x,sizes)
-      return torch.view(util.makeContiguous(g), torch.size(x))
+      return torch.view(torch.contiguous(g), torch.size(x))
    end
 }
 local viewAsGradients = {
@@ -236,7 +236,7 @@
       return torch.clone(torch.viewAs(g,x))
    end,
    function(g, ans, x,template)
-      return nil -- g.new(template:size()):zero()
+      return nil
    end
 }
 local expandGradients = {
@@ -639,7 +639,6 @@ overload.module("util", util, function(module)
       function(g, ans, x, template, dim, index) return nil end,
       function(g, ans, x, template, dim, index) return nil end,
    })
-   module.gradient("makeContiguous", zeroGradient())
    module.gradient("cat", functions.catGradient)
    module.static("lt")
    module.static("le")
diff --git a/src/util.lua b/src/util.lua
index 9fc03dd..4433973 100644
--- a/src/util.lua
+++ b/src/util.lua
@@ -227,13 +227,6 @@ function util.cat(x, y, dim)
    end
 end
 
-function util.makeContiguous(g)
-   if not g:isContiguous() then
-      g = g:contiguous()
-   end
-   return g
-end
-
 function util.defaultBool(b, db)
    if b == nil then
       return db
diff --git a/test/test.lua b/test/test.lua
index 1d5fe48..54b31c8 100644
--- a/test/test.lua
+++ b/test/test.lua
@@ -1,10 +1,10 @@
 -- Tester:
-local totem = require 'totem'
+local torch = require 'torch'
 local autograd = require 'autograd'
 local util = require 'autograd.util'
 local gradcheck = require 'autograd.gradcheck' {randomizeInput = true}
 local gradcheckConstant = require 'autograd.gradcheck' {randomizeInput = false}
-local tester = totem.Tester()
+local tester = torch.Tester()
 local stringx = require 'pl.stringx'
 
 autograd.protected(true)
@@ -1696,6 +1696,20 @@
       end
       df = autograd(f)
       dparams, loss = df(params, 2)
+   end,
+
+   Contiguous = function()
+      -- Parameters:
+      local W = torch.Tensor(32,100):fill(.5)
+      local x = torch.Tensor(100):fill(.5)
+
+      -- Function:
+      local f1 = function(inputs)
+         return torch.sum(torch.contiguous(torch.contiguous(inputs.W)) * torch.contiguous(torch.contiguous(inputs.x)))
+      end
+
+      -- Tests:
+      tester:assert(gradcheck(f1,{W=torch.Tensor(32,100):fill(.5),x=torch.Tensor(100):fill(.5)}), "Incorrect gradient")
    end
 }
 
@@ -1703,18 +1717,26 @@
 
 
 local function prefixTests(pf, t, skip)
    local nt = { }
-   for k, v in pairs(t) do
-      if not skip[k] then
-         nt[pf .. k] = v
+   if type(t) == "table" then
+      for k, v in pairs(t) do
+         if not skip[k] then
+            nt[pf .. k] = v
+         end
       end
+   elseif type(t) == "string" then
+      nt = pf .. t
+   elseif type(t) == "nil" then
+      nt = nil
    end
    return nt
 end
 
+
 -- Run tests:
+print(prefixTests("Optimized_", tests, { }))
 autograd.optimize(true)
-tester:add(prefixTests("Optimized_", tests, { })):run()
+tester:add(prefixTests("Optimized_", tests, { })):run(prefixTests("Optimized_", arg[1]))
 autograd.optimize(false)
-tester = totem.Tester()
-tester:add(prefixTests("Direct_", tests, { GradGrad = true, AutoModule = true, DebuggerDivZero = true, StableGradients = true, ZeroGrad = true, SimpleGradGrad = true })):run()
+tester = torch.Tester()
+tester:add(prefixTests("Direct_", tests, { GradGrad = true, AutoModule = true, DebuggerDivZero = true, StableGradients = true, ZeroGrad = true, SimpleGradGrad = true })):run(arg[1])
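
For context on the test-runner change above: torch.Tester ships with torch itself, so no extra luarocks dependency is needed, and its :run() optionally accepts a test-name filter, which is what the arg[1] plumbing at the bottom of test/test.lua relies on. Below is a minimal stand-alone sketch of that usage; the file name sketch.lua and the Sanity test are illustrative assumptions, not part of this patch.

-- sketch.lua: minimal torch.Tester usage (illustrative, not part of the patch)
local torch = require 'torch'
local tester = torch.Tester()

tester:add({
   -- A trivial named test, mirroring the tester:assert style used in test/test.lua.
   Sanity = function()
      tester:assert(2 + 2 == 4, "arithmetic should hold")
   end
})

-- With no argument every registered test runs; passing a name restricts the run,
-- e.g. `th sketch.lua Sanity`, which is how arg[1] is forwarded in test/test.lua.
tester:run(arg[1])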