
Remove totem dependency (torch now has its own tester) (#124)
* Remove totem dependency (torch now has its own tester), fix contiguous grad
alexbw committed May 24, 2016
1 parent cc6d1f6 commit ff0057f
Showing 5 changed files with 32 additions and 20 deletions.
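For context, the commit swaps the external totem tester for torch7's built-in torch.Tester, which offers the same add/run/assert workflow, so the luarocks dependency and its Travis install step can go. A minimal sketch of the equivalent workflow (illustration only, not part of this diff; the test name and assertions are made up):

local torch = require 'torch'

local tester = torch.Tester()          -- previously: totem.Tester()

local tests = {
   SanityCheck = function()
      local a = torch.ones(3)
      tester:asserteq(a:sum(), 3, 'sum of ones(3) should be 3')
      tester:assertTensorEq(a, torch.ones(3), 1e-12, 'tensors should match')
   end,
}

tester:add(tests):run()                -- same add/run chaining used in test/test.lua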
1 change: 0 additions & 1 deletion .travis.yml
@@ -44,7 +44,6 @@ before_script:
- export CMAKE_LIBRARY_PATH=$HOME/OpenBlasInstall/include:$HOME/OpenBlasInstall/lib:$CMAKE_LIBRARY_PATH
- cmake .. -DCMAKE_INSTALL_PREFIX="${INSTALL_PREFIX}" -DCMAKE_BUILD_TYPE=Release -DWITH_${TORCH_LUA_VERSION}=ON
- make && make install
-   - ${INSTALL_PREFIX}/bin/luarocks install totem
- ${INSTALL_PREFIX}/bin/luarocks install trepl
- ${INSTALL_PREFIX}/bin/luarocks install nn
- git clone https://github.com/torch/xlua && cd xlua && ${INSTALL_PREFIX}/bin/luarocks make xlua-1.1-0.rockspec
1 change: 0 additions & 1 deletion autograd-scm-1.rockspec
@@ -13,7 +13,6 @@ description = {

dependencies = {
"torch >= 7.0",
"totem"
}

build = {
5 changes: 2 additions & 3 deletions src/gradfuns.lua
@@ -228,15 +228,15 @@ operators.pow = {
-- e.g. torch.view(x,3,3) and x:view(3,3)
local viewGradients = {
function(g, ans, x,sizes)
-      return torch.view(util.makeContiguous(g), torch.size(x))
+      return torch.view(torch.contiguous(g), torch.size(x))
end
}
local viewAsGradients = {
function(g, ans, x,template)
return torch.clone(torch.viewAs(g,x))
end,
function(g, ans, x,template)
-      return nil -- g.new(template:size()):zero()
+      return nil
end
}
local expandGradients = {
@@ -639,7 +639,6 @@ overload.module("util", util, function(module)
function(g, ans, x, template, dim, index) return nil end,
function(g, ans, x, template, dim, index) return nil end,
})
module.gradient("makeContiguous", zeroGradient())
module.gradient("cat", functions.catGradient)
module.static("lt")
module.static("le")
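For context on the gradfuns.lua change above: the backward pass of view must reshape the incoming gradient g to x's shape, and view only works on contiguous tensors, so g is made contiguous first, now through torch.contiguous instead of the removed util.makeContiguous helper. A standalone sketch of that contiguity requirement (assumed example, not taken from this commit):

local torch = require 'torch'

local x = torch.randn(4, 6)
local g = torch.randn(6, 4):t()        -- transposed: shaped 4x6 like x, but non-contiguous

print(g:isContiguous())                -- false; g:view(4, 6) would raise an error here
local gx = g:contiguous():view(x:size(1), x:size(2))
print(gx:isContiguous())               -- true; gx has x's shape, as the view gradient needs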
7 changes: 0 additions & 7 deletions src/util.lua
@@ -227,13 +227,6 @@ function util.cat(x, y, dim)
end
end

-function util.makeContiguous(g)
-   if not g:isContiguous() then
-      g = g:contiguous()
-   end
-   return g
-end

function util.defaultBool(b, db)
if b == nil then
return db
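The helper removed above only skipped the copy when the tensor was already contiguous. Torch's own contiguous() already behaves that way, returning a tensor backed by the same storage when no copy is needed, which is presumably why the wrapper could be dropped. A small check of that behaviour (assumed illustration, not part of the commit):

local torch = require 'torch'

local a = torch.zeros(3, 3)
a:contiguous():fill(1)                 -- already contiguous: the same storage is filled
print(a[1][1])                         -- 1

local b = torch.zeros(3, 3):t()        -- transposed view, not contiguous
b:contiguous():fill(1)                 -- a fresh contiguous copy is filled instead
print(b[1][1])                         -- 0: b itself is untouched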
38 changes: 30 additions & 8 deletions test/test.lua
@@ -1,10 +1,10 @@
-- Tester:
-local totem = require 'totem'
+local torch = require 'torch'
local autograd = require 'autograd'
local util = require 'autograd.util'
local gradcheck = require 'autograd.gradcheck' {randomizeInput = true}
local gradcheckConstant = require 'autograd.gradcheck' {randomizeInput = false}
-local tester = totem.Tester()
+local tester = torch.Tester()
local stringx = require 'pl.stringx'

autograd.protected(true)
@@ -1696,25 +1696,47 @@ local tests = {
end
df = autograd(f)
dparams, loss = df(params, 2)
end,

+   Contiguous = function()
+      -- Parameters:
+      local W = torch.Tensor(32,100):fill(.5)
+      local x = torch.Tensor(100):fill(.5)
+
+      -- Function:
+      local f1 = function(inputs)
+         return torch.sum(torch.contiguous(torch.contiguous(inputs.W)) * torch.contiguous(torch.contiguous(inputs.x)))
+      end
+
+      -- Tests:
+      tester:assert(gradcheck(f1,{W=torch.Tensor(32,100):fill(.5),x=torch.Tensor(100):fill(.5)}), "Incorrect gradient")
+   end


}

local function prefixTests(pf, t, skip)
local nt = { }
-   for k, v in pairs(t) do
-      if not skip[k] then
-         nt[pf .. k] = v
+   if type(t) == "table" then
+      for k, v in pairs(t) do
+         if not skip[k] then
+            nt[pf .. k] = v
+         end
      end
+   elseif type(t) == "string" then
+      nt = pf .. t
+   elseif type(t) == "nil" then
+      nt = nil
   end
return nt
end


-- Run tests:
print(prefixTests("Optimized_", tests, { }))
autograd.optimize(true)
tester:add(prefixTests("Optimized_", tests, { })):run()
tester:add(prefixTests("Optimized_", tests, { })):run(prefixTests("Optimized_", arg[1]))
autograd.optimize(false)
tester = totem.Tester()
tester:add(prefixTests("Direct_", tests, { GradGrad = true, AutoModule = true, DebuggerDivZero = true, StableGradients = true, ZeroGrad = true, SimpleGradGrad = true })):run()
tester = torch.Tester()
tester:add(prefixTests("Direct_", tests, { GradGrad = true, AutoModule = true, DebuggerDivZero = true, StableGradients = true, ZeroGrad = true, SimpleGradGrad = true })):run(arg[1])
