diff --git a/tmva/tmva/inc/TMVA/DNN/Architectures/Cpu.h b/tmva/tmva/inc/TMVA/DNN/Architectures/Cpu.h
index 45ffe910de82a..e890cf2f7e49f 100644
--- a/tmva/tmva/inc/TMVA/DNN/Architectures/Cpu.h
+++ b/tmva/tmva/inc/TMVA/DNN/Architectures/Cpu.h
@@ -403,6 +403,36 @@ class TCpu
    ///@}
 
+   //____________________________________________________________________________
+   //
+   // Zero Padding Layer Propagation
+   //____________________________________________________________________________
+   /** @name Forward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Zero-pad the matrix \p B into the matrix \p A, using the
+    *  specified padding dimensions. */
+   static void ZeroPad2DForward(TCpuMatrix<AFloat> &A, const TCpuMatrix<AFloat> &B,
+                                size_t topPad, size_t bottomPad, size_t leftPad,
+                                size_t rightPad, size_t outputHeight, size_t outputWidth);
+
+   ///@}
+
+   /** @name Backward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Perform the complete backward propagation step in a Zero Padding Layer.
+    *  The gradients at the padded positions get discarded. */
+   static void ZeroPad2DBackward(std::vector<TCpuMatrix<AFloat>> &activationGradientsBackward,
+                                 const std::vector<TCpuMatrix<AFloat>> &activationGradients,
+                                 size_t topPad, size_t bottomPad, size_t leftPad,
+                                 size_t rightPad, size_t outputHeight, size_t outputWidth,
+                                 size_t batchSize, size_t depth);
+   ///@}
+
    //____________________________________________________________________________
    //
    // Reshape Layer Propagation
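Note (illustration, not part of the patch): the forward kernel treats each matrix row as one channel and each column as a flattened pixel index. A minimal standalone sketch of the same zero-padding index arithmetic, with hypothetical names and no ROOT dependency:

    #include <cstddef>
    #include <vector>

    // Zero-pad a flattened (inputHeight x inputWidth) image into a flattened
    // (inputHeight+topPad+bottomPad) x (inputWidth+leftPad+rightPad) image.
    std::vector<float> zeroPad2D(const std::vector<float> &in, size_t inputHeight, size_t inputWidth,
                                 size_t topPad, size_t bottomPad, size_t leftPad, size_t rightPad)
    {
       size_t outputHeight = inputHeight + topPad + bottomPad;
       size_t outputWidth = inputWidth + leftPad + rightPad;
       std::vector<float> out(outputHeight * outputWidth, 0.0f); // border stays zero
       for (size_t r = 0; r < inputHeight; r++) {
          for (size_t c = 0; c < inputWidth; c++) {
             // input pixel (r, c) lands at padded position (topPad + r, leftPad + c)
             out[(topPad + r) * outputWidth + (leftPad + c)] = in[r * inputWidth + c];
          }
       }
       return out;
    }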
diff --git a/tmva/tmva/inc/TMVA/DNN/Architectures/Cuda.h b/tmva/tmva/inc/TMVA/DNN/Architectures/Cuda.h
index 0a17eed5db012..58e3eb55e2c2a 100644
--- a/tmva/tmva/inc/TMVA/DNN/Architectures/Cuda.h
+++ b/tmva/tmva/inc/TMVA/DNN/Architectures/Cuda.h
@@ -412,6 +412,36 @@ class TCuda
    ///@}
 
+   //____________________________________________________________________________
+   //
+   // Zero Padding Layer Propagation
+   //____________________________________________________________________________
+   /** @name Forward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Zero-pad the matrix \p B into the matrix \p A, using the
+    *  specified padding dimensions. */
+   static void ZeroPad2DForward(TCudaMatrix<AFloat> &A, const TCudaMatrix<AFloat> &B,
+                                size_t topPad, size_t bottomPad, size_t leftPad,
+                                size_t rightPad, size_t outputHeight, size_t outputWidth);
+
+   ///@}
+
+   /** @name Backward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Perform the complete backward propagation step in a Zero Padding Layer.
+    *  The gradients at the padded positions get discarded. */
+   static void ZeroPad2DBackward(std::vector<TCudaMatrix<AFloat>> &activationGradientsBackward,
+                                 const std::vector<TCudaMatrix<AFloat>> &activationGradients,
+                                 size_t topPad, size_t bottomPad, size_t leftPad,
+                                 size_t rightPad, size_t outputHeight, size_t outputWidth,
+                                 size_t batchSize, size_t depth);
+   ///@}
+
    //____________________________________________________________________________
    //
    // Reshape Layer Propagation
diff --git a/tmva/tmva/inc/TMVA/DNN/Architectures/Reference.h b/tmva/tmva/inc/TMVA/DNN/Architectures/Reference.h
index 294c1839a23ae..c5ebc32e58881 100644
--- a/tmva/tmva/inc/TMVA/DNN/Architectures/Reference.h
+++ b/tmva/tmva/inc/TMVA/DNN/Architectures/Reference.h
@@ -432,6 +432,38 @@ class TReference
                                     const std::vector<TMatrixT<AReal>> &indexMatrix,
                                     size_t batchSize, size_t depth, size_t nLocalViews);
    ///@}
+
+   //____________________________________________________________________________
+   //
+   // Zero Padding Layer Propagation
+   //____________________________________________________________________________
+   /** @name Forward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Zero-pad the matrix \p B into the matrix \p A, using the
+    *  specified padding dimensions. */
+   static void ZeroPad2DForward(TMatrixT<AReal> &A, const TMatrixT<AReal> &B,
+                                size_t topPad, size_t bottomPad, size_t leftPad,
+                                size_t rightPad, size_t outputHeight, size_t outputWidth);
+
+   ///@}
+
+   /** @name Backward Propagation in Zero Padding Layer
+    */
+   ///@{
+
+   /** Perform the complete backward propagation step in a Zero Padding Layer.
+    *  The gradients at the padded positions get discarded. */
+   static void ZeroPad2DBackward(std::vector<TMatrixT<AReal>> &activationGradientsBackward,
+                                 const std::vector<TMatrixT<AReal>> &activationGradients,
+                                 size_t topPad, size_t bottomPad, size_t leftPad,
+                                 size_t rightPad, size_t outputHeight, size_t outputWidth,
+                                 size_t batchSize, size_t depth);
+   ///@}
+
    //____________________________________________________________________________
    //
    // Reshape Layer Propagation
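Note (design, not part of the patch): all three backends (TCpu, TCuda, TReference) expose the same pair of static functions, and layer code selects the implementation at compile time through its Architecture_t template parameter, with no virtual dispatch. A minimal sketch of this pattern, with simplified stand-in types:

    #include <cstddef>
    #include <vector>

    struct RefBackend {
       using Matrix_t = std::vector<float>; // stand-in for TMatrixT<AReal>
       static void ZeroPad2DForward(Matrix_t & /*A*/, const Matrix_t & /*B*/, size_t /*topPad*/,
                                    size_t /*bottomPad*/, size_t /*leftPad*/, size_t /*rightPad*/,
                                    size_t /*outputHeight*/, size_t /*outputWidth*/)
       { /* reference implementation would go here */ }
    };

    template <typename Architecture_t>
    struct PaddingLayerSketch {
       void Forward(typename Architecture_t::Matrix_t &out, const typename Architecture_t::Matrix_t &in)
       {
          // resolved at compile time against the chosen backend
          Architecture_t::ZeroPad2DForward(out, in, 1, 1, 1, 1, 4, 4);
       }
    };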
diff --git a/tmva/tmva/inc/TMVA/DNN/CNN/PaddingLayer.h b/tmva/tmva/inc/TMVA/DNN/CNN/PaddingLayer.h
new file mode 100644
index 0000000000000..bf9ff252310c0
--- /dev/null
+++ b/tmva/tmva/inc/TMVA/DNN/CNN/PaddingLayer.h
@@ -0,0 +1,218 @@
+// @(#)root/tmva/tmva/dnn:$Id$
+// Author: Siddhartha Rao Kamalakara
+
+/**********************************************************************************
+ * Project: TMVA - a Root-integrated toolkit for multivariate data analysis      *
+ * Package: TMVA                                                                  *
+ * Class  : TPaddingLayer2D                                                       *
+ * Web    : http://tmva.sourceforge.net                                           *
+ *                                                                                *
+ * Description:                                                                   *
+ *      Padding Layer                                                             *
+ *                                                                                *
+ * Authors (alphabetical):                                                        *
+ *      Siddhartha Rao Kamalakara - CERN, Switzerland                             *
+ *                                                                                *
+ * Copyright (c) 2005-2015:                                                       *
+ *      CERN, Switzerland                                                         *
+ *      U. of Victoria, Canada                                                    *
+ *      MPI-K Heidelberg, Germany                                                 *
+ *      U. of Bonn, Germany                                                       *
+ *                                                                                *
+ * Redistribution and use in source and binary forms, with or without            *
+ * modification, are permitted according to the terms listed in LICENSE          *
+ * (http://tmva.sourceforge.net/LICENSE)                                          *
+ **********************************************************************************/
+
+#ifndef TMVA_CNN_PADDINGLAYER2D
+#define TMVA_CNN_PADDINGLAYER2D
+
+#include "TMatrix.h"
+
+#include "TMVA/DNN/GeneralLayer.h"
+#include "TMVA/DNN/Functions.h"
+
+#include <iostream>
+#include <vector>
+
+namespace TMVA {
+namespace DNN {
+namespace CNN {
+
+/** \class TPaddingLayer2D
+    Layer that zero-pads each 2D input along its height and width. */
+template <typename Architecture_t>
+class TPaddingLayer2D : public VGeneralLayer<Architecture_t>
+{
+
+public:
+   using Matrix_t = typename Architecture_t::Matrix_t;
+   using Scalar_t = typename Architecture_t::Scalar_t;
+
+private:
+   size_t fTopPad;       ///< number of zero rows added at the top
+   size_t fBottomPad;    ///< number of zero rows added at the bottom
+   size_t fLeftPad;      ///< number of zero columns added on the left
+   size_t fRightPad;     ///< number of zero columns added on the right
+   size_t fOutputHeight; ///< height of the padded output
+   size_t fOutputWidth;  ///< width of the padded output
+
+   static size_t calculateDimension(size_t imgHeight, size_t imgWidth, size_t padLeft, size_t padRight,
+                                    size_t padTop, size_t padBottom);
+
+public:
+   /*! Constructor. */
+   TPaddingLayer2D(size_t batchSize, size_t inputDepth, size_t inputHeight, size_t inputWidth,
+                   size_t depth, size_t height, size_t width,
+                   size_t topPad, size_t bottomPad, size_t leftPad, size_t rightPad);
+
+   /*! Copy the padding layer provided as a pointer. */
+   TPaddingLayer2D(TPaddingLayer2D<Architecture_t> *layer);
+
+   /*! Copy constructor. */
+   TPaddingLayer2D(const TPaddingLayer2D &);
+
+   /*! Destructor. */
+   ~TPaddingLayer2D();
+
+   /*! Pads the input with zeros along the height and width dimensions,
+    *  for each example in the batch. */
+   void Forward(std::vector<Matrix_t> &input, bool applyDropout = false);
+
+   /*! Discards the gradients at the padded positions,
+    *  since the corresponding inputs are zero. */
+   void Backward(std::vector<Matrix_t> &gradients_backward,
+                 const std::vector<Matrix_t> & /*activations_backward*/,
+                 std::vector<Matrix_t> & /*inp1*/, std::vector<Matrix_t> & /*inp2*/);
+
+   /*! Writes the information about the layer in an XML node. */
+   virtual void AddWeightsXMLTo(void *parent);
+
+   /*! Read the information about the layer from an XML node. */
+   virtual void ReadWeightsFromXML(void *parent);
+
+   /*! Prints the info about the layer. */
+   void Print() const;
+
+   size_t GetTopPadding() const { return fTopPad; }
+
+   size_t GetBottomPadding() const { return fBottomPad; }
+
+   size_t GetLeftPadding() const { return fLeftPad; }
+
+   size_t GetRightPadding() const { return fRightPad; }
+
+   size_t GetOutputHeight() const { return fOutputHeight; }
+
+   size_t GetOutputWidth() const { return fOutputWidth; }
+};
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(size_t batchSize, size_t inputDepth, size_t inputHeight,
+                                                 size_t inputWidth, size_t depth, size_t height, size_t width,
+                                                 size_t topPad, size_t bottomPad, size_t leftPad, size_t rightPad)
+   : VGeneralLayer<Architecture_t>(batchSize, inputDepth, inputHeight, inputWidth, depth, height, width, 0, 0, 0,
+                                   0, 0, 0, batchSize, inputDepth,
+                                   calculateDimension(inputHeight, inputWidth, leftPad, rightPad, topPad, bottomPad),
+                                   EInitialization::kZero),
+     fTopPad(topPad), fBottomPad(bottomPad), fLeftPad(leftPad), fRightPad(rightPad),
+     fOutputHeight(inputHeight + topPad + bottomPad), fOutputWidth(inputWidth + leftPad + rightPad)
+{
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(TPaddingLayer2D<Architecture_t> *layer)
+   : VGeneralLayer<Architecture_t>(layer), fTopPad(layer->GetTopPadding()), fBottomPad(layer->GetBottomPadding()),
+     fLeftPad(layer->GetLeftPadding()), fRightPad(layer->GetRightPadding()),
+     fOutputHeight(layer->GetOutputHeight()), fOutputWidth(layer->GetOutputWidth())
+{
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(const TPaddingLayer2D &layer)
+   : VGeneralLayer<Architecture_t>(layer), fTopPad(layer.fTopPad), fBottomPad(layer.fBottomPad),
+     fLeftPad(layer.fLeftPad), fRightPad(layer.fRightPad),
+     fOutputHeight(layer.fOutputHeight), fOutputWidth(layer.fOutputWidth)
+{
+   // Nothing to do here.
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+TPaddingLayer2D<Architecture_t>::~TPaddingLayer2D()
+{
+   // Nothing to do here.
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+auto TPaddingLayer2D<Architecture_t>::Forward(std::vector<Matrix_t> &input, bool /*applyDropout*/) -> void
+{
+   for (size_t i = 0; i < this->GetBatchSize(); i++) {
+      Architecture_t::ZeroPad2DForward(this->GetOutputAt(i), input[i], fTopPad, fBottomPad, fLeftPad, fRightPad,
+                                       this->GetOutputHeight(), this->GetOutputWidth());
+   }
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+auto TPaddingLayer2D<Architecture_t>::Backward(std::vector<Matrix_t> &gradients_backward,
+                                               const std::vector<Matrix_t> & /*activations_backward*/,
+                                               std::vector<Matrix_t> & /*inp1*/,
+                                               std::vector<Matrix_t> & /*inp2*/) -> void
+{
+   Architecture_t::ZeroPad2DBackward(gradients_backward, this->GetActivationGradients(), fTopPad, fBottomPad,
+                                     fLeftPad, fRightPad, this->GetOutputHeight(), this->GetOutputWidth(),
+                                     this->GetBatchSize(), this->GetDepth());
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+auto TPaddingLayer2D<Architecture_t>::Print() const -> void
+{
+   std::cout << " PADDING Layer \t ";
+   std::cout << "Input = ( " << this->GetInputDepth() << " , " << this->GetInputHeight() << " , "
+             << this->GetInputWidth() << " ) ";
+   if (this->GetOutput().size() > 0) {
+      std::cout << "\tOutput = ( " << this->GetOutput().size() << " , " << this->GetOutput()[0].GetNrows()
+                << " , " << this->GetOutput()[0].GetNcols() << " ) ";
+   }
+   std::cout << std::endl;
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+auto TPaddingLayer2D<Architecture_t>::AddWeightsXMLTo(void *parent) -> void
+{
+   auto layerxml = gTools().xmlengine().NewChild(parent, 0, "PaddingLayer2D");
+
+   // write info for the padding layer
+   gTools().xmlengine().NewAttr(layerxml, 0, "LeftPad", gTools().StringFromInt(this->GetLeftPadding()));
+   gTools().xmlengine().NewAttr(layerxml, 0, "RightPad", gTools().StringFromInt(this->GetRightPadding()));
+   gTools().xmlengine().NewAttr(layerxml, 0, "TopPad", gTools().StringFromInt(this->GetTopPadding()));
+   gTools().xmlengine().NewAttr(layerxml, 0, "BottomPad", gTools().StringFromInt(this->GetBottomPadding()));
+}
+
+//______________________________________________________________________________
+template <typename Architecture_t>
+void TPaddingLayer2D<Architecture_t>::ReadWeightsFromXML(void * /*parent*/)
+{
+   // no info to read
+}
+
+//_________________________________________________________________________________________________
+template <typename Architecture_t>
+size_t TPaddingLayer2D<Architecture_t>::calculateDimension(size_t imgHeight, size_t imgWidth, size_t padLeft,
+                                                           size_t padRight, size_t padTop, size_t padBottom)
+{
+   size_t height = imgHeight + padTop + padBottom;
+   size_t width = imgWidth + padLeft + padRight;
+
+   return height * width;
+}
+
+} // namespace CNN
+} // namespace DNN
+} // namespace TMVA
+
+#endif
\ No newline at end of file
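A hedged usage sketch of the new layer on its own, assuming a ROOT/TMVA build with this patch applied and the reference backend; the dimensions follow the constructor arguments above, and the snippet is illustrative rather than a tested example:

    #include "TMVA/DNN/Architectures/Reference.h"
    #include "TMVA/DNN/CNN/PaddingLayer.h"
    #include <vector>

    using Architecture_t = TMVA::DNN::TReference<double>;
    using PadLayer_t = TMVA::DNN::CNN::TPaddingLayer2D<Architecture_t>;

    void paddingLayerSketch()
    {
       // batch of 2 examples, 1 channel, 3x3 input, padded by 1 on every side -> 5x5
       PadLayer_t layer(/*batchSize=*/2, /*inputDepth=*/1, /*inputHeight=*/3, /*inputWidth=*/3,
                        /*depth=*/1, /*height=*/5, /*width=*/5,
                        /*topPad=*/1, /*bottomPad=*/1, /*leftPad=*/1, /*rightPad=*/1);

       // each batch element is a (depth x height*width) matrix
       std::vector<TMatrixT<double>> input(2, TMatrixT<double>(1, 9));
       layer.Forward(input); // fills the layer output with zero-padded copies
       layer.Print();        // PADDING Layer   Input = ( 1 , 3 , 3 ) ...
    }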
diff --git a/tmva/tmva/inc/TMVA/DNN/DeepNet.h b/tmva/tmva/inc/TMVA/DNN/DeepNet.h
index 400546ce343f3..51bb8a4a255dc 100644
--- a/tmva/tmva/inc/TMVA/DNN/DeepNet.h
+++ b/tmva/tmva/inc/TMVA/DNN/DeepNet.h
@@ -14,6 +14,7 @@
  *      Akshay Vashistha      - CERN, Switzerland                                 *
  *      Vladimir Ilievski     - CERN, Switzerland                                 *
  *      Saurav Shekhar        - CERN, Switzerland                                 *
+ *      Siddhartha Rao Kamalakara - CERN, Switzerland                             *
  *                                                                                *
  * Copyright (c) 2005-2015:                                                       *
  *      CERN, Switzerland                                                         *
@@ -40,6 +41,7 @@
 
 #include "TMVA/DNN/CNN/ConvLayer.h"
 #include "TMVA/DNN/CNN/MaxPoolLayer.h"
+#include "TMVA/DNN/CNN/PaddingLayer.h"
 
 #include "TMVA/DNN/RNN/RNNLayer.h"
 
@@ -167,6 +169,15 @@ class TDeepNet {
     *  the layer is already created. */
    void AddReshapeLayer(TReshapeLayer<Architecture_t> *reshapeLayer);
 
+   /*! Function for adding a Padding Layer to the Deep Neural Network, with given
+    *  top, bottom, left and right paddings. It will take every matrix from the
+    *  previous layer and pad it with zeros to a matrix with new dimensions. */
+   TPaddingLayer2D<Architecture_t> *AddPaddingLayer2D(size_t topPad, size_t bottomPad, size_t leftPad,
+                                                      size_t rightPad);
+
+   /*! Function for adding a Padding Layer to the Deep Neural Network, when
+    *  the layer is already created. */
+   void AddPaddingLayer2D(TPaddingLayer2D<Architecture_t> *paddingLayer);
+
 #ifdef HAVE_DAE /// DAE functions
    /*! Function for adding Corruption layer in the Deep Neural Network,
     * with given number of visibleUnits and hiddenUnits. It corrupts input
@@ -545,6 +556,54 @@ void TDeepNet<Architecture_t, Layer_t>::AddBasicRNNLayer(TBasicRNNLayer<Architecture_t> *basicRNNLayer)
 
+//______________________________________________________________________________
+template <typename Architecture_t, typename Layer_t>
+TPaddingLayer2D<Architecture_t> *TDeepNet<Architecture_t, Layer_t>::AddPaddingLayer2D(size_t topPad, size_t bottomPad,
+                                                                                      size_t leftPad, size_t rightPad)
+{
+   size_t batchSize = this->GetBatchSize();
+   size_t inputDepth;
+   size_t inputHeight;
+   size_t inputWidth;
+   size_t depth;
+   size_t height;
+   size_t width;
+
+   if (fLayers.size() == 0) {
+      inputDepth = this->GetInputDepth();
+      inputHeight = this->GetInputHeight();
+      inputWidth = this->GetInputWidth();
+   } else {
+      Layer_t *lastLayer = fLayers.back();
+      inputDepth = lastLayer->GetDepth();
+      inputHeight = lastLayer->GetHeight();
+      inputWidth = lastLayer->GetWidth();
+   }
+
+   depth = inputDepth;
+   height = inputHeight + topPad + bottomPad;
+   width = inputWidth + leftPad + rightPad;
+
+   TPaddingLayer2D<Architecture_t> *paddingLayer = new TPaddingLayer2D<Architecture_t>(
+      batchSize, inputDepth, inputHeight, inputWidth, depth, height, width, topPad, bottomPad, leftPad, rightPad);
+
+   // fLayers stores the raw pointer; the layer itself is not copied
+   fLayers.push_back(paddingLayer);
+
+   return paddingLayer;
+}
+
+//______________________________________________________________________________
+template <typename Architecture_t, typename Layer_t>
+void TDeepNet<Architecture_t, Layer_t>::AddPaddingLayer2D(TPaddingLayer2D<Architecture_t> *paddingLayer)
+{
+   fLayers.push_back(paddingLayer);
+}
+
+
 //DAE
 #ifdef HAVE_DAE
diff --git a/tmva/tmva/inc/TMVA/MethodDL.h b/tmva/tmva/inc/TMVA/MethodDL.h
index 770233101eadd..e49f6cf7e7fe6 100644
--- a/tmva/tmva/inc/TMVA/MethodDL.h
+++ b/tmva/tmva/inc/TMVA/MethodDL.h
@@ -126,6 +126,11 @@ class MethodDL : public MethodBase {
                            std::vector<DNN::TDeepNet<Architecture_t, Layer_t>> &nets, TString layerString,
                            TString delim);
 
+   template <typename Architecture_t, typename Layer_t>
+   void ParsePaddingLayer2D(DNN::TDeepNet<Architecture_t, Layer_t> &deepNet,
+                            std::vector<DNN::TDeepNet<Architecture_t, Layer_t>> &nets, TString layerString,
+                            TString delim);
+
    template <typename Architecture_t, typename Layer_t>
    void ParseRnnLayer(DNN::TDeepNet<Architecture_t, Layer_t> &deepNet,
                       std::vector<DNN::TDeepNet<Architecture_t, Layer_t>> &nets, TString layerString,
                       TString delim);
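Note (one motivating use, not part of the patch): a padding layer placed before a convolution can emulate "same" output size. For a kH x kW kernel with unit stride, the total padding per axis is kernel size minus one, split as evenly as possible; a hypothetical helper:

    #include <cstddef>

    struct Pads { size_t top, bottom, left, right; };

    // Padding needed so that a kH x kW convolution with stride 1 preserves H x W.
    Pads samePadding(size_t kernelHeight, size_t kernelWidth)
    {
       size_t padH = kernelHeight - 1;
       size_t padW = kernelWidth - 1;
       return { padH / 2, padH - padH / 2, padW / 2, padW - padW / 2 };
    }

    // e.g. Pads p = samePadding(3, 3); // -> {1, 1, 1, 1}
    // deepNet.AddPaddingLayer2D(p.top, p.bottom, p.left, p.right);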
diff --git a/tmva/tmva/src/DNN/Architectures/Cpu/Propagation.cxx b/tmva/tmva/src/DNN/Architectures/Cpu/Propagation.cxx
index cacbf34bb2843..6f9ea67fb4fa0 100644
--- a/tmva/tmva/src/DNN/Architectures/Cpu/Propagation.cxx
+++ b/tmva/tmva/src/DNN/Architectures/Cpu/Propagation.cxx
@@ -579,6 +579,56 @@ void TCpu<AFloat>::MaxPoolLayerBackward(std::vector<TCpuMatrix<AFloat>> &activationGradientsBackward,
    }
 }
 
+//______________________________________________________________________________
+template <typename AFloat>
+void TCpu<AFloat>::ZeroPad2DForward(TCpuMatrix<AFloat> &A, const TCpuMatrix<AFloat> &B,
+                                    size_t topPad, size_t bottomPad, size_t leftPad,
+                                    size_t rightPad, size_t outputHeight, size_t outputWidth)
+{
+   size_t inputHeight = outputHeight - topPad - bottomPad;
+   size_t inputWidth = outputWidth - leftPad - rightPad;
+
+   for (Int_t i = 0; i < (Int_t)A.GetNrows(); i++) {
+      Int_t original_idx = 0;
+      for (Int_t j = 0; j < (Int_t)A.GetNcols(); j++) {
+         Int_t row = j / (Int_t)outputWidth;
+         Int_t col = j - (row * (Int_t)outputWidth);
+         // copy the input into the interior region, zero out the padded border
+         if (row >= (Int_t)topPad && row < (Int_t)(topPad + inputHeight) &&
+             col >= (Int_t)leftPad && col < (Int_t)(leftPad + inputWidth)) {
+            A(i, j) = B(i, original_idx);
+            original_idx++;
+         } else {
+            A(i, j) = 0;
+         }
+      }
+   }
+}
+
+//______________________________________________________________________________
+template <typename AFloat>
+void TCpu<AFloat>::ZeroPad2DBackward(std::vector<TCpuMatrix<AFloat>> &activationGradientsBackward,
+                                     const std::vector<TCpuMatrix<AFloat>> &activationGradients,
+                                     size_t topPad, size_t bottomPad, size_t leftPad,
+                                     size_t rightPad, size_t outputHeight, size_t outputWidth,
+                                     size_t batchSize, size_t depth)
+{
+   size_t inputHeight = outputHeight - topPad - bottomPad;
+   size_t inputWidth = outputWidth - leftPad - rightPad;
+
+   for (size_t i = 0; i < batchSize; i++) {
+      for (size_t j = 0; j < depth; j++) {
+
+         // copy each gradient from its padded position; the gradients
+         // at the padded positions are simply dropped
+         for (size_t t = 0; t < inputHeight * inputWidth; t++) {
+            size_t idx = outputWidth * topPad + (t / inputWidth) * outputWidth + t % inputWidth + leftPad;
+            activationGradientsBackward[i](j, t) = activationGradients[i](j, idx);
+         }
+      }
+   }
+}
+
 //____________________________________________________________________________
 template <typename AFloat>
 void TCpu<AFloat>::Reshape(TCpuMatrix<AFloat> &A, const TCpuMatrix<AFloat> &B)
diff --git a/tmva/tmva/src/DNN/Architectures/Reference/Propagation.cxx b/tmva/tmva/src/DNN/Architectures/Reference/Propagation.cxx
index 88404efc12803..4866c753721c4 100644
--- a/tmva/tmva/src/DNN/Architectures/Reference/Propagation.cxx
+++ b/tmva/tmva/src/DNN/Architectures/Reference/Propagation.cxx
@@ -398,6 +398,55 @@ void TReference<AReal>::MaxPoolLayerBackward(std::vector<TMatrixT<AReal>> &activationGradientsBackward,
    }
 }
 
+//______________________________________________________________________________
+template <typename AReal>
+void TReference<AReal>::ZeroPad2DForward(TMatrixT<AReal> &A, const TMatrixT<AReal> &B,
+                                         size_t topPad, size_t bottomPad, size_t leftPad,
+                                         size_t rightPad, size_t outputHeight, size_t outputWidth)
+{
+   size_t inputHeight = outputHeight - topPad - bottomPad;
+   size_t inputWidth = outputWidth - leftPad - rightPad;
+
+   for (Int_t i = 0; i < A.GetNrows(); i++) {
+      Int_t original_idx = 0;
+      for (Int_t j = 0; j < A.GetNcols(); j++) {
+         Int_t row = j / (Int_t)outputWidth;
+         Int_t col = j - (row * (Int_t)outputWidth);
+         // copy the input into the interior region, zero out the padded border
+         if (row >= (Int_t)topPad && row < (Int_t)(topPad + inputHeight) &&
+             col >= (Int_t)leftPad && col < (Int_t)(leftPad + inputWidth)) {
+            A(i, j) = B(i, original_idx);
+            original_idx++;
+         } else {
+            A(i, j) = 0;
+         }
+      }
+   }
+}
+
+//______________________________________________________________________________
+template <typename AReal>
+void TReference<AReal>::ZeroPad2DBackward(std::vector<TMatrixT<AReal>> &activationGradientsBackward,
+                                          const std::vector<TMatrixT<AReal>> &activationGradients,
+                                          size_t topPad, size_t bottomPad, size_t leftPad,
+                                          size_t rightPad, size_t outputHeight, size_t outputWidth,
+                                          size_t batchSize, size_t depth)
+{
+   size_t inputHeight = outputHeight - topPad - bottomPad;
+   size_t inputWidth = outputWidth - leftPad - rightPad;
+
+   for (size_t i = 0; i < batchSize; i++) {
+      for (size_t j = 0; j < depth; j++) {
+
+         // copy each gradient from its padded position; the gradients
+         // at the padded positions are simply dropped
+         for (size_t t = 0; t < inputHeight * inputWidth; t++) {
+            size_t idx = outputWidth * topPad + (t / inputWidth) * outputWidth + t % inputWidth + leftPad;
+            activationGradientsBackward[i][j][t] = activationGradients[i][j][idx];
+         }
+      }
+   }
+}
+
 //______________________________________________________________________________
 template <typename AReal>
 void TReference<AReal>::Reshape(TMatrixT<AReal> &A, const TMatrixT<AReal> &B)
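Sanity check for the backward index mapping used above: the un-padded flat index t has row t/inputWidth and column t%inputWidth, which land at padded position (topPad + t/inputWidth, leftPad + t%inputWidth), i.e. idx = (topPad + t/inputWidth)*outputWidth + leftPad + t%inputWidth, exactly the expression in the code. A small self-contained check (plain C++, no ROOT needed):

    #include <cassert>
    #include <cstddef>

    int main()
    {
       const size_t topPad = 2, leftPad = 3, rightPad = 1;
       const size_t inputHeight = 4, inputWidth = 5;
       const size_t outputWidth = inputWidth + leftPad + rightPad;

       for (size_t t = 0; t < inputHeight * inputWidth; t++) {
          size_t idx = outputWidth * topPad + (t / inputWidth) * outputWidth + t % inputWidth + leftPad;
          size_t row = topPad + t / inputWidth;  // padded row
          size_t col = leftPad + t % inputWidth; // padded column
          assert(idx == row * outputWidth + col);
       }
       return 0;
    }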
diff --git a/tmva/tmva/src/MethodDL.cxx b/tmva/tmva/src/MethodDL.cxx
index dac6332e00381..070027c74f5df 100644
--- a/tmva/tmva/src/MethodDL.cxx
+++ b/tmva/tmva/src/MethodDL.cxx
@@ -496,6 +496,8 @@ void MethodDL::CreateDeepNet(DNN::TDeepNet<Architecture_t, Layer_t> &deepNet,
       } else if (strLayerType == "LSTM") {
          Log() << kFATAL << "LSTM Layer is not yet fully implemented" << Endl;
          //ParseLstmLayer(deepNet, nets, layerString->GetString(), subDelimiter);
+      } else if (strLayerType == "PADDING2D") {
+         ParsePaddingLayer2D(deepNet, nets, layerString->GetString(), subDelimiter);
       }
    }
 }
@@ -878,6 +880,61 @@ void MethodDL::ParseLstmLayer(DNN::TDeepNet<Architecture_t, Layer_t> & /*deepNet*/,
    }
 }
 
+////////////////////////////////////////////////////////////////////////////////
+/// Parses the layer string and creates the appropriate padding layer
+template <typename Architecture_t, typename Layer_t>
+void MethodDL::ParsePaddingLayer2D(DNN::TDeepNet<Architecture_t, Layer_t> &deepNet,
+                                   std::vector<DNN::TDeepNet<Architecture_t, Layer_t>> & /*nets*/,
+                                   TString layerString, TString delim)
+{
+   int topPad = 0;
+   int bottomPad = 0;
+   int leftPad = 0;
+   int rightPad = 0;
+
+   // expected layout: PADDING2D|topPad|bottomPad|leftPad|rightPad
+
+   // Split layer details
+   TObjArray *subStrings = layerString.Tokenize(delim);
+   TIter nextToken(subStrings);
+   TObjString *token = (TObjString *)nextToken();
+   int idxToken = 0;
+
+   for (; token != nullptr; token = (TObjString *)nextToken()) {
+
+      switch (idxToken) {
+      case 1: // top padding
+      {
+         TString strTopPad(token->GetString());
+         topPad = strTopPad.Atoi();
+      } break;
+      case 2: // bottom padding
+      {
+         TString strBottomPad(token->GetString());
+         bottomPad = strBottomPad.Atoi();
+      } break;
+      case 3: // left padding
+      {
+         TString strLeftPad(token->GetString());
+         leftPad = strLeftPad.Atoi();
+      } break;
+      case 4: // right padding
+      {
+         TString strRightPad(token->GetString());
+         rightPad = strRightPad.Atoi();
+      } break;
+      }
+      ++idxToken;
+   }
+
+   // Add the padding layer
+   deepNet.AddPaddingLayer2D(topPad, bottomPad, leftPad, rightPad);
+
+   // Add the same layer to fNet
+   if (fBuildNet) fNet->AddPaddingLayer2D(topPad, bottomPad, leftPad, rightPad);
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 /// Standard constructor.
 MethodDL::MethodDL(const TString &jobName, const TString &methodTitle, DataSetInfo &theData, const TString &theOption)
@@ -1638,6 +1695,18 @@ void MethodDL::ReadWeightsFromXML(void * rootXML)
          fNet->AddBasicRNNLayer(stateSize, inputSize, timeSteps, rememberState);
 
       }
+      else if (layerName == "PaddingLayer2D") {
+
+         // read padding layer info
+         size_t leftPad = 0, rightPad = 0, topPad = 0, bottomPad = 0;
+         gTools().ReadAttr(layerXML, "LeftPad", leftPad);
+         gTools().ReadAttr(layerXML, "RightPad", rightPad);
+         gTools().ReadAttr(layerXML, "TopPad", topPad);
+         gTools().ReadAttr(layerXML, "BottomPad", bottomPad);
+
+         fNet->AddPaddingLayer2D(topPad, bottomPad, leftPad, rightPad);
+      }
 
       // read eventually weights and biases
diff --git a/tmva/tmva/test/DNN/CNN/TestMethodDL.cxx b/tmva/tmva/test/DNN/CNN/TestMethodDL.cxx
index 678bd4aaff36b..d6d89debf546c 100644
--- a/tmva/tmva/test/DNN/CNN/TestMethodDL.cxx
+++ b/tmva/tmva/test/DNN/CNN/TestMethodDL.cxx
@@ -33,7 +33,7 @@ int main()
 
    TString archCPU = "CPU";
 
-   testMethodDL_DNN(archCPU);
+   //testMethodDL_DNN(archCPU);
 
    testMethodDL_CNN(archCPU);
 }
diff --git a/tmva/tmva/test/DNN/CNN/TestMethodDL.h b/tmva/tmva/test/DNN/CNN/TestMethodDL.h
index 1cac008458e8b..f79c99fd617eb 100644
--- a/tmva/tmva/test/DNN/CNN/TestMethodDL.h
+++ b/tmva/tmva/test/DNN/CNN/TestMethodDL.h
@@ -101,7 +101,7 @@ void testMethodDL_CNN(TString architectureStr)
    TString batchLayoutString("BatchLayout=256|1|64");
 
    // General layout.
-   TString layoutString("Layout=CONV|6|3|3|1|1|0|0|TANH,MAXPOOL|2|2|2|2,RESHAPE|FLAT,DENSE|10|TANH,"
+   TString layoutString("Layout=CONV|6|3|3|1|1|0|0|TANH,PADDING2D|0|0|0|0,MAXPOOL|2|2|2|2,RESHAPE|FLAT,DENSE|10|TANH,"
                         "DENSE|2|LINEAR");
 
    // Training strategies.
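With the parser above, a padding layer is requested from the MethodDL layout option as PADDING2D|topPad|bottomPad|leftPad|rightPad, as the updated test shows with all-zero pads. A sketch of a layout string that instead pads by one pixel on every side before pooling (values illustrative):

    TString layoutString("Layout=CONV|6|3|3|1|1|0|0|TANH,PADDING2D|1|1|1|1,"
                         "MAXPOOL|2|2|2|2,RESHAPE|FLAT,DENSE|10|TANH,DENSE|2|LINEAR");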