forked from root-project/root
-
Notifications
You must be signed in to change notification settings - Fork 36
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Padding Layer #15
Open
srk97
wants to merge
8
commits into
tmvadnn:master
Choose a base branch
from
srk97:tmp
base: master
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Padding Layer #15
Changes from all commits
Commits
Show all changes
8 commits
Select commit
Hold shift + click to select a range
bc39fbe
Initial padding commit
srk97 7d6ebb6
Add Padding layer skeleton
srk97 de96881
Implementation of Padding Layer
srk97 d894b7d
Fix build errors
srk97 7ed053d
Fix types and build errors
srk97 cd04501
Fix Padding Layer bugs
srk97 9a39727
Fix XML parsing errors
srk97 53a99e2
Change layer to ZeroPadding2D
srk97 File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,218 @@ | ||
// @(#)root/tmva/tmva/dnn:$Id$ | ||
// Author: Siddhartha Rao Kamalakara | ||
|
||
/********************************************************************************** | ||
* Project: TMVA - a Root-integrated toolkit for multivariate data analysis * | ||
* Package: TMVA * | ||
 * Class  : TPaddingLayer2D                                                       *
* Web : http://tmva.sourceforge.net * | ||
* * | ||
* Description: * | ||
* Padding Layer * | ||
* * | ||
* Authors (alphabetical): * | ||
* Siddhartha Rao Kamalakara <[email protected]> - CERN, Switzerland * | ||
* * | ||
* Copyright (c) 2005-2015: * | ||
* CERN, Switzerland * | ||
* U. of Victoria, Canada * | ||
* MPI-K Heidelberg, Germany * | ||
* U. of Bonn, Germany * | ||
* * | ||
* Redistribution and use in source and binary forms, with or without * | ||
* modification, are permitted according to the terms listed in LICENSE * | ||
* (http://tmva.sourceforge.net/LICENSE) * | ||
**********************************************************************************/ | ||
|
||
#ifndef TMVA_CNN_PADDINGLAYER2D | ||
#define TMVA_CNN_PADDINGLAYER2D | ||
|
||
#include "TMatrix.h" | ||
|
||
#include "TMVA/DNN/GeneralLayer.h" | ||
#include "TMVA/DNN/Functions.h" | ||
|
||
#include <vector> | ||
#include <iostream> | ||
|
||
namespace TMVA { | ||
namespace DNN { | ||
namespace CNN { | ||
|
||
template <typename Architecture_t> | ||
class TPaddingLayer2D : public VGeneralLayer<Architecture_t> | ||
{ | ||
|
||
public: | ||
using Matrix_t = typename Architecture_t::Matrix_t; | ||
using Scalar_t = typename Architecture_t::Scalar_t; | ||
|
||
private: | ||
size_t fTopPad; | ||
size_t fBottomPad; | ||
size_t fLeftPad; | ||
size_t fRightPad; | ||
size_t outputWidth; | ||
size_t outputHeight; | ||
|
||
size_t calculateDimension(size_t imgHeight, size_t imgWidth, size_t pad_left, size_t pad_right, size_t pad_top, size_t pad_bottom); | ||
|
||
public: | ||
/*! Constructor. */ | ||
TPaddingLayer2D(size_t BatchSize, size_t inputDepth, size_t inputHeight, size_t inputWidth, size_t depth, size_t height, size_t width, size_t TopPad, size_t BottomPad, size_t LeftPad, size_t RightPad); | ||
|
||
/*! Copy the conv layer provided as a pointer */ | ||
TPaddingLayer2D(TPaddingLayer2D<Architecture_t> *layer); | ||
|
||
/*! Copy constructor. */ | ||
TPaddingLayer2D(const TPaddingLayer2D &); | ||
|
||
/*! Destructor. */ | ||
~TPaddingLayer2D(); | ||
|
||
/*! Pads the input array with the dimensions given by | ||
* the user. Padding is done in two dimensions for each | ||
* example in the batch */ | ||
void Forward(std::vector<Matrix_t> &input, bool applyDropout = false); | ||
|
||
/*! Discards the gradients through the padded inputs | ||
* since they are zero padded */ | ||
void Backward(std::vector<Matrix_t> &gradients_backward, | ||
const std::vector<Matrix_t> & /*activations_backward*/, | ||
std::vector<Matrix_t> & /*inp1*/, std::vector<Matrix_t> & | ||
/*inp2*/); | ||
|
||
/*! Writes the information and the weights about the layer in an XML node. */ | ||
virtual void AddWeightsXMLTo(void *parent); | ||
|
||
/*! Read the information and the weights about the layer from XML node. */ | ||
virtual void ReadWeightsFromXML(void *parent); | ||
|
||
/*! Prints the info about the layer. */ | ||
void Print() const; | ||
|
||
size_t GetTopPadding() const {return fTopPad;} | ||
|
||
size_t GetBottomPadding() const {return fBottomPad;} | ||
|
||
size_t GetLeftPadding() const {return fLeftPad;} | ||
|
||
size_t GetRightPadding() const {return fRightPad;} | ||
|
||
size_t GetOutputHeight() const {return outputHeight;} | ||
|
||
size_t GetOutputWidth() const {return outputWidth;} | ||
|
||
|
||
}; | ||
|
||
template <typename Architecture_t> | ||
TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(size_t batchSize, size_t inputDepth, size_t inputHeight, size_t inputWidth, | ||
size_t depth, size_t height, size_t width, | ||
size_t topPad, size_t bottomPad, size_t leftPad, size_t rightPad) | ||
: VGeneralLayer<Architecture_t>(batchSize, inputDepth, inputHeight, inputWidth, depth, height, width, 0, 0, 0, 0, 0, | ||
0, batchSize, inputDepth, calculateDimension(inputHeight, inputWidth, leftPad, rightPad, topPad, bottomPad), EInitialization::kZero), | ||
fTopPad(topPad), fBottomPad(bottomPad), fLeftPad(leftPad), fRightPad(rightPad) | ||
{ | ||
|
||
this->outputHeight = inputHeight + topPad + bottomPad; | ||
this->outputWidth = inputWidth + leftPad + rightPad; | ||
} | ||
|
||
|
||
//_________________________________________________________________________________________________ | ||
template <typename Architecture_t> | ||
TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(TPaddingLayer2D<Architecture_t> *layer) | ||
: VGeneralLayer<Architecture_t>(layer), fTopPad(layer->GetTopPadding()), fBottomPad(layer->GetBottomPadding()), | ||
fLeftPad(layer->GetLeftPadding()), fRightPad(layer->GetRightPadding()) | ||
{ | ||
} | ||
|
||
//_________________________________________________________________________________________________ | ||
template <typename Architecture_t> | ||
TPaddingLayer2D<Architecture_t>::TPaddingLayer2D(const TPaddingLayer2D &layer) | ||
: VGeneralLayer<Architecture_t>(layer), fTopPad(layer.fTopPad), fBottomPad(layer.fBottomPad), | ||
fLeftPad(layer.fLeftPad), fRightPad(layer.fRightPad) | ||
{ | ||
// Nothing to do here. | ||
} | ||
|
||
//_________________________________________________________________________________________________ | ||
template <typename Architecture_t> | ||
TPaddingLayer2D<Architecture_t>::~TPaddingLayer2D() | ||
{ | ||
// Nothing to do here. | ||
} | ||
|
||
//_________________________________________________________________________________________________ | ||
template <typename Architecture_t> | ||
auto TPaddingLayer2D<Architecture_t>::Forward(std::vector<Matrix_t> &input, bool /*applyDropout*/) -> void | ||
{ | ||
|
||
for (size_t i = 0; i < this->GetBatchSize(); i++) { | ||
Architecture_t::ZeroPad2DForward(this->GetOutputAt(i), input[i], fTopPad, fBottomPad, fLeftPad, fRightPad, this->GetOutputHeight(), this->GetOutputWidth()); | ||
} | ||
|
||
} | ||
|
||
//_________________________________________________________________________________________________
// Propagates the activation gradients back through the padding: the backend
// copies the gradient of the un-padded interior region into
// gradients_backward and discards the gradient of the zero-padded border.
// activations_backward, inp1 and inp2 are unused because padding has no
// trainable parameters and its backward pass does not depend on activations.
template <typename Architecture_t>
auto TPaddingLayer2D<Architecture_t>::Backward(std::vector<Matrix_t> &gradients_backward,
                                               const std::vector<Matrix_t> & /*activations_backward*/,
                                               std::vector<Matrix_t> & /*inp1*/, std::vector<Matrix_t> &
                                               /*inp2*/) -> void
{
   // NOTE(review): the exact argument contract of ZeroPad2DBackward is defined
   // by the architecture backend -- confirm the height/width passed here are
   // expected to be the *padded* output dimensions.
   Architecture_t::ZeroPad2DBackward(gradients_backward, this->GetActivationGradients(), fTopPad, fBottomPad, fLeftPad,
                                     fRightPad, this->GetOutputHeight(), this->GetOutputWidth(), this->GetBatchSize(),
                                     this->GetDepth());
}
|
||
//_________________________________________________________________________________________________ | ||
template <typename Architecture_t> | ||
auto TPaddingLayer2D<Architecture_t>::Print() const -> void | ||
{ | ||
std::cout << " PADDING Layer \t "; | ||
std::cout << "Input = ( " << this->GetInputDepth() << " , " << this->GetInputHeight() << " , " << this->GetInputWidth() << " ) "; | ||
if (this->GetOutput().size() > 0) { | ||
std::cout << "\tOutput = ( " << this->GetOutput().size() << " , " << this->GetOutput()[0].GetNrows() << " , " << this->GetOutput()[0].GetNcols() << " ) "; | ||
} | ||
std::cout << std::endl; | ||
} | ||
|
||
template <typename Architecture_t> | ||
auto TPaddingLayer2D<Architecture_t>::AddWeightsXMLTo(void *parent) -> void | ||
{ | ||
auto layerxml = gTools().xmlengine().NewChild(parent, 0, "PaddingLayer2D"); | ||
|
||
// write info for padding layer | ||
gTools().xmlengine().NewAttr(layerxml, 0, "LeftPad", gTools().StringFromInt(this->GetLeftPadding())); | ||
gTools().xmlengine().NewAttr(layerxml, 0, "RightPad", gTools().StringFromInt(this->GetRightPadding())); | ||
gTools().xmlengine().NewAttr(layerxml, 0, "TopPad", gTools().StringFromInt(this->GetTopPadding())); | ||
gTools().xmlengine().NewAttr(layerxml, 0, "BottomPad", gTools().StringFromInt(this->GetBottomPadding())); | ||
|
||
|
||
} | ||
|
||
//______________________________________________________________________________
// No state to restore: the layer has no weights, and the padding amounts
// written by AddWeightsXMLTo are presumably re-supplied through the
// constructor when the network is rebuilt from XML -- TODO confirm against
// the deserialization code path.
template <typename Architecture_t>
void TPaddingLayer2D<Architecture_t>::ReadWeightsFromXML(void * /*parent*/)
{
   // no info to read
}
|
||
|
||
template <typename Architecture_t> | ||
size_t TPaddingLayer2D<Architecture_t>::calculateDimension(size_t imgHeight, size_t imgWidth, size_t pad_left, size_t pad_right, size_t pad_top, size_t pad_bottom){ | ||
|
||
size_t height = imgHeight + pad_top + pad_bottom; | ||
size_t width = imgWidth + pad_left + pad_right; | ||
|
||
return height*width; | ||
} | ||
|
||
|
||
} // namespace CNN
} // namespace DNN
} // namespace TMVA
|
||
#endif |
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Create private functions for this? The calculation can be reused in constructor for options "valid" and "full".
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes. I have made this change already. I'll push it soon.