This repository was archived by the owner on Nov 3, 2022. It is now read-only.

Added DropConnect wrapper #438

Open · wants to merge 7 commits into master
1 change: 1 addition & 0 deletions keras_contrib/wrappers/__init__.py
@@ -0,0 +1 @@
from .dropconnect import DropConnect
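
This export makes the wrapper importable directly from the package, e.g. from keras_contrib.wrappers import DropConnect.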
52 changes: 52 additions & 0 deletions keras_contrib/wrappers/dropconnect.py
@@ -0,0 +1,52 @@
from keras import backend as K
from keras.layers.wrappers import Wrapper


class DropConnect(Wrapper):
"""
An implementation of DropConnect wrapper in Keras.
This layer drops connections between a one layer and
the next layer randomly with a given probability (rather
than dropping activations as in classic Dropout).

This wrapper can be used to drop the connections from
any Keras layer with weights and biases (Dense, LSTM etc)

#Example usage
dense = DropConnect(Dense(10, activation='sigmoid'), prob=0.05)
lstm = DropConnect(LSTM(20, activation='relu'), prob=0.2)

#Arguments
layer : Any Keras layer (instance of Layer class)
prob : dropout rate (probability)

#References
https://github.com/andry9454/KerasDropconnect/blob/master/ddrop/layers.py
"""
    def __init__(self, layer, prob=0.1, **kwargs):
        self.prob = prob
        self.layer = layer
        super(DropConnect, self).__init__(layer, **kwargs)
        if 0. < self.prob < 1.:
            self.uses_learning_phase = True

    def build(self, input_shape):
        # Build the wrapped layer if it has not been built yet.
        if not self.layer.built:
            self.layer.build(input_shape)
            self.layer.built = True

    def compute_output_shape(self, input_shape):
        return self.layer.compute_output_shape(input_shape)

    def call(self, x):
        if 0. < self.prob < 1.:
            # At training time, zero out individual weights (and biases,
            # if present) with probability `prob`; at test time the
            # original parameters are used unchanged.
            self.layer.kernel = K.in_train_phase(
                K.dropout(self.layer.kernel, self.prob),
                self.layer.kernel)
            if getattr(self.layer, 'bias', None) is not None:
                self.layer.bias = K.in_train_phase(
                    K.dropout(self.layer.bias, self.prob),
                    self.layer.bias)
        return self.layer.call(x)

    def get_config(self):
        # Include the wrapper's own `prob` alongside the base Wrapper
        # config (which already serializes the wrapped layer).
        config = {'prob': self.prob}
        base_config = super(DropConnect, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
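
A minimal usage sketch (not part of the diff), assuming Keras 2 and this branch of keras-contrib are installed, showing the wrapper applied inside a small model:

from keras.models import Sequential
from keras.layers import Dense
from keras_contrib.wrappers import DropConnect

# Wrap a Dense layer so that its weights are dropped with
# probability 0.2 during training.
model = Sequential([
    DropConnect(Dense(64, activation='relu'), prob=0.2,
                input_shape=(100,)),
    Dense(10, activation='softmax'),
])
model.compile(optimizer='adam', loss='categorical_crossentropy')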