Skip to content

Commit 82353da

Browse files
committed
Remove LRN2D layer.
1 parent be46766 commit 82353da

File tree

1 file changed

+0
-41
lines changed

1 file changed

+0
-41
lines changed

keras/layers/normalization.py

Lines changed: 0 additions & 41 deletions
Original file line number | Diff line number | Diff line change
@@ -80,44 +80,3 @@ def get_config(self):
8080
"momentum": self.momentum}
8181
base_config = super(BatchNormalization, self).get_config()
8282
return dict(list(base_config.items()) + list(config.items()))
83-
84-
85-
class LRN2D(Layer):
    """
    This code is adapted from pylearn2.
    License at: https://github.com/lisa-lab/pylearn2/blob/master/LICENSE.txt
    """

    def __init__(self, alpha=1e-4, k=2, beta=0.75, n=5, **kwargs):
        # A symmetric window around each channel requires an odd size.
        if n % 2 == 0:
            raise NotImplementedError("LRN2D only works with odd n. n provided: " + str(n))
        super(LRN2D, self).__init__(**kwargs)
        self.alpha = alpha
        self.k = k
        self.beta = beta
        self.n = n

    def get_output(self, train):
        # Cross-channel local response normalization: each activation is
        # divided by (k + alpha * sum of squares over n neighbouring
        # channels) ** beta.
        inputs = self.get_input(train)
        batch, channels, rows, cols = K.shape(inputs)
        half = self.n // 2
        squared = K.square(inputs)
        # Zero-pad the squared activations along the channel axis so that
        # every channel sees a full window of n neighbours.
        padding = K.zeros((batch, channels + 2 * half, rows, cols))
        left_pad = padding[:, :half, :, :]
        right_pad = padding[:, half + channels:, :, :]
        squared = K.concatenate([left_pad, squared, right_pad], axis=1)
        # Accumulate the n shifted windows of squared neighbours.
        denom = self.k
        for offset in range(self.n):
            denom += self.alpha * squared[:, offset:offset + channels, :, :]
        denom = denom ** self.beta
        return inputs / denom

    def get_config(self):
        # Serialize constructor arguments so the layer can be rebuilt
        # from its config; merged with the base class config.
        config = {"name": self.__class__.__name__,
                  "alpha": self.alpha,
                  "k": self.k,
                  "beta": self.beta,
                  "n": self.n}
        base_config = super(LRN2D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))

0 commit comments

Comments
 (0)