diff --git a/joey/activation.py b/joey/activation.py
index bec2422..d75dade 100644
--- a/joey/activation.py
+++ b/joey/activation.py
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from sympy import Max, sign
+from sympy import Max, Min, sign
 from devito import Eq
 
 
@@ -50,6 +50,21 @@ def backprop_eqs(self, layer):
                 * Max(0, sign(layer.result[dims])))]
 
 
+class LeakyReLU(Activation):
+    """An Activation subclass corresponding to LeakyReLU."""
+    def __init__(self, negative_slope=0.01):
+        self.negative_slope = negative_slope
+        super().__init__(lambda x: Max(0, x) + negative_slope * Min(0, x))
+
+    def backprop_eqs(self, layer):
+        dims = layer.result_gradients.dimensions
+        return [Eq(layer.result_gradients[dims],
+                   layer.result_gradients[dims]
+                   * (Max(0, sign(layer.result[dims]))
+                      - self.negative_slope
+                      * Min(0, sign(layer.result[dims]))))]
+
+
 class Dummy(Activation):
     """An Activation subclass corresponding to f(x) = x."""
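
As a quick sanity check, here is a minimal standalone sketch (not part of the patch, assuming only sympy) that verifies the two expressions used above: the forward form Max(0, x) + negative_slope * Min(0, x) reproduces LeakyReLU, and the sign-based factor in backprop_eqs reproduces its derivative.

# Standalone sketch: numerically checks the sympy expressions used in
# the LeakyReLU class above. Assumes only sympy; Joey is not required.
from sympy import Max, Min, sign, symbols

x = symbols('x')
ns = 0.01  # negative_slope

# Forward pass: equals x for x > 0 and ns * x for x < 0.
forward = Max(0, x) + ns * Min(0, x)
# Backprop factor: equals 1 for x > 0 and ns for x < 0.
derivative = Max(0, sign(x)) - ns * Min(0, sign(x))

for v in [-2.0, -0.5, 0.5, 2.0]:
    expected_f = v if v > 0 else ns * v
    expected_d = 1 if v > 0 else ns
    assert abs(forward.subs(x, v) - expected_f) < 1e-12
    assert abs(derivative.subs(x, v) - expected_d) < 1e-12

Note that the activation function must be a symbolic sympy expression rather than a Python conditional: Joey applies it to devito/sympy symbols, for which a plain `x if x > 0 else ...` would raise a TypeError because the truth value of a symbolic relational is undefined.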