From 80bfbab9a2c9029742d80f712508a6c83f72547f Mon Sep 17 00:00:00 2001
From: Yuanchen
Date: Tue, 14 Feb 2023 13:59:13 +0000
Subject: [PATCH] add LeakyReLU activation

---
 joey/activation.py | 22 +++++++++++++++++++++-
 1 file changed, 21 insertions(+), 1 deletion(-)

diff --git a/joey/activation.py b/joey/activation.py
index bec2422..aa74767 100644
--- a/joey/activation.py
+++ b/joey/activation.py
@@ -1,5 +1,5 @@
 from abc import ABC, abstractmethod
-from sympy import Max, sign
+from sympy import Max, Min, sign
 
 from devito import Eq
 
@@ -50,6 +50,26 @@ def backprop_eqs(self, layer):
                    * Max(0, sign(layer.result[dims])))]
 
 
+class LeakyReLU(Activation):
+    """An Activation subclass corresponding to LeakyReLU."""
+
+    def __init__(self, negative_slope=0.01):
+        self.negative_slope = negative_slope
+        # Symbolic form of f(x) = x if x > 0 else negative_slope * x;
+        # a Python conditional cannot be evaluated on a symbolic x.
+        super().__init__(lambda x: Max(0, x) + negative_slope * Min(0, x))
+
+    def backprop_eqs(self, layer):
+        dims = layer.result_gradients.dimensions
+        # Scale the incoming gradient by 1 where the result is positive
+        # and by negative_slope where it is negative.
+        return [Eq(layer.result_gradients[dims],
+                   layer.result_gradients[dims]
+                   * (Max(0, sign(layer.result[dims]))
+                      - self.negative_slope
+                      * Min(0, sign(layer.result[dims]))))]
+
+
 class Dummy(Activation):
     """An Activation subclass corresponding to f(x) = x."""
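
For reference, a minimal usage sketch follows (not part of the patch). It assumes Joey's layer constructors accept an activation argument, as they do for the existing ReLU; the layer type and shape parameters below are illustrative placeholders, not values taken from this change:

    import joey as ml
    from joey.activation import LeakyReLU

    # Hypothetical fully connected layer wired up with the new activation.
    # weight_size/input_size are placeholder shapes; only the activation
    # keyword is the point of this example.
    layer = ml.FullyConnected(weight_size=(10, 784),
                              input_size=(784, 4),
                              activation=LeakyReLU(negative_slope=0.01))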