"""
This script demonstrates the implementation of the Sigmoid Linear Unit (SiLU)
or swish function.
* https://en.wikipedia.org/wiki/Rectifier_(neural_networks)
* https://en.wikipedia.org/wiki/Swish_function

The function takes a vector x of K real numbers as input and returns
x * sigmoid(x).  Swish is a smooth, non-monotonic function defined as
f(x) = x * sigmoid(x).  Extensive experiments show that Swish consistently
matches or outperforms ReLU on deep networks applied to a variety of
challenging domains such as image classification and machine translation.

This script is inspired by a corresponding research paper.
* https://arxiv.org/abs/1710.05941
"""

import numpy as np


def sigmoid(vector: np.ndarray) -> np.ndarray:
    """
    Mathematical sigmoid function: maps each real value x to 1 / (1 + e^-x),
    applied elementwise to the input array.
    https://en.wikipedia.org/wiki/Sigmoid_function

    >>> sigmoid(np.array([-1.0, 1.0, 2.0]))
    array([0.26894142, 0.73105858, 0.88079708])
    """
    return 1 / (1 + np.exp(-vector))


def sigmoid_linear_unit(vector: np.ndarray) -> np.ndarray:
    """
    Implements the Sigmoid Linear Unit (SiLU), also known as the swish
    function: f(x) = x * sigmoid(x), applied elementwise.

    Parameters:
        vector (np.ndarray): A numpy array consisting of real values.

    Returns:
        np.ndarray: The input numpy array, after applying swish.

    Examples:
    >>> sigmoid_linear_unit(np.array([-1.0, 1.0, 2.0]))
    array([-0.26894142,  0.73105858,  1.76159416])

    >>> sigmoid_linear_unit(np.array([-2]))
    array([-0.23840584])
    """
    return vector * sigmoid(vector)


if __name__ == "__main__":
    import doctest

    doctest.testmod()