mirror of
https://github.com/TheAlgorithms/Python.git
synced 2025-07-05 09:21:13 +08:00
File moved to neural_network/activation_functions (#11216)
* added GELU activation functions file
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* Update gaussian_error_linear_unit.py
* [pre-commit.ci] auto fixes from pre-commit.com hooks (for more information, see https://pre-commit.ci)
* Update gaussian_error_linear_unit.py
* Delete neural_network/activation_functions/gaussian_error_linear_unit.py
* Rename maths/gaussian_error_linear_unit.py to neural_network/activation_functions/gaussian_error_linear_unit.py

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
@@ -0,0 +1,51 @@
"""
This script demonstrates an implementation of the Gaussian Error Linear Unit function.
* https://en.wikipedia.org/wiki/Activation_function#Comparison_of_activation_functions

The function takes a vector of K real numbers as input and returns
x * sigmoid(1.702 * x), the sigmoid-based approximation of GELU.
Gaussian Error Linear Unit (GELU) is a high-performing neural network
activation function.

This script is inspired by the corresponding research paper.
* https://arxiv.org/abs/1606.08415
"""

import numpy as np


def sigmoid(vector: np.ndarray) -> np.ndarray:
    """
    Mathematical function sigmoid takes a vector x of K real numbers as input and
    returns 1 / (1 + e^-x).
    https://en.wikipedia.org/wiki/Sigmoid_function

    >>> sigmoid(np.array([-1.0, 1.0, 2.0]))
    array([0.26894142, 0.73105858, 0.88079708])
    """
    return 1 / (1 + np.exp(-vector))
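

# Why 1.702: this constant makes sigmoid(1.702 * x) closely track the standard
# normal CDF Phi(x), so x * sigmoid(1.702 * x) approximates the exact GELU
# x * Phi(x) from the paper (see the comparison sketch after this file).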
def gaussian_error_linear_unit(vector: np.ndarray) -> np.ndarray:
    """
    Implements the Gaussian Error Linear Unit (GELU) function

    Parameters:
        vector (np.ndarray): A numpy array of shape (1, n) consisting of real values

    Returns:
        gelu_vec (np.ndarray): The input numpy array, after applying gelu

    Examples:
    >>> gaussian_error_linear_unit(np.array([-1.0, 1.0, 2.0]))
    array([-0.15420423,  0.84579577,  1.93565862])

    >>> gaussian_error_linear_unit(np.array([-3]))
    array([-0.01807131])
    """
    return vector * sigmoid(1.702 * vector)


if __name__ == "__main__":
    import doctest

    doctest.testmod()