Mirror of https://github.com/helblazer811/ManimML.git, synced 2025-05-23 05:25:56 +08:00
General changes; got a basic visualization of an activation function working for a convolutional layer.
manim_ml/neural_network/activation_functions/relu.py (new file, 15 lines added)
@@ -0,0 +1,15 @@
from manim import *

from manim_ml.neural_network.activation_functions.activation_function import ActivationFunction

class ReLUFunction(ActivationFunction):
    """Rectified Linear Unit Activation Function"""

    def __init__(self, function_name="ReLU", x_range=[-1, 1], y_range=[-1, 1]):
        super().__init__(function_name, x_range, y_range)

    def apply_function(self, x_val):
        if x_val < 0:
            return 0
        else:
            return x_val
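For context, apply_function implements the standard ReLU mapping f(x) = max(0, x): negative inputs map to zero and non-negative inputs pass through unchanged, which the ActivationFunction base class can then sample over x_range to plot the curve next to a layer. Below is a minimal, hypothetical usage sketch of attaching this activation to a convolutional layer. It assumes the activation_function keyword on Convolutional2DLayer and the make_forward_pass_animation helper shown in the project README; the exact constructor signature at the time of this commit may differ, and the layer sizes are illustrative.

from manim import *
from manim_ml.neural_network import Convolutional2DLayer, FeedForwardLayer, NeuralNetwork

class ConvolutionalReLUScene(Scene):
    def construct(self):
        # Small CNN where the middle convolutional layer renders a ReLU
        # activation-function plot during the forward pass animation.
        nn = NeuralNetwork(
            [
                Convolutional2DLayer(1, 7, 3, filter_spacing=0.32),
                Convolutional2DLayer(3, 5, 3, filter_spacing=0.32, activation_function="ReLU"),
                FeedForwardLayer(3),
            ],
            layer_spacing=0.25,
        )
        self.add(nn)
        self.play(nn.make_forward_pass_animation())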