General changes, got basic visualization of an activation function working for a convolutional layer.
This commit is contained in:
Alec Helbling
2023-01-24 15:44:48 -05:00
parent 5291d9db8a
commit ce184af78e
34 changed files with 1575 additions and 479 deletions

@@ -0,0 +1,15 @@
from manim import *
from manim_ml.neural_network.activation_functions.activation_function import ActivationFunction

class ReLUFunction(ActivationFunction):
    """Rectified Linear Unit Activation Function"""

    def __init__(self, function_name="ReLU", x_range=[-1, 1], y_range=[-1, 1]):
        super().__init__(function_name, x_range, y_range)

    def apply_function(self, x_val):
        # Clamp negative inputs to zero; pass non-negative values through unchanged
        if x_val < 0:
            return 0
        else:
            return x_val
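
For context, a minimal usage sketch (not part of this diff) of what the commit message describes: attaching an activation function visualization to a convolutional layer. The import paths, the Convolutional2DLayer constructor arguments, and the activation_function keyword are assumptions based on later manim_ml usage and may not match the API at this exact commit.

# Hypothetical usage sketch; names and signatures are assumptions, not taken from this commit.
from manim import *
from manim_ml.neural_network import NeuralNetwork, Convolutional2DLayer

class ConvReLUScene(ThreeDScene):
    def construct(self):
        # Two convolutional layers; the second is assumed to accept an
        # activation_function keyword that triggers the ReLU visualization
        nn = NeuralNetwork([
            Convolutional2DLayer(1, 7, 3),
            Convolutional2DLayer(3, 5, 3, activation_function="ReLU"),
        ])
        self.add(nn)
        # Animate a forward pass, during which the activation function plot is shown
        self.play(nn.make_forward_pass_animation())

Passing the string "ReLU" (the default function_name in ReLUFunction above) rather than an instance is an assumption about how the layer looks up the activation class; constructing ReLUFunction directly may also be supported.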