Bug fixes and linting for the activation functions addition.

Alec Helbling
2023-01-25 08:40:32 -05:00
parent ce184af78e
commit f56620f047
42 changed files with 1275 additions and 387 deletions


@@ -1,6 +1,9 @@
 from manim import *
-from manim_ml.neural_network.activation_functions.activation_function import ActivationFunction
+from manim_ml.neural_network.activation_functions.activation_function import (
+    ActivationFunction,
+)
 
 class ReLUFunction(ActivationFunction):
     """Rectified Linear Unit Activation Function"""
@@ -12,4 +15,4 @@ class ReLUFunction(ActivationFunction):
         if x_val < 0:
             return 0
         else:
-            return x_val
+            return x_val
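
For reference, below is a minimal, self-contained sketch of the ReLU logic this diff touches. The hunk header only shows that the body lives inside ReLUFunction, so the standalone function name `relu` here is illustrative, not the actual method name in the file.

# Minimal sketch of the ReLU piecewise logic shown in the diff above.
# The name `relu` is an assumption for illustration; in the real file
# this body sits inside a method of ReLUFunction.
def relu(x_val: float) -> float:
    """Rectified Linear Unit: returns max(0, x_val)."""
    if x_val < 0:
        return 0
    else:
        return x_val

# Quick check of the two branches.
assert relu(-2.0) == 0
assert relu(3.5) == 3.5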