mirror of
https://github.com/helblazer811/ManimML.git
synced 2025-06-03 16:27:19 +08:00
Bug fixes and linting for the activation functions addition.
This commit is contained in:
@@ -1,6 +1,9 @@
 from manim import *
-from manim_ml.neural_network.activation_functions.activation_function import ActivationFunction
+from manim_ml.neural_network.activation_functions.activation_function import (
+    ActivationFunction,
+)


 class ReLUFunction(ActivationFunction):
     """Rectified Linear Unit Activation Function"""
@@ -12,4 +15,4 @@ class ReLUFunction(ActivationFunction):
         if x_val < 0:
             return 0
         else:
-            return x_val
\ No newline at end of file
+            return x_val
Reference in New Issue
Block a user