From 5a4ea233cd30723628fb184bc05f969ad463b0af Mon Sep 17 00:00:00 2001
From: Kotmin <70173732+Kotmin@users.noreply.github.com>
Date: Mon, 4 Sep 2023 19:38:26 +0200
Subject: [PATCH] Style sigmoid function in harmony with pep guideness (#6677)

* Style sigmoid function in harmony with pep guideness

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* Apply suggestions from code review

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Tianyi Zheng
---
 neural_network/back_propagation_neural_network.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/neural_network/back_propagation_neural_network.py b/neural_network/back_propagation_neural_network.py
index 9dd112115..bdd096b3f 100644
--- a/neural_network/back_propagation_neural_network.py
+++ b/neural_network/back_propagation_neural_network.py
@@ -21,8 +21,8 @@ import numpy as np
 from matplotlib import pyplot as plt
 
 
-def sigmoid(x):
-    return 1 / (1 + np.exp(-1 * x))
+def sigmoid(x: np.ndarray) -> np.ndarray:
+    return 1 / (1 + np.exp(-x))
 
 
 class DenseLayer:
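
For reference, a minimal standalone sketch (not part of the patch itself) of how the sigmoid reads after this change; the sample input values below are illustrative only, not taken from the repository.

# Sketch: the refactored sigmoid with type hints and the simplified exponent.
import numpy as np


def sigmoid(x: np.ndarray) -> np.ndarray:
    # Element-wise logistic function: 1 / (1 + e^(-x))
    return 1 / (1 + np.exp(-x))


if __name__ == "__main__":
    values = np.array([-2.0, 0.0, 2.0])
    print(sigmoid(values))  # approximately [0.1192, 0.5, 0.8808]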