From 4b06ce16223eaa042738776e83a49338fb93fc69 Mon Sep 17 00:00:00 2001
From: Alec Helbling
Date: Tue, 31 Jan 2023 23:04:23 -0500
Subject: [PATCH] Added padding. Fixed a bug with ImageLayerToConvolutional2D

Padding example
---
 examples/cnn/cnn.py                            |   4 +-
 examples/cnn/cnn_max_pool.py                   |   4 +-
 examples/cnn/one_by_one_convolution.py         |   5 +-
 examples/cnn/padding_example.py                |  82 +++++++++
 examples/lenet/lenet.py                        |  53 ++++++
 .../translation_equivariance.py                |   2 -
 manim_ml/gridded_rectangle.py                  |  48 ++++--
 .../neural_network/layers/convolutional_2d.py  | 158 ++++++++++++++----
 .../convolutional_2d_to_convolutional_2d.py    |  33 +++-
 manim_ml/neural_network/neural_network.py      |  27 ++-
 setup.py                                       |   2 +-
 tests/.DS_Store                                | Bin 8196 -> 8196 bytes
 tests/control_data/padding/ConvPadding.npz     | Bin 0 -> 14368 bytes
 tests/test_activation_function.py              |   7 +-
 tests/test_conv_padding.py                     |  74 ++++++++
 tests/test_convolutional_2d_layer.py           |   5 +-
 tests/test_feed_forward.py                     |  13 +-
 tests/test_gridded_rectangle.py                |  12 ++
 tests/test_max_pool.py                         |  10 +-
 tests/test_nn_scale.py                         |   9 +-
 20 files changed, 445 insertions(+), 103 deletions(-)
 create mode 100644 examples/cnn/padding_example.py
 create mode 100644 examples/lenet/lenet.py
 create mode 100644 tests/control_data/padding/ConvPadding.npz
 create mode 100644 tests/test_conv_padding.py
 create mode 100644 tests/test_gridded_rectangle.py

diff --git a/examples/cnn/cnn.py b/examples/cnn/cnn.py
index cb2463d..a779f2a 100644
--- a/examples/cnn/cnn.py
+++ b/examples/cnn/cnn.py
@@ -72,8 +72,6 @@ class CombinedScene(ThreeDScene):
         # group.move_to(ORIGIN)
         nn.move_to(ORIGIN)
         # Play animation
-        forward_pass = nn.make_forward_pass_animation(
-            corner_pulses=False, all_filters_at_once=False
-        )
+        forward_pass = nn.make_forward_pass_animation()
         self.wait(1)
         self.play(forward_pass)
diff --git a/examples/cnn/cnn_max_pool.py b/examples/cnn/cnn_max_pool.py
index 3faf358..4a5f5fb 100644
--- a/examples/cnn/cnn_max_pool.py
+++ b/examples/cnn/cnn_max_pool.py
@@ -65,8 +65,6 @@ class CombinedScene(ThreeDScene):
         self.add(code)
         self.wait(5)
         # Play animation
-        forward_pass = nn.make_forward_pass_animation(
-            corner_pulses=False, all_filters_at_once=False
-        )
+        forward_pass = nn.make_forward_pass_animation()
         self.wait(1)
         self.play(forward_pass)
\ No newline at end of file
diff --git a/examples/cnn/one_by_one_convolution.py b/examples/cnn/one_by_one_convolution.py
index 258dd63..2668885 100644
--- a/examples/cnn/one_by_one_convolution.py
+++ b/examples/cnn/one_by_one_convolution.py
@@ -68,9 +68,6 @@ class CombinedScene(ThreeDScene):
         group = Group(nn, code)
         group.move_to(ORIGIN)
         # Play animation
-        forward_pass = nn.make_forward_pass_animation(
-            corner_pulses=False,
-            all_filters_at_once=False,
-        )
+        forward_pass = nn.make_forward_pass_animation()
         self.wait(1)
         self.play(forward_pass)
diff --git a/examples/cnn/padding_example.py b/examples/cnn/padding_example.py
new file mode 100644
index 0000000..6b65cbf
--- /dev/null
+++ b/examples/cnn/padding_example.py
+from pathlib import Path
+
+from manim import *
+from manim_ml.neural_network.layers.image import ImageLayer
+import numpy as np
+from PIL import Image
+
+from manim_ml.neural_network.layers.convolutional_2d import Convolutional2DLayer
+from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer
+from manim_ml.neural_network.neural_network import NeuralNetwork
+
+ROOT_DIR = Path(__file__).parents[2]
+
+# Make the specific scene
+config.pixel_height = 1200
+config.pixel_width = 1900
+config.frame_height = 6.0
+config.frame_width = 6.0
+
+def 
make_code_snippet(): + code_str = """ + # Make nn + nn = NeuralNetwork([ + ImageLayer(numpy_image), + Convolutional2DLayer(1, 6, 1, padding=1), + Convolutional2DLayer(3, 6, 3), + FeedForwardLayer(3), + FeedForwardLayer(1), + ]) + # Play animation + self.play(nn.make_forward_pass_animation()) + """ + + code = Code( + code=code_str, + tab_width=4, + background_stroke_width=1, + background_stroke_color=WHITE, + insert_line_no=False, + style="monokai", + # background="window", + language="py", + ) + code.scale(0.38) + + return code + +class CombinedScene(ThreeDScene): + + def construct(self): + # Make nn + image = Image.open(ROOT_DIR / "assets/mnist/digit.jpeg") + numpy_image = np.asarray(image) + # Make nn + nn = NeuralNetwork([ + ImageLayer(numpy_image, height=1.5), + Convolutional2DLayer( + num_feature_maps=1, + feature_map_size=6, + padding=1, + padding_dashed=True + ), + Convolutional2DLayer( + num_feature_maps=3, + feature_map_size=6, + filter_size=3, + padding=0, + padding_dashed=False + ), + FeedForwardLayer(3), + FeedForwardLayer(1), + ], + layer_spacing=0.25, + ) + # Center the nn + self.add(nn) + code = make_code_snippet() + code.next_to(nn, DOWN) + self.add(code) + Group(code, nn).move_to(ORIGIN) + # Play animation + forward_pass = nn.make_forward_pass_animation() + self.wait(1) + self.play(forward_pass, run_time=20) diff --git a/examples/lenet/lenet.py b/examples/lenet/lenet.py new file mode 100644 index 0000000..a218123 --- /dev/null +++ b/examples/lenet/lenet.py @@ -0,0 +1,53 @@ +from pathlib import Path + +from manim import * +from PIL import Image +import numpy as np + +from manim_ml.neural_network.layers.convolutional_2d import Convolutional2DLayer +from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer +from manim_ml.neural_network.layers.image import ImageLayer +from manim_ml.neural_network.layers.max_pooling_2d import MaxPooling2DLayer +from manim_ml.neural_network.neural_network import NeuralNetwork + +# Make the specific scene +config.pixel_height = 1200 +config.pixel_width = 1900 +config.frame_height = 20.0 +config.frame_width = 20.0 + +ROOT_DIR = Path(__file__).parents[2] + +class CombinedScene(ThreeDScene): + def construct(self): + image = Image.open(ROOT_DIR / "assets/mnist/digit.jpeg") + numpy_image = np.asarray(image) + # Make nn + nn = NeuralNetwork([ + ImageLayer(numpy_image, height=4.5), + Convolutional2DLayer(1, 28), + Convolutional2DLayer(6, 28, 5), + MaxPooling2DLayer(kernel_size=2), + Convolutional2DLayer(16, 10, 5), + MaxPooling2DLayer(kernel_size=2), + FeedForwardLayer(8), + FeedForwardLayer(3), + FeedForwardLayer(2), + ], + layer_spacing=0.25, + ) + # Center the nn + nn.move_to(ORIGIN) + self.add(nn) + # Make code snippet + # code = make_code_snippet() + # code.next_to(nn, DOWN) + # self.add(code) + # Group it all + # group = Group(nn, code) + # group.move_to(ORIGIN) + nn.move_to(ORIGIN) + # Play animation + # forward_pass = nn.make_forward_pass_animation() + # self.wait(1) + # self.play(forward_pass) diff --git a/examples/translation_equivariance/translation_equivariance.py b/examples/translation_equivariance/translation_equivariance.py index d0b9ac9..c4726f7 100644 --- a/examples/translation_equivariance/translation_equivariance.py +++ b/examples/translation_equivariance/translation_equivariance.py @@ -37,8 +37,6 @@ class CombinedScene(ThreeDScene): self.add(nn) # Play animation forward_pass = nn.make_forward_pass_animation( - corner_pulses=False, - all_filters_at_once=False, highlight_active_feature_map=True, ) self.wait(1) diff --git 
a/manim_ml/gridded_rectangle.py b/manim_ml/gridded_rectangle.py index 29123bf..d0d80ab 100644 --- a/manim_ml/gridded_rectangle.py +++ b/manim_ml/gridded_rectangle.py @@ -14,12 +14,13 @@ class GriddedRectangle(VGroup): close_new_points=True, grid_xstep=None, grid_ystep=None, - grid_stroke_width=0.0, # DEFAULT_STROKE_WIDTH/2, + grid_stroke_width=0.0, # DEFAULT_STROKE_WIDTH/2, grid_stroke_color=ORANGE, grid_stroke_opacity=1.0, stroke_width=2.0, fill_opacity=0.2, show_grid_lines=False, + dotted_lines=False, **kwargs ): super().__init__() @@ -37,16 +38,43 @@ class GriddedRectangle(VGroup): self.show_grid_lines = show_grid_lines self.untransformed_width = width self.untransformed_height = height + self.dotted_lines = dotted_lines # Make rectangle - self.rectangle = Rectangle( - width=width, - height=height, - color=color, - stroke_width=stroke_width, - fill_color=color, - fill_opacity=fill_opacity, - shade_in_3d=True - ) + if self.dotted_lines: + no_border_rectangle = Rectangle( + width=width, + height=height, + color=color, + fill_color=color, + stroke_opacity=0.0, + fill_opacity=fill_opacity, + shade_in_3d=True + ) + self.rectangle = no_border_rectangle + border_rectangle = Rectangle( + width=width, + height=height, + color=color, + fill_color=color, + fill_opacity=fill_opacity, + shade_in_3d=True, + stroke_width=stroke_width + ) + self.dotted_lines = DashedVMobject( + border_rectangle, + num_dashes=int((width + height) / 2) * 20, + ) + self.add(self.dotted_lines) + else: + self.rectangle = Rectangle( + width=width, + height=height, + color=color, + stroke_width=stroke_width, + fill_color=color, + fill_opacity=fill_opacity, + shade_in_3d=True + ) self.add(self.rectangle) # Make grid lines grid_lines = self.make_grid_lines() diff --git a/manim_ml/neural_network/layers/convolutional_2d.py b/manim_ml/neural_network/layers/convolutional_2d.py index 02e8c81..9120101 100644 --- a/manim_ml/neural_network/layers/convolutional_2d.py +++ b/manim_ml/neural_network/layers/convolutional_2d.py @@ -12,6 +12,89 @@ from manim_ml.neural_network.layers.parent_layers import ( ) from manim_ml.gridded_rectangle import GriddedRectangle +class FeatureMap(VGroup): + """Class for making a feature map""" + + def __init__( + self, + color=ORANGE, + feature_map_size=None, + fill_color=ORANGE, + fill_opacity=0.2, + cell_width=0.2, + padding=(0, 0), + stroke_width=2.0, + show_grid_lines=False, + padding_dashed=False + ): + super().__init__() + self.color = color + self.feature_map_size = feature_map_size + self.fill_color = fill_color + self.fill_opacity = fill_opacity + self.cell_width = cell_width + self.padding = padding + self.stroke_width = stroke_width + self.show_grid_lines = show_grid_lines + self.padding_dashed = padding_dashed + # Check if we have non-zero padding + if padding[0] > 0 or padding[1] > 0: + # Make the exterior rectangle dashed + width_with_padding = (self.feature_map_size[0] + self.padding[0] * 2) * self.cell_width + height_with_padding = (self.feature_map_size[1] + self.padding[1] * 2) * self.cell_width + self.untransformed_width = width_with_padding + self.untransformed_height = height_with_padding + + self.exterior_rectangle = GriddedRectangle( + color=self.color, + width=width_with_padding, + height=height_with_padding, + fill_color=self.color, + fill_opacity=self.fill_opacity, + stroke_color=self.color, + stroke_width=self.stroke_width, + grid_xstep=self.cell_width, + grid_ystep=self.cell_width, + grid_stroke_width=self.stroke_width / 2, + grid_stroke_color=self.color, + 
show_grid_lines=self.show_grid_lines,
+                dotted_lines=self.padding_dashed
+            )
+            self.add(self.exterior_rectangle)
+            # Add an interior rectangle with no fill color
+            self.interior_rectangle = GriddedRectangle(
+                color=self.color,
+                fill_opacity=0.0,
+                width=self.feature_map_size[0] * self.cell_width,
+                height=self.feature_map_size[1] * self.cell_width,
+                stroke_width=self.stroke_width
+            )
+            self.add(self.interior_rectangle)
+        else:
+            # Just make an exterior rectangle with no dashes.
+            self.untransformed_height = self.feature_map_size[1] * self.cell_width
+            self.untransformed_width = self.feature_map_size[0] * self.cell_width
+            # Make the exterior rectangle
+            self.exterior_rectangle = GriddedRectangle(
+                color=self.color,
+                height=self.feature_map_size[1] * self.cell_width,
+                width=self.feature_map_size[0] * self.cell_width,
+                fill_color=self.color,
+                fill_opacity=self.fill_opacity,
+                stroke_color=self.color,
+                stroke_width=self.stroke_width,
+                grid_xstep=self.cell_width,
+                grid_ystep=self.cell_width,
+                grid_stroke_width=self.stroke_width / 2,
+                grid_stroke_color=self.color,
+                show_grid_lines=self.show_grid_lines,
+            )
+            self.add(self.exterior_rectangle)
+
+    def get_corners_dict(self):
+        """Returns a dictionary of the corners"""
+        # Sort points through clockwise rotation of a vector in the xy plane
+        return self.exterior_rectangle.get_corners_dict()
 
 class Convolutional2DLayer(VGroupNeuralNetworkLayer, ThreeDLayer):
     """Handles rendering a convolutional layer for a nn"""
 
@@ -24,33 +107,48 @@ class Convolutional2DLayer(VGroupNeuralNetworkLayer, ThreeDLayer):
         cell_width=0.2,
         filter_spacing=0.1,
         color=BLUE,
-        pulse_color=ORANGE,
-        show_grid_lines=False,
+        active_color=ORANGE,
         filter_color=ORANGE,
+        show_grid_lines=False,
+        fill_opacity=0.3,
         stride=1,
         stroke_width=2.0,
         activation_function=None,
+        padding=0,
+        padding_dashed=True,
         **kwargs,
     ):
         super().__init__(**kwargs)
         self.num_feature_maps = num_feature_maps
         self.filter_color = filter_color
+        if isinstance(padding, tuple):
+            assert len(padding) == 2
+            self.padding = padding
+        elif isinstance(padding, int):
+            self.padding = (padding, padding)
+        else:
+            raise Exception(f"Unrecognized type for padding: {type(padding)}")
+
         if isinstance(feature_map_size, int):
             self.feature_map_size = (feature_map_size, feature_map_size)
         else:
             self.feature_map_size = feature_map_size
+
         if isinstance(filter_size, int):
             self.filter_size = (filter_size, filter_size)
         else:
             self.filter_size = filter_size
+
         self.cell_width = cell_width
         self.filter_spacing = filter_spacing
         self.color = color
-        self.pulse_color = pulse_color
+        self.active_color = active_color
         self.stride = stride
         self.stroke_width = stroke_width
         self.show_grid_lines = show_grid_lines
         self.activation_function = activation_function
+        self.fill_opacity = fill_opacity
+        self.padding_dashed = padding_dashed
 
     def construct_layer(
         self,
@@ -92,12 +190,14 @@
         # Draw rectangles that are filled in with opacity
         feature_maps = []
         for filter_index in range(self.num_feature_maps):
-            rectangle = GriddedRectangle(
+            # Check if we need to add padding
+            """
+            feature_map = GriddedRectangle(
                 color=self.color,
                 height=self.feature_map_size[1] * self.cell_width,
                 width=self.feature_map_size[0] * self.cell_width,
                 fill_color=self.color,
-                fill_opacity=0.2,
+                fill_opacity=self.fill_opacity,
                 stroke_color=self.color,
                 stroke_width=self.stroke_width,
                 grid_xstep=self.cell_width,
@@ -106,52 +206,44 @@ 
grid_stroke_color=self.color, show_grid_lines=self.show_grid_lines, ) + """ + # feature_map = GriddedRectangle() + feature_map = FeatureMap( + color=self.color, + feature_map_size=self.feature_map_size, + cell_width=self.cell_width, + fill_color=self.color, + fill_opacity=self.fill_opacity, + padding=self.padding, + padding_dashed=self.padding_dashed + ) # Move the feature map - rectangle.move_to([0, 0, filter_index * self.filter_spacing]) + feature_map.move_to([0, 0, filter_index * self.filter_spacing]) # rectangle.set_z_index(4) - feature_maps.append(rectangle) + feature_maps.append(feature_map) return VGroup(*feature_maps) def highlight_and_unhighlight_feature_maps(self): """Highlights then unhighlights feature maps""" return Succession( - ApplyMethod(self.feature_maps.set_color, self.pulse_color), + ApplyMethod(self.feature_maps.set_color, self.active_color), ApplyMethod(self.feature_maps.set_color, self.color), ) def make_forward_pass_animation( - self, run_time=5, corner_pulses=False, layer_args={}, **kwargs + self, run_time=5, layer_args={}, **kwargs ): """Convolution forward pass animation""" # Note: most of this animation is done in the Convolution3DToConvolution3D layer - if corner_pulses: - raise NotImplementedError() - passing_flashes = [] - for line in self.corner_lines: - pulse = ShowPassingFlash( - line.copy().set_color(self.pulse_color).set_stroke(opacity=1.0), - time_width=0.5, - run_time=run_time, - rate_func=rate_functions.linear, - ) - passing_flashes.append(pulse) - - # per_filter_run_time = run_time / len(self.feature_maps) - # Make animation group + if not self.activation_function is None: animation_group = AnimationGroup( - *passing_flashes, - # filter_flashes + self.activation_function.make_evaluate_animation(), + self.highlight_and_unhighlight_feature_maps(), + lag_ratio=0.0, ) else: - if not self.activation_function is None: - animation_group = AnimationGroup( - self.activation_function.make_evaluate_animation(), - self.highlight_and_unhighlight_feature_maps(), - lag_ratio=0.0, - ) - else: - animation_group = AnimationGroup() + animation_group = AnimationGroup() return animation_group diff --git a/manim_ml/neural_network/layers/convolutional_2d_to_convolutional_2d.py b/manim_ml/neural_network/layers/convolutional_2d_to_convolutional_2d.py index f694081..8dd2700 100644 --- a/manim_ml/neural_network/layers/convolutional_2d_to_convolutional_2d.py +++ b/manim_ml/neural_network/layers/convolutional_2d_to_convolutional_2d.py @@ -7,7 +7,6 @@ from manim_ml.gridded_rectangle import GriddedRectangle from manim.utils.space_ops import rotation_matrix - def get_rotated_shift_vectors(input_layer, normalized=False): """Rotates the shift vectors""" # Make base shift vectors @@ -25,7 +24,6 @@ def get_rotated_shift_vectors(input_layer, normalized=False): return right_shift, down_shift - class Filters(VGroup): """Group for showing a collection of filters connecting two layers""" @@ -109,6 +107,8 @@ class Filters(VGroup): rectangle_width = self.output_layer.cell_width rectangle_height = self.output_layer.cell_width filter_color = self.output_layer.filter_color + right_shift, down_shift = get_rotated_shift_vectors(self.input_layer) + left_shift = -1 * right_shift for index, feature_map in enumerate(self.output_layer.feature_maps): # Make sure current feature map is the right filter @@ -142,6 +142,13 @@ class Filters(VGroup): buff=0.0 # aligned_edge=feature_map.get_corners_dict()["top_left"].get_center() ) + # Shift based on the amount of output layer padding + rectangle.shift( + 
self.output_layer.padding[0] * right_shift, + ) + rectangle.shift( + self.output_layer.padding[1] * down_shift, + ) rectangles.append(rectangle) feature_map_rectangles = VGroup(*rectangles) @@ -280,7 +287,7 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): color=ORANGE, filter_opacity=0.3, line_color=ORANGE, - pulse_color=ORANGE, + active_color=ORANGE, cell_width=0.2, show_grid_lines=True, highlight_color=ORANGE, @@ -299,10 +306,11 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): self.num_output_feature_maps = self.output_layer.num_feature_maps self.cell_width = self.output_layer.cell_width self.stride = self.output_layer.stride + self.padding = self.input_layer.padding self.filter_opacity = filter_opacity self.cell_width = cell_width self.line_color = line_color - self.pulse_color = pulse_color + self.active_color = active_color self.show_grid_lines = show_grid_lines self.highlight_color = highlight_color @@ -333,9 +341,11 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): # Make the animation num_y_moves = int( (self.feature_map_size[1] - self.filter_size[1]) / self.stride + + self.padding[1] * 2 ) num_x_moves = int( (self.feature_map_size[0] - self.filter_size[0]) / self.stride + + self.padding[0] * 2 ) for y_move in range(num_y_moves): # Go right num_x_moves @@ -401,9 +411,11 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): # Make the animation num_y_moves = int( (self.feature_map_size[1] - self.filter_size[1]) / self.stride + + self.padding[1] * 2 ) num_x_moves = int( (self.feature_map_size[0] - self.filter_size[0]) / self.stride + + self.padding[0] * 2 ) for y_move in range(num_y_moves): # Go right num_x_moves @@ -434,7 +446,10 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): # Do last row move right for x_move in range(num_x_moves): # Shift right - shift_animation = ApplyMethod(filters.shift, self.stride * right_shift) + shift_animation = ApplyMethod( + filters.shift, + self.stride * right_shift + ) # shift_animation = self.animate.shift(right_shift) animations.append(shift_animation) # Remove the filters @@ -445,14 +460,18 @@ class Convolutional2DToConvolutional2D(ConnectiveLayer, ThreeDLayer): # Change the output feature map colors change_color_animations = [] change_color_animations.append( - ApplyMethod(feature_map.set_color, original_feature_map_color) + ApplyMethod( + feature_map.set_color, + original_feature_map_color + ) ) # Change the input feature map colors input_feature_maps = self.input_layer.feature_maps for input_feature_map in input_feature_maps: change_color_animations.append( ApplyMethod( - input_feature_map.set_color, original_feature_map_color + input_feature_map.set_color, + original_feature_map_color ) ) # Combine the animations diff --git a/manim_ml/neural_network/neural_network.py b/manim_ml/neural_network/neural_network.py index 79eac06..5b55ad8 100644 --- a/manim_ml/neural_network/neural_network.py +++ b/manim_ml/neural_network/neural_network.py @@ -10,11 +10,10 @@ Example: NeuralNetwork(layer_node_count) """ import textwrap +from manim_ml.neural_network.layers.embedding import EmbeddingLayer import numpy as np from manim import * -from manim_ml.neural_network.layers.embedding import EmbeddingLayer -from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer from manim_ml.neural_network.layers.parent_layers import ConnectiveLayer, ThreeDLayer from manim_ml.neural_network.layers.util import get_connective_layer from 
manim_ml.list_group import ListGroup
@@ -23,7 +22,6 @@ from manim_ml.neural_network.neural_network_transformations import (
     RemoveLayer,
 )
 
-
 class NeuralNetwork(Group):
     """Neural Network Visualization Container Class"""
 
@@ -59,7 +57,10 @@ class NeuralNetwork(Group):
         # Make the connective layers
         self.connective_layers, self.all_layers = self._construct_connective_layers()
         # Make overhead title
-        self.title = Text(self.title_text, font_size=DEFAULT_FONT_SIZE / 2)
+        self.title = Text(
+            self.title_text,
+            font_size=DEFAULT_FONT_SIZE / 2
+        )
         self.title.next_to(self, UP, 1.0)
         self.add(self.title)
         # Place layers at correct z index
@@ -96,6 +97,7 @@ class NeuralNetwork(Group):
             previous_layer = self.input_layers[layer_index - 1]
             current_layer = self.input_layers[layer_index]
             current_layer.move_to(previous_layer.get_center())
+
             if layout_direction == "left_to_right":
                 x_shift = previous_layer.get_width() / 2 \
                     + current_layer.get_width() / 2 \
                     + self.layer_spacing
                 shift_vector = np.array([x_shift, 0, 0])
             elif layout_direction == "top_to_bottom":
                 y_shift = -(
                     (previous_layer.get_width() / 2 \
                     + current_layer.get_width() / 2
                 ) + self.layer_spacing)
-
                 shift_vector = np.array([0, y_shift, 0])
             else:
                 raise Exception(
@@ -119,15 +120,13 @@
             # Place activation function
             if hasattr(current_layer, "activation_function"):
                 if not current_layer.activation_function is None:
-                    up_movement = np.array(
-                        [
-                            0,
-                            current_layer.get_height() / 2
-                            + current_layer.activation_function.get_height() / 2
-                            + 0.5 * self.layer_spacing,
-                            0,
-                        ]
-                    )
+                    up_movement = np.array([
+                        0,
+                        current_layer.get_height() / 2
+                        + current_layer.activation_function.get_height() / 2
+                        + 0.5 * self.layer_spacing,
+                        0,
+                    ])
                     current_layer.activation_function.move_to(
                         current_layer,
                     )
diff --git a/setup.py b/setup.py
index 67f7834..0841d02 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
 setup(
     name="manim_ml",
-    version="0.0.14",
+    version="0.0.15",
     description=(" Machine Learning Animations in python using Manim."),
     packages=find_packages(),
 )
diff --git a/tests/.DS_Store b/tests/.DS_Store
index 7b4368e6066c14f65878ea79bc9119f0bc528a2d..18ed8c9b0543ac74461b561e1abbc53a6ebdcacb 100644
GIT binary patch
[binary delta omitted]

diff --git a/tests/control_data/padding/ConvPadding.npz b/tests/control_data/padding/ConvPadding.npz
new file mode 100644
index 0000000000000000000000000000000000000000..d485da0d3dbcb2b16b5d67d906df77244b62f0de
GIT binary patch
[14368-byte binary literal omitted]
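A minimal usage sketch of the padding API added above (illustrative only; it mirrors tests/test_conv_padding.py further down, and the scene name is hypothetical):

from manim import *

from manim_ml.neural_network.layers.convolutional_2d import Convolutional2DLayer
from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer
from manim_ml.neural_network.neural_network import NeuralNetwork

class PaddedConvScene(ThreeDScene):
    def construct(self):
        # padding=1 grows each 7x7 feature map by a one-cell border;
        # padding_dashed=True renders that border with dashed lines
        nn = NeuralNetwork([
            Convolutional2DLayer(
                num_feature_maps=1,
                feature_map_size=7,
                padding=1,
                padding_dashed=True,
            ),
            Convolutional2DLayer(
                num_feature_maps=3,
                feature_map_size=7,
                filter_size=3,
                padding=0,
            ),
            FeedForwardLayer(3),
        ], layer_spacing=0.25)
        nn.move_to(ORIGIN)
        self.add(nn)
        self.play(nn.make_forward_pass_animation())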
diff --git a/tests/test_activation_function.py b/tests/test_activation_function.py
index 2f5436f..92b132f 100644
--- a/tests/test_activation_function.py
+++ b/tests/test_activation_function.py
@@ -30,9 +30,6 @@ class CombinedScene(ThreeDScene):
         nn.move_to(ORIGIN)
         self.add(nn)
         # Play animation
-        forward_pass = nn.make_forward_pass_animation(
-            corner_pulses=False,
-            all_filters_at_once=False
-        )
+        forward_pass = nn.make_forward_pass_animation()
         self.wait(1)
-        self.play(forward_pass)
\ No newline at end of file
+        self.play(forward_pass, run_time=30)
\ No newline at end of file
diff --git a/tests/test_conv_padding.py b/tests/test_conv_padding.py
new file mode 100644
index 0000000..876f566
--- /dev/null
+++ b/tests/test_conv_padding.py
+from manim import *
+
+from manim_ml.neural_network.layers.convolutional_2d import Convolutional2DLayer
+from manim_ml.neural_network.layers.feed_forward import FeedForwardLayer
+from manim_ml.neural_network.neural_network import NeuralNetwork
+
+from manim_ml.utils.testing.frames_comparison import frames_comparison
+
+__module_test__ = "padding"
+
+# Make the specific scene
+config.pixel_height = 1200
+config.pixel_width = 1900
+config.frame_height = 6.0
+config.frame_width = 6.0
+
+class CombinedScene(ThreeDScene):
+
+    def construct(self):
+        # Make nn
+        nn = NeuralNetwork([
+                Convolutional2DLayer(
+                    num_feature_maps=1,
+                    feature_map_size=7,
+                    padding=1,
+                    padding_dashed=True
+                ),
+                Convolutional2DLayer(
+                    num_feature_maps=3,
+                    feature_map_size=7,
+                    filter_size=3,
+                    padding=0,
+                    padding_dashed=False
+                ),
+                FeedForwardLayer(3),
+            ],
+            layer_spacing=0.25,
+        )
+        # Center the nn
+        nn.move_to(ORIGIN)
+        self.add(nn)
+        # Play animation
+        forward_pass = nn.make_forward_pass_animation()
+        self.wait(1)
+        self.play(forward_pass, run_time=30)
+
+@frames_comparison
+def test_ConvPadding(scene):
+    # Make nn
+    nn = NeuralNetwork([
+            Convolutional2DLayer(
+                num_feature_maps=1,
+                feature_map_size=7,
+                padding=1,
+                padding_dashed=True
+            ),
+            Convolutional2DLayer(
+                num_feature_maps=3,
+                feature_map_size=7,
+                filter_size=3,
+                padding=1,
+                filter_spacing=0.35,
+                padding_dashed=False
+            ),
+            FeedForwardLayer(3),
+        ],
+        layer_spacing=0.25,
+    )
+    # Center the nn
+    nn.move_to(ORIGIN)
+    scene.add(nn)
+    # Play animation
+    forward_pass = nn.make_forward_pass_animation()
+    scene.play(forward_pass, run_time=30)
\ No newline at end of file
diff --git a/tests/test_convolutional_2d_layer.py b/tests/test_convolutional_2d_layer.py
index b9f5417..bdbebd1 100644
--- a/tests/test_convolutional_2d_layer.py
+++ b/tests/test_convolutional_2d_layer.py
@@ -77,9 +77,6 @@ class CombinedScene(ThreeDScene):
         nn.move_to(ORIGIN)
         self.add(nn)
         # Play animation
-        forward_pass = nn.make_forward_pass_animation(
-            corner_pulses=False,
-            all_filters_at_once=False
-        )
+        forward_pass = nn.make_forward_pass_animation()
         self.wait(1)
         self.play(forward_pass)
\ No newline at end of file
diff --git 
a/tests/test_feed_forward.py b/tests/test_feed_forward.py index b61d0e8..da09bd7 100644 --- a/tests/test_feed_forward.py +++ b/tests/test_feed_forward.py @@ -14,4 +14,15 @@ def test_FeedForwardScene(scene): FeedForwardLayer(3) ]) - scene.add(nn) \ No newline at end of file + scene.add(nn) + +class FeedForwardScene(Scene): + + def construct(self): + nn = NeuralNetwork([ + FeedForwardLayer(3), + FeedForwardLayer(5), + FeedForwardLayer(3) + ]) + + self.add(nn) \ No newline at end of file diff --git a/tests/test_gridded_rectangle.py b/tests/test_gridded_rectangle.py new file mode 100644 index 0000000..9978722 --- /dev/null +++ b/tests/test_gridded_rectangle.py @@ -0,0 +1,12 @@ +from manim import * +from manim_ml.gridded_rectangle import GriddedRectangle + +class TestGriddedRectangleScene(ThreeDScene): + + def construct(self): + rect = GriddedRectangle( + color=ORANGE, + width=3, + height=3 + ) + self.add(rect) \ No newline at end of file diff --git a/tests/test_max_pool.py b/tests/test_max_pool.py index db97eea..f39a431 100644 --- a/tests/test_max_pool.py +++ b/tests/test_max_pool.py @@ -33,11 +33,7 @@ class CombinedScene(ThreeDScene): self.add(nn) self.wait(5) # Play animation - forward_pass = nn.make_forward_pass_animation( - corner_pulses=False, all_filters_at_once=False - ) - print(forward_pass) - print(forward_pass.animations) + forward_pass = nn.make_forward_pass_animation() self.wait(1) self.play(forward_pass) @@ -57,8 +53,6 @@ class SmallNetwork(ThreeDScene): nn.move_to(ORIGIN) self.add(nn) # Play animation - forward_pass = nn.make_forward_pass_animation( - corner_pulses=False, all_filters_at_once=False - ) + forward_pass = nn.make_forward_pass_animation() self.wait(1) self.play(forward_pass) \ No newline at end of file diff --git a/tests/test_nn_scale.py b/tests/test_nn_scale.py index 90fe10d..7ff121a 100644 --- a/tests/test_nn_scale.py +++ b/tests/test_nn_scale.py @@ -31,14 +31,7 @@ class CombinedScene(ThreeDScene): nn.move_to(ORIGIN) nn.scale(1.3) self.add(nn) - """ - self.play( - FadeIn(nn) - ) - """ # Play animation - forward_pass = nn.make_forward_pass_animation( - corner_pulses=False, all_filters_at_once=False, highlight_filters=True - ) + forward_pass = nn.make_forward_pass_animation() self.wait(1) self.play(forward_pass)
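For reference, the sweep counts introduced in convolutional_2d_to_convolutional_2d.py above follow the usual padded-convolution arithmetic at stride 1. A small sketch (num_moves is a hypothetical helper written for illustration, not a library function):

def num_moves(feature_map_size, filter_size, stride, padding):
    # Mirrors the num_x_moves / num_y_moves expression in the diff:
    # int((feature_map_size - filter_size) / stride + padding * 2)
    return int((feature_map_size - filter_size) / stride + padding * 2)

# A 7x7 map with a 3x3 filter, stride 1, padding 1 gives 6 shifts per axis,
# i.e. 7 filter positions, consistent with the standard output size
# (feature_map_size - filter_size + 2 * padding) // stride + 1 = 7.
assert num_moves(7, 3, 1, 1) == 6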