From 6fe86efdff7de27e18e138b572dc6eb9f802b4f9 Mon Sep 17 00:00:00 2001
From: Varuna Jayasiri
Date: Sun, 3 Jan 2021 12:16:01 +0530
Subject: [PATCH] badgeg

---
 Makefile                             | 1 +
 labml_nn/hypernetworks/hyper_lstm.py | 7 +++----
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/Makefile b/Makefile
index 4c8587b7..4f450135 100644
--- a/Makefile
+++ b/Makefile
@@ -26,6 +26,7 @@ docs: ## Render annotated HTML
 	pylit --remove_empty_sections --title_md -t ../../pylit/templates/nn -d html -w labml_nn
 
 pages: ## Copy to lab-ml site
+	pylit --remove_empty_sections --title_md -t ../../pylit/templates/nn -d html labml_nn
 	@cd ../pages; git pull
 	cp -r html/* ../pages/
 
diff --git a/labml_nn/hypernetworks/hyper_lstm.py b/labml_nn/hypernetworks/hyper_lstm.py
index 0a8bd8d8..180a94ca 100644
--- a/labml_nn/hypernetworks/hyper_lstm.py
+++ b/labml_nn/hypernetworks/hyper_lstm.py
@@ -11,12 +11,11 @@ We have implemented HyperLSTM introduced in paper
 [This blog post](https://blog.otoro.net/2016/09/28/hyper-networks/)
 by David Ha gives a good explanation of HyperNetworks.
 
-[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/lab-ml/nn/blob/master/labml_nn/hypernetworks/experiment.ipynb)
-
 We have an experiment that trains a HyperLSTM
 to predict text on Shakespear dataset.
-Here's the link to code: [experiment.py](experiment.html)
+Here's the link to code: [`experiment.py`](experiment.html)
 
-This is the training results: [View Run](https://web.lab-ml.com/run?uuid=9e7f39e047e811ebbaff2b26e3148b3d).
+[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/lab-ml/nn/blob/master/labml_nn/hypernetworks/experiment.ipynb)
+[![View Run](https://img.shields.io/badge/labml-experiment-brightgreen)](https://web.lab-ml.com/run?uuid=9e7f39e047e811ebbaff2b26e3148b3d)
 HyperNetworks uses a smaller network to generate weights of a larger network.
 There are two variants: static hyper-networks and dynamic hyper-networks.
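
The docstring touched by this patch notes that HyperNetworks use a smaller network to generate the weights of a larger network, in static and dynamic variants. Below is a minimal, illustrative PyTorch sketch (not part of the patch, and not the repository's implementation) of the static variant: a small generator maps a learned embedding to the weight matrix of a larger linear layer. The names `HyperLinear`, `z_dim`, and `weight_generator` are hypothetical, chosen only for this example.

# Illustrative sketch only: a minimal static hyper-network.
# A small generator network produces the weight matrix of a larger linear layer
# from a learned embedding `z`. All names here are hypothetical.
import torch
import torch.nn as nn
import torch.nn.functional as F


class HyperLinear(nn.Module):
    """Linear layer whose weights are generated by a smaller network."""

    def __init__(self, in_features: int, out_features: int, z_dim: int = 8):
        super().__init__()
        self.in_features = in_features
        self.out_features = out_features
        # Learned embedding describing this layer
        self.z = nn.Parameter(torch.randn(z_dim))
        # The hyper-network: maps the embedding to a flattened weight matrix
        self.weight_generator = nn.Linear(z_dim, in_features * out_features)
        self.bias = nn.Parameter(torch.zeros(out_features))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Generate the weights, then apply them like an ordinary linear layer
        w = self.weight_generator(self.z).view(self.out_features, self.in_features)
        return F.linear(x, w, self.bias)


layer = HyperLinear(in_features=16, out_features=32)
print(layer(torch.randn(4, 16)).shape)  # torch.Size([4, 32])

In the dynamic variant used by HyperLSTM, the embedding fed to the generator is itself produced at each time step by a smaller recurrent network, so the main network's weights vary with the input sequence.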