Varuna Jayasiri
2021-01-03 11:12:55 +05:30
parent 600c8590f4
commit bead174ed4
2 changed files with 234 additions and 3 deletions


@@ -0,0 +1,234 @@
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "HyperNetworks",
"provenance": [],
"collapsed_sections": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "ZCzmCrAIVg0L",
"outputId": "3d623494-2c75-4238-a98c-eafbdbda8203"
},
"source": [
"!pip install labml-nn"
],
"execution_count": 1,
"outputs": [
{
"output_type": "stream",
"text": [
"Collecting labml-nn\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/03/8d/0328d3c1313f49dae3904b95b9473985110877f23d7ce00081bfe3ae199a/labml_nn-0.4.75-py3-none-any.whl (99kB)\n",
"\r\u001b[K |███▎ | 10kB 24.4MB/s eta 0:00:01\r\u001b[K |██████▋ | 20kB 18.6MB/s eta 0:00:01\r\u001b[K |█████████▉ | 30kB 12.7MB/s eta 0:00:01\r\u001b[K |█████████████▏ | 40kB 13.3MB/s eta 0:00:01\r\u001b[K |████████████████▍ | 51kB 10.9MB/s eta 0:00:01\r\u001b[K |███████████████████▊ | 61kB 10.8MB/s eta 0:00:01\r\u001b[K |███████████████████████ | 71kB 11.0MB/s eta 0:00:01\r\u001b[K |██████████████████████████▎ | 81kB 11.0MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▌ | 92kB 12.0MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 102kB 6.6MB/s \n",
"\u001b[?25hRequirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from labml-nn) (1.19.4)\n",
"Collecting labml>=0.4.86\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/f7/cd/982fe0c11289b460ca186ba70cd13cb6eb5a2a352e38a060e962cbc8a9f7/labml-0.4.86-py3-none-any.whl (97kB)\n",
"\u001b[K |████████████████████████████████| 102kB 9.7MB/s \n",
"\u001b[?25hCollecting labml-helpers>=0.4.72\n",
" Downloading https://files.pythonhosted.org/packages/ec/58/2b7dcfde4565134ad97cdfe96ad7070fef95c37be2cbc066b608c9ae5c1d/labml_helpers-0.4.72-py3-none-any.whl\n",
"Collecting einops\n",
" Downloading https://files.pythonhosted.org/packages/5d/a0/9935e030634bf60ecd572c775f64ace82ceddf2f504a5fd3902438f07090/einops-0.3.0-py2.py3-none-any.whl\n",
"Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from labml-nn) (1.7.0+cu101)\n",
"Collecting gitpython\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/24/d1/a7f8fe3df258549b303415157328bfcc63e9b11d06a7ad7a3327f3d32606/GitPython-3.1.11-py3-none-any.whl (159kB)\n",
"\u001b[K |████████████████████████████████| 163kB 33.3MB/s \n",
"\u001b[?25hCollecting pyyaml>=5.3.1\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz (269kB)\n",
"\u001b[K |████████████████████████████████| 276kB 42.5MB/s \n",
"\u001b[?25hRequirement already satisfied: future in /usr/local/lib/python3.6/dist-packages (from torch->labml-nn) (0.16.0)\n",
"Requirement already satisfied: dataclasses in /usr/local/lib/python3.6/dist-packages (from torch->labml-nn) (0.8)\n",
"Requirement already satisfied: typing-extensions in /usr/local/lib/python3.6/dist-packages (from torch->labml-nn) (3.7.4.3)\n",
"Collecting gitdb<5,>=4.0.1\n",
"\u001b[?25l Downloading https://files.pythonhosted.org/packages/48/11/d1800bca0a3bae820b84b7d813ad1eff15a48a64caea9c823fc8c1b119e8/gitdb-4.0.5-py3-none-any.whl (63kB)\n",
"\u001b[K |████████████████████████████████| 71kB 11.5MB/s \n",
"\u001b[?25hCollecting smmap<4,>=3.0.1\n",
" Downloading https://files.pythonhosted.org/packages/b0/9a/4d409a6234eb940e6a78dfdfc66156e7522262f5f2fecca07dc55915952d/smmap-3.0.4-py2.py3-none-any.whl\n",
"Building wheels for collected packages: pyyaml\n",
" Building wheel for pyyaml (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
" Created wheel for pyyaml: filename=PyYAML-5.3.1-cp36-cp36m-linux_x86_64.whl size=44621 sha256=c05b7ecc79753a54763fb4e25c0be1ba74384bfc8490be003bd2abc7d19bc4d5\n",
" Stored in directory: /root/.cache/pip/wheels/a7/c1/ea/cf5bd31012e735dc1dfea3131a2d5eae7978b251083d6247bd\n",
"Successfully built pyyaml\n",
"Installing collected packages: smmap, gitdb, gitpython, pyyaml, labml, labml-helpers, einops, labml-nn\n",
" Found existing installation: PyYAML 3.13\n",
" Uninstalling PyYAML-3.13:\n",
" Successfully uninstalled PyYAML-3.13\n",
"Successfully installed einops-0.3.0 gitdb-4.0.5 gitpython-3.1.11 labml-0.4.86 labml-helpers-0.4.72 labml-nn-0.4.75 pyyaml-5.3.1 smmap-3.0.4\n"
],
"name": "stdout"
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "0hJXx_g0wS2C"
},
"source": [
"from labml import experiment\n",
"from labml_nn.hypernetworks.experiment import Configs"
],
"execution_count": 3,
"outputs": []
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 255
},
"id": "WQ8VGpMGwZuj",
"outputId": "5833cc50-26a8-496e-e729-88f42b3f4651"
},
"source": [
"# Create experiment\n",
"experiment.create(name=\"hyper_lstm\", comment='')\n",
"# Create configs\n",
"conf = Configs()\n",
"# Load configurations\n",
"experiment.configs(conf,\n",
" # A dictionary of configurations to override\n",
" {'tokenizer': 'character',\n",
" 'text': 'tiny_shakespeare',\n",
" 'optimizer.learning_rate': 2.5e-4,\n",
" 'optimizer.optimizer': 'Adam',\n",
" 'prompt': 'It is',\n",
" 'prompt_separator': '',\n",
"\n",
" 'rnn_model': 'hyper_lstm',\n",
"\n",
" 'train_loader': 'shuffled_train_loader',\n",
" 'valid_loader': 'shuffled_valid_loader',\n",
"\n",
" 'seq_len': 512,\n",
" 'epochs': 128,\n",
" 'batch_size': 2,\n",
" 'inner_iterations': 25})\n",
"\n",
"\n",
"# Set models for saving and loading\n",
"experiment.add_pytorch_models({'model': conf.model})\n",
"\n",
"conf.init()"
],
"execution_count": 5,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/html": [
"<pre style=\"overflow-x: scroll;\">\n",
"Prepare model...\n",
" Prepare n_tokens...\n",
" Prepare text...\n",
" Prepare tokenizer<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t3.07ms</span>\n",
" Load data<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t2.85ms</span>\n",
" Tokenize<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t33.69ms</span>\n",
" Build vocabulary<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t103.52ms</span>\n",
" Prepare text<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t153.38ms</span>\n",
" Prepare n_tokens<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t160.21ms</span>\n",
" Prepare rnn_model<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t13.84ms</span>\n",
"Prepare model<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t195.08ms</span>\n",
"Prepare mode<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t1.78ms</span>\n",
"</pre>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {
"tags": []
}
},
{
"output_type": "stream",
"text": [
"/usr/local/lib/python3.6/dist-packages/torch/nn/modules/container.py:434: UserWarning: Setting attributes on ParameterList is not supported.\n",
" warnings.warn(\"Setting attributes on ParameterList is not supported.\")\n"
],
"name": "stderr"
}
]
},
{
"cell_type": "code",
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 374
},
"id": "f07vAOaHwumr",
"outputId": "cfbb9042-52df-4807-cf18-5984fcfaf8e2"
},
"source": [
"# Start the experiment\n",
"with experiment.start():\n",
" # `TrainValidConfigs.run`\n",
" conf.run()"
],
"execution_count": null,
"outputs": [
{
"output_type": "display_data",
"data": {
"text/html": [
"<pre style=\"overflow-x: scroll;\">\n",
"<strong><span style=\"text-decoration: underline\">hyper_lstm</span></strong>: <span style=\"color: #208FFB\">5004f5724d8611eba84a0242ac1c0002</span>\n",
"\t[dirty]: <strong><span style=\"color: #DDB62B\">\"\"</span></strong>\n",
"Initialize<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t1.12ms</span>\n",
"Prepare validator...\n",
" Prepare valid_loader<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t76.72ms</span>\n",
"<span style=\"color: #C5C1B4\"></span>\n",
"<span style=\"color: #C5C1B4\">--------------------------------------------------</span><span style=\"color: #DDB62B\"><strong><span style=\"text-decoration: underline\"></span></strong></span>\n",
"<span style=\"color: #DDB62B\"><strong><span style=\"text-decoration: underline\">LABML WARNING</span></strong></span>\n",
"<span style=\"color: #DDB62B\"><strong><span style=\"text-decoration: underline\"></span></strong></span>LabML App Warning: <span style=\"color: #60C6C8\">empty_token: </span><strong>Please create a valid token at https://web.lab-ml.com.</strong>\n",
"<strong>Click on the experiment link to monitor the experiment and add it to your experiments list.</strong><span style=\"color: #C5C1B4\"></span>\n",
"<span style=\"color: #C5C1B4\">--------------------------------------------------</span>\n",
"<span style=\"color: #208FFB\">Monitor experiment at </span><a href='https://web.lab-ml.com/run?uuid=5004f5724d8611eba84a0242ac1c0002' target='blank'>https://web.lab-ml.com/run?uuid=5004f5724d8611eba84a0242ac1c0002</a>\n",
"Prepare validator<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t174.93ms</span>\n",
"Prepare trainer...\n",
" Prepare train_loader<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t100.16ms</span>\n",
"Prepare trainer<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t137.49ms</span>\n",
"Prepare training_loop...\n",
" Prepare loop_count<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t37.12ms</span>\n",
"Prepare training_loop<span style=\"color: #00A250\">...[DONE]</span><span style=\"color: #208FFB\">\t301.04ms</span>\n",
"<span style=\"color: #C5C1B4\">It is</span><strong>?</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>n</strong><strong>n</strong><strong>?</strong><strong>n</strong><strong>?</strong><strong>n</strong><strong>?</strong><strong>n</strong><strong>?</strong><strong>?</strong><strong>?</strong><strong>n</strong><strong>n</strong><strong>?</strong><strong>n</strong><strong>?</strong><strong>n</strong>\n",
"<strong><span style=\"color: #DDB62B\"> 3,584: </span></strong>Sample:<span style=\"color: #C5C1B4\"> 100%</span><span style=\"color: #208FFB\"> 868ms </span>Train:<span style=\"color: #C5C1B4\"> 0%</span><span style=\"color: #208FFB\"> 6,257,219ms </span> accuracy.train: <strong>0.102679</strong> loss.train: <strong> 3.91217</strong> <span style=\"color: #208FFB\">6,258,086ms</span><span style=\"color: #D160C4\"> 0:00m/222:30m </span></pre>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
]
},
"metadata": {
"tags": []
}
}
]
},
{
"cell_type": "code",
"metadata": {
"id": "crH6MzKmw-SY"
},
"source": [
""
],
"execution_count": null,
"outputs": []
}
]
}
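For readers who do not want to parse the raw .ipynb JSON above, the notebook cells amount to the plain-Python script below. It is a condensed restatement of the cell sources shown in this diff (the same labml / labml_nn calls), not code taken from the repository:

from labml import experiment
from labml_nn.hypernetworks.experiment import Configs

# Create the experiment and its configurations (mirrors the notebook cells above)
experiment.create(name="hyper_lstm", comment='')
conf = Configs()

# Override the default configurations with the values used in the notebook
experiment.configs(conf, {
    'tokenizer': 'character',
    'text': 'tiny_shakespeare',
    'optimizer.learning_rate': 2.5e-4,
    'optimizer.optimizer': 'Adam',
    'prompt': 'It is',
    'prompt_separator': '',
    'rnn_model': 'hyper_lstm',
    'train_loader': 'shuffled_train_loader',
    'valid_loader': 'shuffled_valid_loader',
    'seq_len': 512,
    'epochs': 128,
    'batch_size': 2,
    'inner_iterations': 25,
})

# Register the model for checkpointing, initialize, and run training
experiment.add_pytorch_models({'model': conf.model})
conf.init()
with experiment.start():
    conf.run()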


@@ -89,9 +89,6 @@ def main():
                        'batch_size': 2,
                        'inner_iterations': 25})
    # This is needed to initialize models
    conf.n_tokens = conf.text.n_tokens
    # Set models for saving and loading
    experiment.add_pytorch_models(get_modules(conf))
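The context above suggests that what this hunk removes is the manual n_tokens initialization; consistent with that, the notebook output earlier in this commit shows "Prepare n_tokens" being derived during model preparation rather than set by hand. This is an inference from the -9,+6 line counts, not something marked explicitly in this view; the assignment in question is the one visible in the old code:

# Manual initialization (from the old context above) that the notebook no longer performs;
# labml's config system appears to prepare n_tokens on demand instead.
conf.n_tokens = conf.text.n_tokens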