{ "<h1><a href=\"https://nn.labml.ai/optimizers/index.html\">Optimizers</a></h1>\n<h2>Optimizer Implementations</h2>\n<ul><li><a href=\"https://nn.labml.ai/optimizers/adam.html\">Adam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/amsgrad.html\">AMSGrad Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/adam_warmup.html\">Adam Optimizer with warmup</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/noam.html\">Noam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/radam.html\">Rectified Adam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/ada_belief.html\">AdaBelief Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/sophia.html\">Sophia-G Optimizer</a> </li></ul>\n": "<h1><a href=\"https://nn.labml.ai/optimizers/index.html\">Optimizers</a></h1>\n<h2>Optimizer Implementations</h2>\n<ul><li><a href=\"https://nn.labml.ai/optimizers/adam.html\">Adam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/amsgrad.html\">AMSGrad Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/adam_warmup.html\">Adam Optimizer with warmup</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/noam.html\">Noam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/radam.html\">Rectified Adam Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/ada_belief.html\">AdaBelief Optimizer</a> </li>\n<li><a href=\"https://nn.labml.ai/optimizers/sophia.html\">Sophia-G Optimizer</a> </li></ul>\n", "Optimizers": "\u4f18\u5316\u5668" }