from typing import Callable

from labml.configs import BaseConfigs, option

Tokenizer Configurations

class TokenizerConfigs(BaseConfigs):
    # The tokenizer to use; defaults to the character level tokenizer option
    tokenizer: Callable = 'character'

    def __init__(self):
        # `tokenizer` is the primary option of this configurations group
        super().__init__(_primary='tokenizer')

Basic English tokenizer

We use the character level tokenizer in this experiment. You can switch to the basic English tokenizer by setting

'tokenizer': 'basic_english'

in the configurations dictionary when starting the experiment, as shown in the sketch after the code below.

@option(TokenizerConfigs.tokenizer)
def basic_english():
    # Import locally so torchtext is only required when this option is used
    from torchtext.data import get_tokenizer
    return get_tokenizer('basic_english')
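As a hedged sketch of the override described above, the configurations dictionary is passed through labml's experiment.configs. The experiment name tokenizer_demo is made up for illustration, and torchtext must be installed for this option.

from labml import experiment

conf = TokenizerConfigs()
experiment.create(name='tokenizer_demo')  # hypothetical experiment name
# Override the primary `tokenizer` option; omit the dictionary to keep
# the default character level tokenizer
experiment.configs(conf, {'tokenizer': 'basic_english'})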

Character level tokenizer

def character_tokenizer(x: str):
    return list(x)
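For example, it simply splits a string into its characters:

character_tokenizer('hello')  # ['h', 'e', 'l', 'l', 'o']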

Character level tokenizer configuration

@option(TokenizerConfigs.tokenizer)
def character():
    return character_tokenizer
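Putting it together, a minimal usage sketch with the default option; this assumes labml resolves the 'character' string to the registered option lazily when conf.tokenizer is accessed after experiment.configs.

from labml import experiment

conf = TokenizerConfigs()
experiment.create(name='tokenizer_demo')  # hypothetical experiment name
experiment.configs(conf)  # no overrides, so the default 'character' option is used
tokenizer = conf.tokenizer  # resolves to `character_tokenizer`
print(tokenizer('labml'))   # ['l', 'a', 'b', 'm', 'l']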