"""Public API for the ``flambe.tokenizer`` package.

Re-exports the tokenizer implementations so callers can write
``from flambe.tokenizer import WordTokenizer`` instead of importing
from the individual submodules.
"""
from flambe.tokenizer.tokenizer import Tokenizer
from flambe.tokenizer.char import CharTokenizer
from flambe.tokenizer.word import WordTokenizer, NGramsTokenizer
from flambe.tokenizer.label import LabelTokenizer

# Explicit public API; controls `from flambe.tokenizer import *`.
__all__ = ['Tokenizer', 'WordTokenizer', 'CharTokenizer',
           'LabelTokenizer', 'NGramsTokenizer']