Public Member Functions

    def __init__ (self, vocab_file, do_lower_case=True)
    def tokenize (self, text)
    def convert_tokens_to_ids (self, tokens)
    def convert_ids_to_tokens (self, ids)

Public Attributes

    vocab
    inv_vocab
    basic_tokenizer
    wordpiece_tokenizer
Runs end-to-end tokenization.
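The class chains a basic (whitespace and punctuation) tokenizer with a wordpiece tokenizer, in the style of the reference BERT tokenizer. A minimal usage sketch, assuming a BERT-style vocabulary file is available; the ./vocab.txt path and the sample sentence are placeholders:

    from helpers.tokenization import FullTokenizer

    # Placeholder path to a BERT-style vocabulary file (one token per line).
    tokenizer = FullTokenizer(vocab_file="./vocab.txt", do_lower_case=True)

    tokens = tokenizer.tokenize("TensorRT accelerates BERT inference.")
    ids = tokenizer.convert_tokens_to_ids(tokens)
    print(tokens)  # wordpiece tokens; the exact split depends on the vocabulary
    print(ids)     # corresponding integer ids from the vocabulary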
def helpers.tokenization.FullTokenizer.__init__ (self, vocab_file, do_lower_case=True)
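The constructor presumably loads the vocabulary file into the vocab mapping, derives inv_vocab as the reverse mapping, and instantiates the two sub-tokenizers, as in the reference BERT tokenization script. The sketch below illustrates that setup; the load_vocab helper and the BasicTokenizer/WordpieceTokenizer class names are assumptions, not necessarily the exact implementation here:

    import collections

    def load_vocab(vocab_file):
        """Load a vocabulary file (one token per line) into a token -> id mapping."""
        vocab = collections.OrderedDict()
        with open(vocab_file, "r", encoding="utf-8") as reader:
            for index, line in enumerate(reader):
                vocab[line.strip()] = index
        return vocab

    # Inside __init__ (sketch of the assumed setup):
    #   self.vocab = load_vocab(vocab_file)
    #   self.inv_vocab = {v: k for k, v in self.vocab.items()}
    #   self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
    #   self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)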
def helpers.tokenization.FullTokenizer.tokenize (self, text)
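In the reference BERT implementation, tokenize first splits the text with the basic tokenizer (lower-casing if configured) and then breaks each resulting token into vocabulary wordpieces. The body below is a sketch of that control flow, not necessarily the exact code in this module:

    def tokenize(self, text):
        # Split on whitespace/punctuation, then break each token into wordpieces.
        split_tokens = []
        for token in self.basic_tokenizer.tokenize(text):
            for sub_token in self.wordpiece_tokenizer.tokenize(token):
                split_tokens.append(sub_token)
        return split_tokens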
def helpers.tokenization.FullTokenizer.convert_tokens_to_ids (self, tokens)
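Token-to-id conversion is a lookup into the vocab mapping. A short usage sketch, continuing from the tokenizer constructed above; the [CLS] and [SEP] special tokens follow standard BERT conventions and must be present in the vocabulary file:

    tokens = ["[CLS]"] + tokenizer.tokenize("hello world") + ["[SEP]"]
    input_ids = tokenizer.convert_tokens_to_ids(tokens)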
def helpers.tokenization.FullTokenizer.convert_ids_to_tokens (self, ids)
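The inverse operation looks each id up in inv_vocab, recovering the wordpiece strings (not the original untokenized text). Continuing the sketch above:

    recovered = tokenizer.convert_ids_to_tokens(input_ids)
    # recovered should equal tokens, since every id came from the vocabulary
    # and maps back through inv_vocab.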
helpers.tokenization.FullTokenizer.vocab |
helpers.tokenization.FullTokenizer.inv_vocab |
helpers.tokenization.FullTokenizer.basic_tokenizer |
helpers.tokenization.FullTokenizer.wordpiece_tokenizer |