Member | Class | Modifiers |
addAbbreviation(LanguageCode language, HashSet<String> abbreviations) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
addAbbreviation(LanguageCode language, File abbreviationFile) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
addClitics(LanguageCode language, Clitics clitics) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
addClitics(LanguageCode language, File cliticsFile) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
checkLanguage(String text) | org.corpus_tools.salt.common.tokenizer.Tokenizer | static |
F_CHAR (defined in org.corpus_tools.salt.common.tokenizer.Tokenizer) | org.corpus_tools.salt.common.tokenizer.Tokenizer | protected static |
getAbbreviations(LanguageCode language) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
getClitics(LanguageCode language) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
getDocumentGraph() (defined in org.corpus_tools.salt.common.tokenizer.Tokenizer) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
mapISOLanguageCode(String language) | org.corpus_tools.salt.common.tokenizer.Tokenizer | static |
P_CHAR (defined in org.corpus_tools.salt.common.tokenizer.Tokenizer) | org.corpus_tools.salt.common.tokenizer.Tokenizer | protected static |
setsDocumentGraph(SDocumentGraph sDocumentGraph) (defined in org.corpus_tools.salt.common.tokenizer.Tokenizer) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
tokenize(STextualDS sTextualDSs) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
tokenize(STextualDS sTextualDSs, LanguageCode language) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
tokenize(STextualDS sTextualDS, LanguageCode language, Integer startPos, Integer endPos) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
Tokenizer() | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
tokenizeToString(String strInput, LanguageCode language) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
tokenizeToToken(STextualDS sTextualDS, LanguageCode language, Integer startPos, Integer endPos) | org.corpus_tools.salt.common.tokenizer.Tokenizer | |
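A minimal usage sketch based on the member signatures listed above: it builds an SDocumentGraph, creates a primary text, and runs the tokenizer over it. The SaltFactory.createSDocumentGraph() and createTextualDS(...) calls, and the assumption that tokenize(STextualDS) returns the created SToken list, come from the wider Salt API and are not stated in this index.

```java
import java.util.List;

import org.corpus_tools.salt.SaltFactory;
import org.corpus_tools.salt.common.SDocumentGraph;
import org.corpus_tools.salt.common.STextualDS;
import org.corpus_tools.salt.common.SToken;
import org.corpus_tools.salt.common.tokenizer.Tokenizer;

public class TokenizerUsageSketch {
  public static void main(String[] args) {
    // Build a minimal document graph holding one primary text
    // (factory and createTextualDS calls are assumed from the general Salt API,
    // not from the member index above).
    SDocumentGraph graph = SaltFactory.createSDocumentGraph();
    STextualDS text = graph.createTextualDS("This is a sample text. It has two sentences.");

    // Attach the tokenizer to the graph and tokenize the whole primary text;
    // without an explicit LanguageCode the tokenizer is expected to detect
    // the language itself (cf. checkLanguage(String)).
    Tokenizer tokenizer = new Tokenizer();
    tokenizer.setsDocumentGraph(graph);
    List<SToken> tokens = tokenizer.tokenize(text); // assumed to return the created tokens

    System.out.println("created " + tokens.size() + " tokens");
  }
}
```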