object Twokenize
Value Members
- final def !=(arg0: AnyRef): Boolean
- final def !=(arg0: Any): Boolean
- final def ##(): Int
- final def ==(arg0: AnyRef): Boolean
- final def ==(arg0: Any): Boolean
- val Arrows: String
- val AtMention: String
- val Bound: String
- val Contractions: Regex
- val EdgePunctLeft: Regex
- val EdgePunctRight: Regex
- val Email: String
- val Hashtag: String
- val Hearts: String
- def OR(parts: String*): String
- val Protected: Regex
- val Whitespace: Regex
- val aa1: String
- val aa2: String
- def allowEntities(pat: String): String
- def apply(text: String): List[String]
- val arbitraryAbbrev: String
- final def asInstanceOf[T0]: T0
- val boundaryNotDot: String
- def clone(): AnyRef
- val commonTLDs: String
- val decorations: String
- val edgePunct: String
- val edgePunctChars: String
- val embeddedApostrophe: String
- val emoticon: String
- val entity: String
- final def eq(arg0: AnyRef): Boolean
- def equals(arg0: Any): Boolean
- def finalize(): Unit
- final def getClass(): Class[_]
- val happyMouths: String
- def hashCode(): Int
- final def isInstanceOf[T0]: Boolean
- def main(args: Array[String]): Unit
- final def ne(arg0: AnyRef): Boolean
- val normalEyes: String
- def normalizeText(text: String): String
- val noseArea: String
- val notEdgePunct: String
- final def notify(): Unit
- final def notifyAll(): Unit
- val numNum: String
- val numberWithCommas: String
- val offEdge: String
- val otherMouths: String
- val punctChars: String
- val punctSeq: String
- val sadMouths: String
- val separators: String
- def simpleTokenize(text: String): List[String]
- def splitEdgePunct(input: String): String
- def splitToken(token: String): List[String]
- def squeezeWhitespace(input: String): String
- val standardAbbreviations: String
- final def synchronized[T0](arg0: ⇒ T0): T0
- val thingsThatSplitWords: String
- val timeLike: String
- def toString(): String
- def tokenize(text: String): List[String]
- def tokenizeForTagger(text: String): List[String]
- def tokenizeForTagger_J(text: String): List[String]
- def tokenizeToString(text: String): String
- val tongue: String
- val url: String
- val urlBody: String
- val urlEnd: String
- val urlExtraCrapBeforeEnd: String
- val urlStart1: String
- val urlStart2: String
- final def wait(): Unit
- final def wait(arg0: Long, arg1: Int): Unit
- final def wait(arg0: Long): Unit
- val wink: String
Inherited from AnyRef
Inherited from Any