! This tokenizer is based on work by
! - StandardTokenizerImpl by the Lucene project
!   under the Apache License
! - https://github.com/dlwh/epic by David Hall (2014)
!   under the Apache License
! - KorAPTokenizerImpl.jflex by Marc Kupietz (2016)
!   under the Apache License
! - https://github.com/coltekin/TRmorph/tokenize.xfst by Çağrı Çöltekin (2011-2015)
!   under the MIT License
!
! The abbreviation list is part of the sentence splitter tool
! of the IDS.

! define NLout "\u000a";
define NLout "@_TOKEN_SYMBOL_@";
define NLin ("\u000d") "\u000a";

define Digit [%0|1|2|3|4|5|6|7|8|9];
define AsciiLetter [a|b|c|d|e|f|g|h|i|j|k|l|m|n|o|p|q|r|s|t|u|v|w|x|y|z];

!!!!!!!!!!!!!!!!!
! <from tmorph> !
!!!!!!!!!!!!!!!!!
define WS [" "|"\u0009"|"\u000a"|"\u000d"|
           "\u00a0"|"\u1680"|
           "\u2000"|"\u2001"|"\u2002"|"\u2003"|"\u2004"|"\u2005"|
           "\u2006"|"\u2007"|"\u2008"|"\u2009"|"\u200a"|
           "\u2028"|"\u2029"|"\u202f"|"\u205f"|"\u3000"| NLin];

! Punctuation that ends sentences
! Differs!
define SP [["."|"?"|"!"]+|"…"]; ! Warning! This results in '...' being a MCS!
! Left punctuation
define LP ["("|"["|"{"|
           "“"|"‘"|"‹"|"«"|
           "'"|%"|
           ! differs
           ["'" "'"] |
           "*"|"/"|"_"| ! Can be Markdown
           ! from book
           [%, %,]];
! Right punctuation - excluding the characters that can be used as apostrophe
define RP [SP|","|";"|":"|
           ")"|"]"|"}"|
           "”"|"›"|"»"|
           %"|
           ! differs
           ["'" "'"]|
           "*"|"/"|"_"| ! Can be Markdown
           ! from book
           [%‘ %‘]|[%’ %’]];

define Sym ["-"|"+"|"<"|">"|"*"|"/"|%=|%@];
define Apos %'|%’|%`;
define Punct LP|RP|Sym;
!define nonSym \[WS|LP|RP|Sym];
!!!!!!!!!!!!!!!!!!
! </from tmorph> !
!!!!!!!!!!!!!!!!!!

define Emdash [%- %- (%-)+ | ["\u2014"|"\u2015"|"\u2e3a"|"\u2e3b"|"\ufe58"]+];
define Dash ["-"|"\u2011"|"\u2012"|"\u2013"|"\u2e1a"|"\ufe63"|"\uff0d"];
define Slash ["⁄"|"∕"|"/"|"\uff0f"];
define Asterisk ["*"];

define Char \[WS|Punct|Apos]; ! |¨;

! source lexicon.xfst
! define Word;
define Word Char+ ([Dash|Apos|Asterisk] Char+)*;

define URLChar [Char|[Sym - ["<"|">"|%"]]];
!define Alpha ["a"|"b"|"c"|"d"|"e"|"f"|"g"|"h"|"i"|"j"|"k"|"l"|"m"|"n"|"o"|"p"|"q"|"r"|"s"|"t"|"u"|"v"|"w"|"x"|"y"|"z"|"_"];

define Caseinsensitive [
a (->) A,
b (->) B,
c (->) C,
d (->) D,
e (->) E,
f (->) F,
g (->) G,
h (->) H,
i (->) I,
j (->) J,
k (->) K,
l (->) L,
m (->) M,
n (->) N,
o (->) O,
p (->) P,
q (->) Q,
r (->) R,
s (->) S,
t (->) T,
u (->) U,
v (->) V,
w (->) W,
x (->) X,
y (->) Y,
z (->) Z,
ö (->) Ö,
ü (->) Ü,
ä (->) Ä,
ß (->) {SS}
];

define Abbr @txt"txt/abbrv.txt";
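! Note: @txt reads one entry per line and unions them; the trailing period is
! added later via [Abbr %.] in RealToken, so the list stores forms without it.
! Hypothetical entries, for illustration only: "Abk", "Dr", "usw".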

! A solution to the "(author): problem" may be to add ) at the end of any
! string as a possible ending

define Years ["(" Digit+ (".") ")"] | ["[" Digit+ (".") "]"];
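! For illustration, Years should accept bracketed forms such as "(1990)",
! "(1990.)" or "[2005]".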

source emoji.xfst
define Emoji;

! acronyms: U.S.A., I.B.M., etc.
! use a post-filter to remove dots
define AcronymDep Char %. [Char %.]+;
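! For illustration, AcronymDep should accept "U.S.A." or "i.d.R." with the
! dots kept in the token; the dot-removing post-filter mentioned above is a
! separate, later step and not part of this grammar.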

define Dot "."|[["["|"("] "d" "o" "t" [")"|"]"]] .o. Caseinsensitive;
define At "@"|[["["|"("] "a" "t" [")"|"]"]] .o. Caseinsensitive;
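! For illustration, Dot and At also cover obfuscated spellings in any casing,
! e.g. "(dot)", "[DOT]", "(at)" or "[At]", besides the plain "." and "@".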

define TldEnd [{org}|{de}|{com}] .o. Caseinsensitive;

! Very relaxed URL scheme, not based on the strict Lucene implementation
define URL [ [ [{http} (s) | {ftp} | {file}] ":" "/" "/"] | [{www} Dot] ]
           URLChar [URLChar|SP]* URLChar
           .o. Caseinsensitive;

define Domain Char+ [Dash Char+]* Dot TldEnd;
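! For illustration, URL should accept forms like "https://github.com/KorAP"
! or "www.example.org", and Domain bare host names like "ids-mannheim.de".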

!define XML "<" Alpha URLChar* (">");
define XML "<" URLChar+ (">");

!define Email [Alpha [URLChar-At]* At Alpha URLChar* [Dot [[Alpha URLChar+]-Dot-At]]+];
define Email URLChar+ At URLChar+ [Dot URLChar+]+;
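! For illustration, Email should accept addresses like "user@example.org",
! including obfuscated variants such as "user(at)example(dot)org".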

! Twitter user, hashtag, Google+
define SNS ["@"|"#"|"+"] Char+;
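! For illustration, SNS should accept handles and tags like "@korap",
! "#tokenizer" or "+KorAP".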

define FileEnd [
  [{htm} ("l")]|
  [{doc} ("x")]|
  {pdf}|
  ["j" "p" ("e") "g"]|
  ["m" "p" ["3"|"4"]]|
  {ogg}|
  {png}|
  {avi}|
  {txt}|
  {xls}|
  {xml}|
  {aac}|
  {gif}|
  {exe}
  ] .o. Caseinsensitive;

define File (( AsciiLetter ":" %\ | "/" ) [ Char | "_" | "-" | Char [ %\ | "/" ] ]*) [Char | "-" | "_" ]+ "." FileEnd;
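! For illustration, File should accept names and paths like "readme.txt",
! "c:\temp\bericht.pdf" or "/home/user/bild.jpeg" (note that AsciiLetter only
! covers lower-case drive letters).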

define Streetname Word {str} %.;
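! For illustration, Streetname should accept abbreviated street names like
! "Hauptstr." or "Bahnhofstr.".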

! Also supports
! 19.4.2015, 19/4/2015 etc.
define DigitPunct ["_"|"-"|"."|","|Slash];
define Num Digit+ [DigitPunct Digit+]* (Char+);
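! For illustration, Num should accept plain and punctuated numbers like
! "2015", "1.000,50" or "19/4/2015", as well as unit suffixes like "100km"
! via the optional trailing Char+.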

! ordinals
define Ord Digit ( Digit (Digit) ) %.;
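! For illustration, Ord should accept ordinals with one to three digits,
! e.g. "3.", "42." or "123.".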

! TODO:
! floating point, serial, model numbers, ip addresses, etc.
! every other segment must have at least one digit
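! A rough sketch of that constraint (commented out, not part of RealToken):
!   define DigitSeg Char* Digit Char*;
!   define SerialLike DigitSeg [DigitPunct Char+ DigitPunct DigitSeg]* (DigitPunct Char+);
! i.e. alternating segments where at least every second one contains a digit.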

! Omission words like "fu**ing!"
define Omission Char+ Asterisk Asterisk+ Char*;
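! For illustration, Omission should accept censored forms with at least two
! asterisks, e.g. "fu**ing" or "Sch***e", but not forms with a single
! asterisk such as "fu*ing".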

! TODO: Name words with ' and `

! Support ASCII elements, like
! +---------------+
! <---->, -->, <--
! +---------------+
! <---> | Worker Node N |
! +---------------+
! |============= Core =============|

define RealToken [XML|Email|URL|SNS|[Abbr %.]|Streetname|Omission|Domain|AcronymDep|File|Emdash|Punct|Ord|Num|Years|Emoji|Word];

echo - Introduce Token splitter
define Token [RealToken @-> ... NLout]
       .o. [WS+ @-> 0]
;
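! For illustration: the longest-match markup rule inserts NLout after every
! RealToken and the second rule deletes the whitespace in between, so with
! NLout set to a newline (see the commented define at the top) an input like
!   Dies ist ein Test.
! should come out as one token per line ("Dies", "ist", "ein", "Test", ".").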

echo - Introduce Sentence splitter
read regex Token .o. [[["."|"!"|"?"]+] @-> ... NLout \/ NLout _];
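! For illustration: after the token splitter, sentence-final punctuation forms
! its own token preceded by NLout; this rule appends one more NLout after it,
! so with NLout set to a newline a sentence boundary should show up as an
! empty line in the output.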

! foma -e "source tokenizer.xfst" -q -s && cat text.txt | flookup tokenizer.fst -x -b

! In a second pass, XML tags need to be combined. This requires tagging "<..." with ~xmls before \n
! and anything with ">" with ~xmle.
! In case this is part of an emoticon ( >:-P ), it needs to be split again.
! The same is true for ( and ).