package datok

import (
	"bytes"
	"fmt"
	"os"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

var s string = `Der Vorsitzende der Abk. hat gewählt. Gefunden auf wikipedia.org. Ich bin unter korap@ids-mannheim.de erreichbar.
Unsere Website ist https://korap.ids-mannheim.de/?q=Baum. Unser Server ist 10.0.10.51. Zu 50.4% ist es sicher.
Der Termin ist am 5.9.2018.
Ich habe die readme.txt heruntergeladen.
Ausschalten!!! Hast Du nicht gehört???
Ich wohne in der Weststr. und Du? Kupietz und Schmidt [2018]: Korpuslinguistik. Dieses verf***** Kleid! Ich habe die readme.txt heruntergeladen.
Er sagte: \"Es geht mir gut!\", daraufhin ging er. "Das ist von C&A!" Früher bzw. später ... Sie erreichte den 1. Platz!
Archive: Ich bin kein zip. D'dorf Ku'damm Lu'hafen M'gladbach W'schaft.
Mach's macht's was'n ist's haste willste kannste biste kriegste.`

var mat *MatrixTokenizer

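// The tests below share a single *MatrixTokenizer that is lazily loaded
// from testdata/tokenizer.matok on first use (see the `if mat == nil`
// guards). They also rely on the helpers ttokenize and ttokenizeStr,
// which are defined elsewhere in the package's tests. A minimal sketch of
// the assumed signatures — hypothetical, for orientation only:
//
//	// ttokenize transduces str through tok into w and returns the
//	// resulting tokens, one token per output line.
//	func ttokenize(tok Tokenizer, w *bytes.Buffer, str string) []string
//
//	// ttokenizeStr returns the tokens of str joined by newlines.
//	func ttokenizeStr(tok Tokenizer, str string) string
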
func TestMatrixFullTokenizer(t *testing.T) {
	assert := assert.New(t)
	foma := LoadFomaFile("testdata/simpletok.fst")
	assert.NotNil(foma)

	mat := foma.ToMatrix()

	r := strings.NewReader(" wald gehen Da kann\t man was \"erleben\"!")
	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal(len(tokens), 11)
	assert.Equal("wald", tokens[0])
	assert.Equal("gehen", tokens[1])
	assert.Equal("Da", tokens[2])
	assert.Equal("kann", tokens[3])
	assert.Equal("man", tokens[4])
	assert.Equal("was", tokens[5])
	assert.Equal("\"erleben\"", tokens[6])
	assert.Equal("!", tokens[7])

	r = strings.NewReader(" In den Wald gehen? -- Da kann\t man was \"erleben\"!")
	w.Reset()
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("In", tokens[0])
	assert.Equal("den", tokens[1])
	assert.Equal("Wald", tokens[2])
	assert.Equal("gehen", tokens[3])
	assert.Equal("?", tokens[4])
	assert.Equal("--", tokens[5])

	r = strings.NewReader(" g? -- D")
	w.Reset()
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("g", tokens[0])
	assert.Equal("?", tokens[1])
	assert.Equal("--", tokens[2])
	assert.Equal("D", tokens[3])
	assert.Equal("", tokens[4])
	assert.Equal("", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal(7, len(tokens))
}

func TestMatrixSimpleString(t *testing.T) {
	assert := assert.New(t)
	// bau | bauamt
	tok := LoadFomaFile("testdata/bauamt.fst")
	mat := tok.ToMatrix()

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(mat, w, "ibauamt")
	assert.Equal("i", tokens[0])
	assert.Equal("bauamt", tokens[1])

	tokens = ttokenize(mat, w, "ibbauamt")
	assert.Equal("i", tokens[0])

	assert.Equal("b", tokens[1])
	assert.Equal("bauamt", tokens[2])

	tokens = ttokenize(mat, w, "bau")
	assert.Equal("bau", tokens[0])

	tokens = ttokenize(mat, w, "baum")
	assert.Equal("bau", tokens[0])
	assert.Equal("m", tokens[1])

	tokens = ttokenize(mat, w, "baudibauamt")
	assert.Equal("bau", tokens[0])
	assert.Equal("d", tokens[1])
	assert.Equal("i", tokens[2])
	assert.Equal("bauamt", tokens[3])
}

func TestMatrixReadWriteTokenizer(t *testing.T) {
	assert := assert.New(t)
	foma := LoadFomaFile("testdata/simpletok.fst")
	assert.NotNil(foma)

	mat := foma.ToMatrix()
	assert.NotNil(mat)

	assert.Equal(ttokenizeStr(mat, "bau"), "bau")
	assert.Equal(ttokenizeStr(mat, "bad"), "bad")
	assert.Equal(ttokenizeStr(mat, "wald gehen"), "wald\ngehen")
	b := make([]byte, 0, 1024)
	buf := bytes.NewBuffer(b)
	n, err := mat.WriteTo(buf)
	assert.Nil(err)
	assert.Equal(int64(230), n)
	mat2 := ParseMatrix(buf)
	assert.NotNil(mat2)
	assert.Equal(mat.sigma, mat2.sigma)
	assert.Equal(mat.epsilon, mat2.epsilon)
	assert.Equal(mat.unknown, mat2.unknown)
	assert.Equal(mat.identity, mat2.identity)
	assert.Equal(mat.stateCount, mat2.stateCount)
	assert.Equal(len(mat.array), len(mat2.array))
	assert.Equal(mat.array, mat2.array)
	assert.Equal(ttokenizeStr(mat2, "bau"), "bau")
	assert.Equal(ttokenizeStr(mat2, "bad"), "bad")
	assert.Equal(ttokenizeStr(mat2, "wald gehen"), "wald\ngehen")
}

func TestMatrixIgnorableMCS(t *testing.T) {
	assert := assert.New(t)

	// This test relies on final states, which is why it
	// no longer works correctly.

	// File has MCS in sigma but not in net
	tok := LoadFomaFile("testdata/ignorable_mcs.fst")
	assert.NotNil(tok)
	mat := tok.ToMatrix()
	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// This is only unambiguous when transducing strictly greedily!
	assert.True(mat.Transduce(strings.NewReader("ab<ab>a"), w))
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("a\nb\n<ab>a\n\n\n", w.String())
	assert.Equal("a", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("<ab>a", tokens[2])
	assert.Equal(6, len(tokens))
}

func xTestMatrixReadWriteFullTokenizer(t *testing.T) {
	assert := assert.New(t)
	foma := LoadFomaFile("testdata/tokenizer.fst")
	assert.NotNil(foma)

	mat := foma.ToMatrix()
	assert.NotNil(mat)

	tb := make([]byte, 0, 2048)
	w := bytes.NewBuffer(tb)

	assert.True(mat.Transduce(strings.NewReader("der alte baum"), w))
	assert.Equal("der\nalte\nbaum\n\n\n", w.String())

	b := make([]byte, 0, 1024)
	buf := bytes.NewBuffer(b)
	_, err := mat.WriteTo(buf)
	assert.Nil(err)
	w.Reset()
	// assert.Equal(int64(248), n)

	mat2 := ParseMatrix(buf)
	assert.NotNil(mat2)
	assert.Equal(mat.sigma, mat2.sigma)
	assert.Equal(mat.epsilon, mat2.epsilon)
	assert.Equal(mat.unknown, mat2.unknown)
	assert.Equal(mat.identity, mat2.identity)
	assert.Equal(mat.stateCount, mat2.stateCount)
	assert.Equal(len(mat.array), len(mat2.array))
	// assert.Equal(mat.array, mat2.array)

	assert.True(mat2.Transduce(strings.NewReader("der alte baum"), w))
	assert.Equal("der\nalte\nbaum\n\n\n", w.String())
}

func TestMatrixFullTokenizerTransduce(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	assert.True(mat.Transduce(strings.NewReader("tra. u Du?"), w))

	tokens = strings.Split(w.String(), "\n")
	assert.Equal("tra\n.\n\nu\nDu\n?\n\n\n", w.String())
	assert.Equal("tra", tokens[0])
	assert.Equal(".", tokens[1])
	assert.Equal("", tokens[2])
	assert.Equal("u", tokens[3])
	assert.Equal("Du", tokens[4])
	assert.Equal("?", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal("", tokens[7])
	assert.Equal(9, len(tokens))

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"John Doe\"@xx.com"), w))
	assert.Equal("\"\nJohn\nDoe\n\"\n@xx\n.\n\ncom\n\n\n", w.String())
}

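// A note on the output convention exercised by the assertions in this
// file: the transducer emits one token per line, an empty line marks a
// sentence boundary, and a further empty line marks the end of a text,
// so a transduced text ends in "\n\n\n" after its final token.
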
func TestMatrixFullTokenizerMatrixSentenceSplitter(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	// testSentSplitterSimple
	assert.True(mat.Transduce(strings.NewReader("Der alte Mann."), w))
	sentences = strings.Split(w.String(), "\n\n")

	assert.Equal("Der\nalte\nMann\n.\n\n\n", w.String())
	assert.Equal("Der\nalte\nMann\n.", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der F.D.P. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nF.D.P.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der Abk. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nAbk.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("", sentences[0])
	assert.Equal("", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Gefunden auf wikipedia.org."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich bin unter korap@ids-mannheim.de erreichbar."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unsere Website ist https://korap.ids-mannheim.de/?q=Baum"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("Unsere\nWebsite\nist\nhttps://korap.ids-mannheim.de/?q=Baum", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unser Server ist 10.0.10.51."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Zu 50.4% ist es sicher"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Termin ist am 5.9.2018"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich habe die readme.txt heruntergeladen"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Ich\nhabe\ndie\nreadme.txt\nheruntergeladen", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ausschalten!!! Hast Du nicht gehört???"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("Ausschalten\n!!!", sentences[0])
	assert.Equal("Hast\nDu\nnicht\ngehört\n???", sentences[1])
	assert.Equal("\n", sentences[2])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich wohne in der Weststr. und Du?"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"Alter!\", sagte er: \"Komm nicht wieder!\" Geh!!! \"Lass!\" Dann ging er."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 5)
	assert.Equal("\"\nAlter\n!\n\"\n,\nsagte\ner\n:\n\"\nKomm\nnicht\nwieder\n!\n\"", sentences[0])
	assert.Equal("Geh\n!!!", sentences[1])
	assert.Equal("\"\nLass\n!\n\"", sentences[2])
	assert.Equal("Dann\nging\ner\n.", sentences[3])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"Ausschalten!!!\", sagte er. \"Hast Du nicht gehört???\""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("\"\nAusschalten\n!!!\n\"\n,\nsagte\ner\n.", sentences[0])
	assert.Equal("\"\nHast\nDu\nnicht\ngehört\n???\n\"", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("“Ausschalten!!!”, sagte er. «Hast Du nicht gehört???»"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("“\nAusschalten\n!!!\n”\n,\nsagte\ner\n.", sentences[0])
	assert.Equal("«\nHast\nDu\nnicht\ngehört\n???\n»", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("“Ausschalten!!!”, sagte er. «Hast Du nicht gehört???»"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("“\nAusschalten\n!!!\n”\n,\nsagte\ner\n.", sentences[0])
	assert.Equal("«\nHast\nDu\nnicht\ngehört\n???\n»", sentences[1])

	text := `»Meinetwegen. Denkst du, daß ich darauf warte? Das fehlte noch.
Übrigens, ich kriege schon einen und vielleicht bald. Da ist mir nicht
bange. Neulich erst hat mir der kleine Ventivegni von drüben gesagt:
'Fräulein Effi, was gilt die Wette, wir sind hier noch in diesem Jahre
zu Polterabend und Hochzeit.'«

»Und was sagtest du da?«`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 8)
	assert.Equal("Neulich\nerst\nhat\nmir\nder\nkleine\nVentivegni\nvon\ndrüben\ngesagt\n:\n'\nFräulein\nEffi\n,\nwas\ngilt\ndie\nWette\n,\nwir\nsind\nhier\nnoch\nin\ndiesem\nJahre\nzu\nPolterabend\nund\nHochzeit\n.\n'\n«", sentences[5])
	assert.Equal("»\nUnd\nwas\nsagtest\ndu\nda\n?\n«", sentences[6])

	text = `»Nun, gib dich zufrieden, ich fange schon an ... Also Baron
Innstetten!`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("»\nNun\n,\ngib\ndich\nzufrieden\n,\nich\nfange\nschon\nan\n...", sentences[0])
	assert.Equal("Also\nBaron\nInnstetten\n!", sentences[1])

	// Check parentheses at the end of the sentence
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("(Er ging.) Und kam (später)."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("(\nEr\nging\n.\n)", sentences[0])
	assert.Equal("Und\nkam\n(\nspäter\n)\n.", sentences[1])

	// Check parentheses and quotes at the end of the sentence
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("(Er sagte: \"Hallo!\") Dann ging er."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("(\nEr\nsagte\n:\n\"\nHallo\n!\n\"\n)", sentences[0])
	assert.Equal("Dann\nging\ner\n.", sentences[1])

}

func TestMatrixFullTokenizerMatrixSentenceSplitterBug1(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	text := `Wüllersdorf war aufgestanden. »Ich finde es furchtbar, daß Sie recht haben, aber Sie haben recht. Ich quäle Sie nicht länger mit meinem 'Muß es sein?'. Die Welt ist einmal, wie sie ist, und die Dinge verlaufen nicht, wie wir wollen, sondern wie die andern wollen. Das mit dem 'Gottesgericht', wie manche hochtrabend versichern, ist freilich ein Unsinn, nichts davon, umgekehrt, unser Ehrenkultus ist ein Götzendienst, aber wir müssen uns ihm unterwerfen, solange der Götze gilt.«`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 6)
	assert.Equal("Wüllersdorf\nwar\naufgestanden\n.", sentences[0])
	assert.Equal("»\nIch\nfinde\nes\nfurchtbar\n,\ndaß\nSie\nrecht\nhaben\n,\naber\nSie\nhaben\nrecht\n.", sentences[1])
	assert.Equal("Ich\nquäle\nSie\nnicht\nlänger\nmit\nmeinem\n'\nMuß\nes\nsein\n?\n'\n.", sentences[2])
	assert.Equal("Die\nWelt\nist\neinmal\n,\nwie\nsie\nist\n,\nund\ndie\nDinge\nverlaufen\nnicht\n,\nwie\nwir\nwollen\n,\nsondern\nwie\ndie\nandern\nwollen\n.", sentences[3])
	assert.Equal("Das\nmit\ndem\n'\nGottesgericht\n'\n,\nwie\nmanche\nhochtrabend\nversichern\n,\nist\nfreilich\nein\nUnsinn\n,\nnichts\ndavon\n,\numgekehrt\n,\nunser\nEhrenkultus\nist\nein\nGötzendienst\n,\naber\nwir\nmüssen\nuns\nihm\nunterwerfen\n,\nsolange\nder\nGötze\ngilt\n.\n«", sentences[4])
}

func TestMatrixFullTokenizerTokenSplitter(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// testTokenizerSimple
	tokens = ttokenize(mat, w, "Der alte Mann")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(len(tokens), 3)

	tokens = ttokenize(mat, w, "Der alte Mann.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerAbbr
	tokens = ttokenize(mat, w, "Der Vorsitzende der F.D.P. hat gewählt")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Vorsitzende")
	assert.Equal(tokens[2], "der")
	assert.Equal(tokens[3], "F.D.P.")
	assert.Equal(tokens[4], "hat")
	assert.Equal(tokens[5], "gewählt")
	assert.Equal(len(tokens), 6)
	// Ignored in KorAP-Tokenizer

	// testTokenizerHost1
	tokens = ttokenize(mat, w, "Gefunden auf wikipedia.org")
	assert.Equal(tokens[0], "Gefunden")
	assert.Equal(tokens[1], "auf")
	assert.Equal(tokens[2], "wikipedia.org")
	assert.Equal(len(tokens), 3)

	// testTokenizerWwwHost
	tokens = ttokenize(mat, w, "Gefunden auf www.wikipedia.org")
	assert.Equal("Gefunden", tokens[0])
	assert.Equal("auf", tokens[1])
	assert.Equal("www.wikipedia.org", tokens[2])
	assert.Equal(3, len(tokens))

	// testTokenizerWwwUrl
	tokens = ttokenize(mat, w, "Weitere Infos unter www.info.biz/info")
	assert.Equal("www.info.biz/info", tokens[3])

	// testTokenizerFtpHost
	/*
		tokens = tokenize(dat, w, "Kann von ftp.download.org heruntergeladen werden")
		assert.Equal("Kann", tokens[0])
		assert.Equal("von", tokens[1])
		assert.Equal("ftp.download.org", tokens[2])
		assert.Equal(5, len(tokens))
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerDash
	tokens = ttokenize(mat, w, "Das war -- spitze")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "--")
	assert.Equal(tokens[3], "spitze")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail1
	tokens = ttokenize(mat, w, "Ich bin unter korap@ids-mannheim.de erreichbar.")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "bin")
	assert.Equal(tokens[2], "unter")
	assert.Equal(tokens[3], "korap@ids-mannheim.de")
	assert.Equal(tokens[4], "erreichbar")
	assert.Equal(tokens[5], ".")
	assert.Equal(len(tokens), 6)

	// testTokenizerEmail2
	tokens = ttokenize(mat, w, "Oder unter korap[at]ids-mannheim[dot]de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap[at]ids-mannheim[dot]de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail3
	tokens = ttokenize(mat, w, "Oder unter korap(at)ids-mannheim(dot)de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap(at)ids-mannheim(dot)de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)
	// Ignored in KorAP-Tokenizer

	// testTokenizerDoNotAcceptQuotedEmailNames
	tokens = ttokenize(mat, w, "\"John Doe\"@xx.com")
	assert.Equal("\"", tokens[0])
	assert.Equal("John", tokens[1])
	assert.Equal("Doe", tokens[2])
	assert.Equal("\"", tokens[3])
	assert.Equal("@xx", tokens[4])
	assert.Equal(".", tokens[5]) // Differs - as the sentence splitter splits here!
	assert.Equal("com", tokens[6])
	assert.Equal(7, len(tokens))

	// testTokenizerTwitter
	tokens = ttokenize(mat, w, "Folgt @korap und #korap")
	assert.Equal(tokens[0], "Folgt")
	assert.Equal(tokens[1], "@korap")
	assert.Equal(tokens[2], "und")
	assert.Equal(tokens[3], "#korap")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb1
	tokens = ttokenize(mat, w, "Unsere Website ist https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[0], "Unsere")
	assert.Equal(tokens[1], "Website")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb2
	tokens = ttokenize(mat, w, "Wir sind auch im Internet (https://korap.ids-mannheim.de/?q=Baum)")
	assert.Equal(tokens[0], "Wir")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "auch")
	assert.Equal(tokens[3], "im")
	assert.Equal(tokens[4], "Internet")
	assert.Equal(tokens[5], "(")
	assert.Equal(tokens[6], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[7], ")")
	assert.Equal(len(tokens), 8)
	// Ignored in KorAP-Tokenizer

	// testTokenizerWeb3
	tokens = ttokenize(mat, w, "Die Adresse ist https://korap.ids-mannheim.de/?q=Baum.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "Adresse")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)
	// Ignored in KorAP-Tokenizer

	// testTokenizerServer
	tokens = ttokenize(mat, w, "Unser Server ist 10.0.10.51.")
	assert.Equal(tokens[0], "Unser")
	assert.Equal(tokens[1], "Server")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "10.0.10.51")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)

	// testTokenizerNum
	tokens = ttokenize(mat, w, "Zu 50,4% ist es sicher")
	assert.Equal(tokens[0], "Zu")
	assert.Equal(tokens[1], "50,4%")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "es")
	assert.Equal(tokens[4], "sicher")
	assert.Equal(len(tokens), 5)
	// Differs from KorAP-Tokenizer

	// testTokenizerDate
	tokens = ttokenize(mat, w, "Der Termin ist am 5.9.2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5.9.2018")
	assert.Equal(len(tokens), 5)

	tokens = ttokenize(mat, w, "Der Termin ist am 5/9/2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5/9/2018")
	assert.Equal(len(tokens), 5)

	// testTokenizerDateRange
	/*
		tokens = tokenize(dat, w, "Der Termin war vom 4.-5.9.2018")
		assert.Equal(tokens[0], "Der")
		assert.Equal(tokens[1], "Termin")
		assert.Equal(tokens[2], "war")
		assert.Equal(tokens[3], "vom")
		assert.Equal(tokens[4], "4.")
		assert.Equal(tokens[5], "-")
		assert.Equal(tokens[6], "5.9.2018")
		assert.Equal(len(tokens), 7)
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerEmoji1
	tokens = ttokenize(mat, w, "Das ist toll! ;)")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "toll")
	assert.Equal(tokens[3], "!")
	assert.Equal(tokens[4], ";)")
	assert.Equal(len(tokens), 5)

	// testTokenizerRef1
	tokens = ttokenize(mat, w, "Kupietz und Schmidt (2018): Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "(2018)")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerRef2 () {
	tokens = ttokenize(mat, w, "Kupietz und Schmidt [2018]: Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "[2018]")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerOmission1 () {
	tokens = ttokenize(mat, w, "Er ist ein A****loch!")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "ein")
	assert.Equal(tokens[3], "A****loch")
	assert.Equal(tokens[4], "!")
	assert.Equal(len(tokens), 5)

	// testTokenizerOmission2
	tokens = ttokenize(mat, w, "F*ck!")
	assert.Equal(tokens[0], "F*ck")
	assert.Equal(tokens[1], "!")
	assert.Equal(len(tokens), 2)

	// testTokenizerOmission3 () {
	tokens = ttokenize(mat, w, "Dieses verf***** Kleid!")
	assert.Equal(tokens[0], "Dieses")
	assert.Equal(tokens[1], "verf*****")
	assert.Equal(tokens[2], "Kleid")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension1
	tokens = ttokenize(mat, w, "Ich habe die readme.txt heruntergeladen")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "habe")
	assert.Equal(tokens[2], "die")
	assert.Equal(tokens[3], "readme.txt")
	assert.Equal(tokens[4], "heruntergeladen")
	assert.Equal(len(tokens), 5)

	// Probably interpreted as HOST
	// testTokenizerFileExtension2
	tokens = ttokenize(mat, w, "Nimm die README.TXT!")
	assert.Equal(tokens[0], "Nimm")
	assert.Equal(tokens[1], "die")
	assert.Equal(tokens[2], "README.TXT")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension3
	tokens = ttokenize(mat, w, "Zeig mir profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "profile.jpeg")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile1

	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.docx")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile2
	tokens = ttokenize(mat, w, "Gehe zu /Dokumente/profile.docx")
	assert.Equal(tokens[0], "Gehe")
	assert.Equal(tokens[1], "zu")
	assert.Equal(tokens[2], "/Dokumente/profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile3
	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.jpeg")
	assert.Equal(len(tokens), 3)
	// Ignored in KorAP-Tokenizer

	// testTokenizerPunct
	tokens = ttokenize(mat, w, "Er sagte: \"Es geht mir gut!\", daraufhin ging er.")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "sagte")
	assert.Equal(tokens[2], ":")
	assert.Equal(tokens[3], "\"")
	assert.Equal(tokens[4], "Es")
	assert.Equal(tokens[5], "geht")
	assert.Equal(tokens[6], "mir")
	assert.Equal(tokens[7], "gut")
	assert.Equal(tokens[8], "!")
	assert.Equal(tokens[9], "\"")
	assert.Equal(tokens[10], ",")
	assert.Equal(tokens[11], "daraufhin")
	assert.Equal(tokens[12], "ging")
	assert.Equal(tokens[13], "er")
	assert.Equal(tokens[14], ".")
	assert.Equal(len(tokens), 15)

	// testTokenizerPlusAmpersand
	tokens = ttokenize(mat, w, "&quot;Das ist von C&A!&quot;")
	assert.Equal(tokens[0], "&quot;")
	assert.Equal(tokens[1], "Das")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "von")
	assert.Equal(tokens[4], "C&A")
	assert.Equal(tokens[5], "!")
	assert.Equal(tokens[6], "&quot;")
	assert.Equal(len(tokens), 7)

	// testTokenizerLongEnd
	tokens = ttokenize(mat, w, "Siehst Du?!!?")
	assert.Equal(tokens[0], "Siehst")
	assert.Equal(tokens[1], "Du")
	assert.Equal(tokens[2], "?!!?")
	assert.Equal(len(tokens), 3)

	// testTokenizerIrishO
	tokens = ttokenize(mat, w, "Peter O'Toole")
	assert.Equal(tokens[0], "Peter")
	assert.Equal(tokens[1], "O'Toole")
	assert.Equal(len(tokens), 2)

	// testTokenizerAbr
	tokens = ttokenize(mat, w, "Früher bzw. später ...")
	assert.Equal(tokens[0], "Früher")
	assert.Equal(tokens[1], "bzw.")
	assert.Equal(tokens[2], "später")
	assert.Equal(tokens[3], "...")
	assert.Equal(len(tokens), 4)

	// testTokenizerUppercaseRule
	tokens = ttokenize(mat, w, "Es war spät.Morgen ist es früh.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "spät")
	assert.Equal(tokens[3], ".")
	assert.Equal(tokens[4], "Morgen")
	assert.Equal(tokens[5], "ist")
	assert.Equal(tokens[6], "es")
	assert.Equal(tokens[7], "früh")
	assert.Equal(tokens[8], ".")
	assert.Equal(len(tokens), 9)
	// Ignored in KorAP-Tokenizer

	// testTokenizerOrd
	tokens = ttokenize(mat, w, "Sie erreichte den 1. Platz!")
	assert.Equal(tokens[0], "Sie")
	assert.Equal(tokens[1], "erreichte")
	assert.Equal(tokens[2], "den")
	assert.Equal(tokens[3], "1.")
	assert.Equal(tokens[4], "Platz")
	assert.Equal(tokens[5], "!")
	assert.Equal(len(tokens), 6)

	// testNoZipOuputArchive
	tokens = ttokenize(mat, w, "Archive: Ich bin kein zip\n")
	assert.Equal(tokens[0], "Archive")
	assert.Equal(tokens[1], ":")
	assert.Equal(tokens[2], "Ich")
	assert.Equal(tokens[3], "bin")
	assert.Equal(tokens[4], "kein")
	assert.Equal(tokens[5], "zip")
	assert.Equal(6, len(tokens))

	// testTokenizerStrasse
	tokens = ttokenize(mat, w, "Ich wohne in der Weststr. und Du?")
	assert.Equal(tokens[4], "Weststr.")
	assert.Equal(8, len(tokens))

	// germanTokenizerKnowsGermanOmissionWords
	tokens = ttokenize(mat, w, "D'dorf Ku'damm Lu'hafen M'gladbach W'schaft")
	assert.Equal("D'dorf", tokens[0])
	assert.Equal("Ku'damm", tokens[1])
	assert.Equal("Lu'hafen", tokens[2])
	assert.Equal("M'gladbach", tokens[3])
	assert.Equal("W'schaft", tokens[4])
	assert.Equal(5, len(tokens))

	// germanTokenizerDoesNOTSeparateGermanContractions
	tokens = ttokenize(mat, w, "mach's macht's was'n ist's haste willste kannste biste kriegste")
	assert.Equal("mach's", tokens[0])
	assert.Equal("macht's", tokens[1])
	assert.Equal("was'n", tokens[2])
	assert.Equal("ist's", tokens[3])
	assert.Equal("haste", tokens[4])
	assert.Equal("willste", tokens[5])
	assert.Equal("kannste", tokens[6])
	assert.Equal("biste", tokens[7])
	assert.Equal("kriegste", tokens[8])
	assert.Equal(9, len(tokens))

	tokens = ttokenize(mat, w, "Es ist gleich 2:30 Uhr.")
	assert.Equal("Es", tokens[0])
	assert.Equal("ist", tokens[1])
	assert.Equal("gleich", tokens[2])
	assert.Equal("2:30", tokens[3])
	assert.Equal("Uhr", tokens[4])
	assert.Equal(".", tokens[5])
	assert.Equal(6, len(tokens))

	tokens = ttokenize(mat, w, "Sie schwamm die Strecke in 00:00:57,34 00:57,341 0:57 Stunden.")
	assert.Equal("Sie", tokens[0])
	assert.Equal("schwamm", tokens[1])
	assert.Equal("die", tokens[2])
	assert.Equal("Strecke", tokens[3])
	assert.Equal("in", tokens[4])
	assert.Equal("00:00:57,34", tokens[5])
	assert.Equal("00:57,341", tokens[6])
	assert.Equal("0:57", tokens[7])
	assert.Equal("Stunden", tokens[8])
	assert.Equal(".", tokens[9])
	assert.Equal(10, len(tokens))

	// waste example
	tokens = ttokenize(mat, w, "Am 24.1.1806 feierte E. T. A. Hoffmann seinen 30. Geburtstag.")
	assert.Equal(tokens[0], "Am")
	assert.Equal(tokens[1], "24.1.1806")
	assert.Equal(tokens[2], "feierte")
	assert.Equal(tokens[3], "E.")
	assert.Equal(tokens[4], "T.")
	assert.Equal(tokens[5], "A.")
	assert.Equal(tokens[6], "Hoffmann")
	assert.Equal(tokens[7], "seinen")
	assert.Equal(tokens[8], "30.")
	assert.Equal(tokens[9], "Geburtstag")
	assert.Equal(tokens[10], ".")
	assert.Equal(11, len(tokens))

	// IPtest
	tokens = ttokenize(mat, w, "Meine IP ist 192.178.168.55.")
	assert.Equal(tokens[0], "Meine")
	assert.Equal(tokens[1], "IP")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "192.178.168.55")
	assert.Equal(tokens[4], ".")
	assert.Equal(5, len(tokens))

	// XML entities
	tokens = ttokenize(mat, w, "Das ist&nbsp;1:30 Stunden&20 Minuten zu spät &GT;.")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "&nbsp;")
	assert.Equal(tokens[3], "1:30")
	assert.Equal(tokens[4], "Stunden")
	assert.Equal(tokens[5], "&")
	assert.Equal(tokens[6], "20")
	assert.Equal(tokens[7], "Minuten")
	assert.Equal(tokens[8], "zu")
	assert.Equal(tokens[9], "spät")
	assert.Equal(tokens[10], "&GT;")
	assert.Equal(tokens[11], ".")
	assert.Equal(12, len(tokens))

	// Plusampersand compounds (1)
	tokens = ttokenize(mat, w, "Die 2G+-Regel soll weitere Covid-19-Erkrankungen reduzieren.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "2G+-Regel")
	assert.Equal(tokens[2], "soll")
	assert.Equal(tokens[3], "weitere")
	assert.Equal(tokens[4], "Covid-19-Erkrankungen")
	assert.Equal(tokens[5], "reduzieren")
	assert.Equal(tokens[6], ".")
	assert.Equal(7, len(tokens))

	// Plusampersand compounds (2)
	tokens = ttokenize(mat, w, "Der Neu-C++-Programmierer.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Neu-C++-Programmierer")
	assert.Equal(tokens[2], ".")
	assert.Equal(3, len(tokens))

	// z.B.
	tokens = ttokenize(mat, w, "Dies sind z.B. zwei Wörter - z. B. auch.")
	assert.Equal(tokens[0], "Dies")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "z.")
	assert.Equal(tokens[3], "B.")
	assert.Equal(tokens[4], "zwei")
	assert.Equal(tokens[5], "Wörter")
	assert.Equal(tokens[6], "-")
	assert.Equal(tokens[7], "z.")
	assert.Equal(tokens[8], "B.")
	assert.Equal(tokens[9], "auch")
	assert.Equal(tokens[10], ".")
	assert.Equal(11, len(tokens))

	// z.B.
	tokens = ttokenize(mat, w, "Dies sind z.B. zwei Wörter - z. B. auch.")
	assert.Equal(tokens[0], "Dies")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "z.")
	assert.Equal(tokens[3], "B.")
	assert.Equal(tokens[4], "zwei")
	assert.Equal(tokens[5], "Wörter")
	assert.Equal(tokens[6], "-")
	assert.Equal(tokens[7], "z.")
	assert.Equal(tokens[8], "B.")
	assert.Equal(tokens[9], "auch")
	assert.Equal(tokens[10], ".")
	assert.Equal(11, len(tokens))

	// Single quote handling
	tokens = ttokenize(mat, w, "Es heißt 'Leitungssportteams' und nicht anders.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "heißt")
	assert.Equal(tokens[2], "'")
	assert.Equal(tokens[3], "Leitungssportteams")
	assert.Equal(tokens[4], "'")
	assert.Equal(tokens[5], "und")
	assert.Equal(tokens[6], "nicht")
	assert.Equal(tokens[7], "anders")
	assert.Equal(tokens[8], ".")
	assert.Equal(9, len(tokens))

	// Apostrophe handling
	tokens = ttokenize(mat, w, "Das ist Nils’ Einkaufskorb bei McDonald's.")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "Nils’")
	assert.Equal(tokens[3], "Einkaufskorb")
	assert.Equal(tokens[4], "bei")
	assert.Equal(tokens[5], "McDonald's")
	assert.Equal(tokens[6], ".")
	assert.Equal(7, len(tokens))

	/*
		@Test
		public void englishTokenizerSeparatesEnglishContractionsAndClitics () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "I've we'll you'd I'm we're Peter's isn't")
			assert.Equal("'ve", tokens[1]);
			assert.Equal("'ll", tokens[3]);
			assert.Equal("'d", tokens[5]);
			assert.Equal("'m", tokens[7]);
			assert.Equal("'re", tokens[9]);
			assert.Equal("'s", tokens[11]);
			assert.Equal("is", tokens[12]);
			assert.Equal("n't", tokens[13]);
			assert.Equal(14, len(tokens));
		}

		@Test
		public void frenchTokenizerKnowsFrenchAbbreviations () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "Approx. en juill. 2004 mon prof. M. Foux m'a dit qu'il faut faire exerc. no. 4, et lire pp. 27-30.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("juill.", tokens[2]);
			assert.Equal("prof.", tokens[5]);
			assert.Equal("exerc.", tokens[15]);
			assert.Equal("no.", tokens[16]);
			assert.Equal("pp.", tokens[21]);
		}

		@Test
		public void frenchTokenizerKnowsFrenchContractions () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "J'ai j'habite qu'il d'un jusqu'à Aujourd'hui D'accord Quelqu'un Presqu'île")
			assert.Equal("J'", tokens[0]);
			assert.Equal("j'", tokens[2]);
			assert.Equal("qu'", tokens[4]);
			assert.Equal("d'", tokens[6]);
			assert.Equal("jusqu'", tokens[8]);
			assert.Equal("Aujourd'hui", tokens[10]);
			assert.Equal("D'", tokens[11]); // ’
			assert.Equal("Quelqu'un", tokens[13]); // ’
			assert.Equal("Presqu'île", tokens[14]); // ’
		}

		@Test
		public void frenchTokenizerKnowsFrenchClitics () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "suis-je sont-elles ")
			assert.Equal("suis", tokens[0]);
			assert.Equal("-je", tokens[1]);
			assert.Equal("sont", tokens[2]);
			assert.Equal("-elles", tokens[3]);
		}

		@Test
		public void testEnglishTokenizerScienceAbbreviations () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "Approx. in Sept. 1954, Assoc. Prof. Dr. R. J. Ewing reviewed articles on Enzymol. Bacteriol. effects later published in Nutr. Rheumatol. No. 12 and Nº. 13., pp. 17-18.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("in", tokens[1]);
			assert.Equal("Sept.", tokens[2]);
			assert.Equal("1954", tokens[3]);
			assert.Equal(",", tokens[4]);
			assert.Equal("Assoc.", tokens[5]);
			assert.Equal("Prof.", tokens[6]);
			assert.Equal("Dr.", tokens[7]);
			assert.Equal("R.", tokens[8]);
			assert.Equal("J.", tokens[9]);
			assert.Equal("Ewing", tokens[10]);
			assert.Equal("reviewed", tokens[11]);
			assert.Equal("articles", tokens[12]);
			assert.Equal("on", tokens[13]);
			assert.Equal("Enzymol.", tokens[14]);
			assert.Equal("Bacteriol.", tokens[15]);
			assert.Equal("effects", tokens[16]);
			assert.Equal("later", tokens[17]);
			assert.Equal("published", tokens[18]);
			assert.Equal("in", tokens[19]);
			assert.Equal("Nutr.", tokens[20]);
			assert.Equal("Rheumatol.", tokens[21]);
			assert.Equal("No.", tokens[22]);
			assert.Equal("12", tokens[23]);
			assert.Equal("and", tokens[24]);
			assert.Equal("Nº.", tokens[25]);
			assert.Equal("13.", tokens[26]);
			assert.Equal(",", tokens[27]);
			assert.Equal("pp.", tokens[28]);
			assert.Equal("17-18", tokens[29]);
			assert.Equal(".", tokens[30]);
		}

		@Test
		public void englishTokenizerCanGuessWhetherIIsAbbrev () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "M. I. Baxter was born during World War I. So was I. He went to the Peter I. Hardy school. So did I.")
			assert.Equal("I.", tokens[1]);
			assert.Equal("I", tokens[8]);
			assert.Equal(".", tokens[9]);
			assert.Equal("I", tokens[12]);
			assert.Equal(".", tokens[13]);
		}

		@Test
		public void testZipOuputArchive () {

			final ByteArrayOutputStream clearOut = new ByteArrayOutputStream();
			System.setOut(new PrintStream(clearOut));
			tokens = tokenize(dat, w, "Archive: ich/bin/ein.zip\n")
			assert.Equal(0, len(tokens));
		}
	*/
	/*

		@Test
		public void testTextBreakOutputArchive () throws InstantiationException, IllegalAccessException, ClassNotFoundException {
			DerekoDfaTokenizer_de tok = (DerekoDfaTokenizer_de) new KorapTokenizer.Builder()
				.tokenizerClassName(DerekoDfaTokenizer_de.class.getName())
				.printOffsets(true)
				.build();
			Span[] tokens = tok.tokenizePos("Text1\004\nText2 Hallo\004Rumsdibums\004Das freut mich sehr.\n");
			assert.Equal("Text1", tokens[0].getType());
			assert.Equal(len(tokens), 9 );
		}
	*/
}

func TestMatrixEmoticons(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(mat, w, ":-* ;) :)) :*( ^___^ T__T ^^; -_-;;; -_-^")
	assert.Equal(tokens[0], ":-*")
	assert.Equal(tokens[1], ";)")
	assert.Equal(tokens[2], ":))")
	assert.Equal(tokens[3], ":*(")
	assert.Equal(tokens[4], "^___^")
	assert.Equal(tokens[5], "T__T")
	assert.Equal(tokens[6], "^^;")
	assert.Equal(tokens[7], "-_-;;;")
	assert.Equal(tokens[8], "-_-^")
	assert.Equal(len(tokens), 9)
}

func TestMatrixFullTokenizerXML(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(mat, w, "Das <b>beste</b> Fußballspiel")
	assert.Equal("Das", tokens[0])
	assert.Equal("<b>", tokens[1])
	assert.Equal("beste", tokens[2])
	assert.Equal("</b>", tokens[3])
	assert.Equal("Fußballspiel", tokens[4])
	assert.Equal(5, len(tokens))

	tokens = ttokenize(mat, w, "Das <b class=\"c\">beste</b> Fußballspiel")
	assert.Equal("Das", tokens[0])
	assert.Equal("<b class=\"c\">", tokens[1])
	assert.Equal("beste", tokens[2])
	assert.Equal("</b>", tokens[3])
	assert.Equal("Fußballspiel", tokens[4])
	assert.Equal(5, len(tokens))

	tokens = ttokenize(mat, w, "der<x y=\"alte \"> <x x> alte</x> etc. et. Mann.")
	assert.Equal("der", tokens[0])
	assert.Equal("<x y=\"alte \">", tokens[1])
	assert.Equal("<x x>", tokens[2])
	assert.Equal("alte", tokens[3])
	assert.Equal("</x>", tokens[4])
	assert.Equal("etc.", tokens[5])
	assert.Equal("et", tokens[6])
	assert.Equal(".", tokens[7])
	assert.Equal("Mann", tokens[8])
	assert.Equal(".", tokens[9])
	assert.Equal(10, len(tokens))

	tokens = ttokenize(mat, w, "das<br class=\"br\" />ging.")
	assert.Equal("das", tokens[0])
	assert.Equal("<br class=\"br\" />", tokens[1])
	assert.Equal("ging", tokens[2])
	assert.Equal(".", tokens[3])
	assert.Equal(4, len(tokens))
}

func TestMatokDatokEquivalence(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}
	dat := LoadDatokFile("testdata/tokenizer.datok")

	r := strings.NewReader(s)

	tb := make([]byte, 0, 2048)
	w := bytes.NewBuffer(tb)

	// Transduce with double array representation
	dat.Transduce(r, w)

	datStr := w.String()

	r.Reset(s)
	w.Reset()

	// Transduce with matrix representation
	mat.Transduce(r, w)

	matStr := w.String()

	assert.Equal(datStr, matStr)
}

func TestMatrixFullTokenizerCallbackTransduce(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	assert.True(mat.Transduce(strings.NewReader("Der alte Baum. Er war schon alt."), w))

	matStr := w.String()

	assert.Equal("Der\nalte\nBaum\n.\n\nEr\nwar\nschon\nalt\n.\n\n\n", matStr)
}

func TestMatrixFullTokenizerTextTreatment(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	assert.True(mat.Transduce(strings.NewReader("Erste.\n\n\n\n\x04\x0aNächst.\x04"), w))
	matStr := w.String()
	assert.Equal("Erste\n.\n\n\nNächst\n.\n\n\n", matStr)
}

func TestMatrixFullTokenizerLongText(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	text := `The Project Gutenberg EBook of Effi Briest, by Theodor Fontane

Copyright laws are changing all over the world. Be sure to check the
copyright laws for your country before downloading or redistributing
this or any other Project Gutenberg eBook.

This header should be the first thing seen when viewing this Project
Gutenberg file. Please do not remove it. Do not change or edit the
header without written permission.

Please read the "legal small print," and other information about the
eBook and Project Gutenberg at the bottom of this file. Included is
important information about your specific rights and restrictions in
how the file may be used. You can also find out about how to make a
donation to Project Gutenberg, and how to get involved.


**Welcome To The World of Free Plain Vanilla Electronic Texts**

**eBooks Readable By Both Humans and By Computers, Since 1971**

*****These eBooks Were Prepared By Thousands of Volunteers!*****


Title: Effi Briest

Author: Theodor Fontane

Release Date: March, 2004 [EBook #5323]
`

	assert.True(mat.Transduce(strings.NewReader(text), w))

	assert.True(strings.Contains(w.String(), "Release"))
}

func TestMatrixTrimming(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	assert.True(mat.Transduce(strings.NewReader(" Erste."), w))
	matStr := w.String()
	assert.Equal("Erste\n.\n\n\n", matStr)
}

func BenchmarkMatrixTransduce(b *testing.B) {
	bu := make([]byte, 0, 2048)
	w := bytes.NewBuffer(bu)

	r := strings.NewReader(s)

	mat := LoadMatrixFile("testdata/tokenizer.matok")

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		w.Reset()
		r.Reset(s)
		ok := mat.Transduce(r, w)
		if !ok {
			fmt.Println("Fail!")
			fmt.Println(w.String())
			os.Exit(1)
		}
	}
}
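
// To run these tests and the benchmark above with the standard Go
// toolchain (plain `go test` usage, nothing repository-specific):
//
//	go test .
//	go test -bench=BenchmarkMatrixTransduce -benchmem .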