package datok

import (
	"bytes"
	"fmt"
	"os"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)
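// s is a shared sample text covering abbreviations, URLs, email
// addresses, numbers, dates, omission words, and contractions. It is
// used by TestMatokDatokEquivalence and BenchmarkMatrixTransduce.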
var s string = `Der Vorsitzende der Abk. hat gewählt. Gefunden auf wikipedia.org. Ich bin unter korap@ids-mannheim.de erreichbar.
Unsere Website ist https://korap.ids-mannheim.de/?q=Baum. Unser Server ist 10.0.10.51. Zu 50.4% ist es sicher.
Der Termin ist am 5.9.2018.
Ich habe die readme.txt heruntergeladen.
Ausschalten!!! Hast Du nicht gehört???
Ich wohne in der Weststr. und Du? Kupietz und Schmidt [2018]: Korpuslinguistik. Dieses verf***** Kleid! Ich habe die readme.txt heruntergeladen.
Er sagte: \"Es geht mir gut!\", daraufhin ging er. "Das ist von C&A!" Früher bzw. später ... Sie erreichte den 1. Platz!
Archive: Ich bin kein zip. D'dorf Ku'damm Lu'hafen M'gladbach W'schaft.
Mach's macht's was'n ist's haste willste kannste biste kriegste.`
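// mat caches the full matrix tokenizer; the tests below load
// testdata/tokenizer.matok lazily on first use.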
var mat *MatrixTokenizer
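// TestMatrixFullTokenizer checks basic tokenization with a simple
// FST compiled to the matrix representation.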
func TestMatrixFullTokenizer(t *testing.T) {
	assert := assert.New(t)
	foma := LoadFomaFile("testdata/simpletok.fst")
	assert.NotNil(foma)

	mat := foma.ToMatrix()

	r := strings.NewReader(" wald gehen Da kann\t man was \"erleben\"!")
	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal(len(tokens), 11)
	assert.Equal("wald", tokens[0])
	assert.Equal("gehen", tokens[1])
	assert.Equal("Da", tokens[2])
	assert.Equal("kann", tokens[3])
	assert.Equal("man", tokens[4])
	assert.Equal("was", tokens[5])
	assert.Equal("\"erleben\"", tokens[6])
	assert.Equal("!", tokens[7])

	r = strings.NewReader(" In den Wald gehen? -- Da kann\t man was \"erleben\"!")
	w.Reset()
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("In", tokens[0])
	assert.Equal("den", tokens[1])
	assert.Equal("Wald", tokens[2])
	assert.Equal("gehen", tokens[3])
	assert.Equal("?", tokens[4])
	assert.Equal("--", tokens[5])

	r = strings.NewReader(" g? -- D")
	w.Reset()
	mat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("g", tokens[0])
	assert.Equal("?", tokens[1])
	assert.Equal("--", tokens[2])
	assert.Equal("D", tokens[3])
	assert.Equal("", tokens[4])
	assert.Equal("", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal(7, len(tokens))
}
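// TestMatrixSimpleString checks greedy longest-match tokenization
// against an ambiguous lexicon (bau vs. bauamt).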
func TestMatrixSimpleString(t *testing.T) {
	assert := assert.New(t)
	// bau | bauamt
	tok := LoadFomaFile("testdata/bauamt.fst")
	mat := tok.ToMatrix()

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(mat, w, "ibauamt")
	assert.Equal("i", tokens[0])
	assert.Equal("bauamt", tokens[1])

	tokens = ttokenize(mat, w, "ibbauamt")
	assert.Equal("i", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("bauamt", tokens[2])

	tokens = ttokenize(mat, w, "bau")
	assert.Equal("bau", tokens[0])

	tokens = ttokenize(mat, w, "baum")
	assert.Equal("bau", tokens[0])
	assert.Equal("m", tokens[1])

	tokens = ttokenize(mat, w, "baudibauamt")
	assert.Equal("bau", tokens[0])
	assert.Equal("d", tokens[1])
	assert.Equal("i", tokens[2])
	assert.Equal("bauamt", tokens[3])
}
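// TestMatrixReadWriteTokenizer serializes a matrix tokenizer with
// WriteTo and checks that ParseMatrix restores an equivalent tokenizer.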
Akronc9c0eae2021-10-22 19:49:43 +0200107func TestMatrixReadWriteTokenizer(t *testing.T) {
Akron16c312e2021-09-26 13:11:12 +0200108 assert := assert.New(t)
109 foma := LoadFomaFile("testdata/simpletok.fst")
110 assert.NotNil(foma)
111
112 mat := foma.ToMatrix()
Akron28031b72021-10-02 13:07:25 +0200113 assert.NotNil(mat)
Akron16c312e2021-09-26 13:11:12 +0200114
Akrondf275812022-03-27 12:54:46 +0200115 assert.Equal(ttokenizeStr(mat, "bau"), "bau")
116 assert.Equal(ttokenizeStr(mat, "bad"), "bad")
117 assert.Equal(ttokenizeStr(mat, "wald gehen"), "wald\ngehen")
Akron16c312e2021-09-26 13:11:12 +0200118 b := make([]byte, 0, 1024)
119 buf := bytes.NewBuffer(b)
120 n, err := mat.WriteTo(buf)
121 assert.Nil(err)
Akron28031b72021-10-02 13:07:25 +0200122 assert.Equal(int64(230), n)
Akron16c312e2021-09-26 13:11:12 +0200123 mat2 := ParseMatrix(buf)
124 assert.NotNil(mat2)
125 assert.Equal(mat.sigma, mat2.sigma)
126 assert.Equal(mat.epsilon, mat2.epsilon)
127 assert.Equal(mat.unknown, mat2.unknown)
128 assert.Equal(mat.identity, mat2.identity)
129 assert.Equal(mat.stateCount, mat2.stateCount)
130 assert.Equal(len(mat.array), len(mat2.array))
131 assert.Equal(mat.array, mat2.array)
Akrondf275812022-03-27 12:54:46 +0200132 assert.Equal(ttokenizeStr(mat2, "bau"), "bau")
133 assert.Equal(ttokenizeStr(mat2, "bad"), "bad")
134 assert.Equal(ttokenizeStr(mat2, "wald gehen"), "wald\ngehen")
Akron16c312e2021-09-26 13:11:12 +0200135}
136
func TestMatrixIgnorableMCS(t *testing.T) {
	assert := assert.New(t)

	// This test relies on final states, which is why it
	// no longer works correctly.

	// File has MCS in sigma but not in net
	tok := LoadFomaFile("testdata/ignorable_mcs.fst")
	assert.NotNil(tok)
	mat := tok.ToMatrix()
	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// Only unambiguous when transducing strictly greedily!
	assert.True(mat.Transduce(strings.NewReader("ab<ab>a"), w))
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("a\nb\n<ab>a\n\n\n", w.String())
	assert.Equal("a", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("<ab>a", tokens[2])
	assert.Equal(6, len(tokens))
}
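// xTestMatrixReadWriteFullTokenizer round-trips the full tokenizer
// matrix through WriteTo/ParseMatrix; the x prefix keeps it from
// running as part of the test suite.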
Akronc9c0eae2021-10-22 19:49:43 +0200163func xTestMatrixReadWriteFullTokenizer(t *testing.T) {
Akron28031b72021-10-02 13:07:25 +0200164 assert := assert.New(t)
165 foma := LoadFomaFile("testdata/tokenizer.fst")
166 assert.NotNil(foma)
167
168 mat := foma.ToMatrix()
169 assert.NotNil(foma)
170
171 tb := make([]byte, 0, 2048)
172 w := bytes.NewBuffer(tb)
173
174 assert.True(mat.Transduce(strings.NewReader("der alte baum"), w))
Akrona854faa2021-10-22 19:31:08 +0200175 assert.Equal("der\nalte\nbaum\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200176
177 b := make([]byte, 0, 1024)
178 buf := bytes.NewBuffer(b)
179 _, err := mat.WriteTo(buf)
180 assert.Nil(err)
181 w.Reset()
182 // assert.Equal(int64(248), n)
183
184 mat2 := ParseMatrix(buf)
185 assert.NotNil(mat2)
186 assert.Equal(mat.sigma, mat2.sigma)
187 assert.Equal(mat.epsilon, mat2.epsilon)
188 assert.Equal(mat.unknown, mat2.unknown)
189 assert.Equal(mat.identity, mat2.identity)
190 assert.Equal(mat.stateCount, mat2.stateCount)
191 assert.Equal(len(mat.array), len(mat2.array))
192 // assert.Equal(mat.array, mat2.array)
193
194 assert.True(mat2.Transduce(strings.NewReader("der alte baum"), w))
Akrona854faa2021-10-22 19:31:08 +0200195 assert.Equal("der\nalte\nbaum\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200196}
197
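// TestMatrixFullTokenizerTransduce checks the raw transducer output,
// where token boundaries are newlines and sentence boundaries are
// empty lines.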
func TestMatrixFullTokenizerTransduce(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	assert.True(mat.Transduce(strings.NewReader("tra. u Du?"), w))

	tokens = strings.Split(w.String(), "\n")
	assert.Equal("tra\n.\n\nu\nDu\n?\n\n\n", w.String())
	assert.Equal("tra", tokens[0])
	assert.Equal(".", tokens[1])
	assert.Equal("", tokens[2])
	assert.Equal("u", tokens[3])
	assert.Equal("Du", tokens[4])
	assert.Equal("?", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal("", tokens[7])
	assert.Equal(9, len(tokens))

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"John Doe\"@xx.com"), w))
	assert.Equal("\"\nJohn\nDoe\n\"\n@xx\n.\n\ncom\n\n\n", w.String())
}
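// TestMatrixFullTokenizerMatrixSentenceSplitter checks sentence
// boundary detection; the output is split on "\n\n" to extract
// sentences.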
func TestMatrixFullTokenizerMatrixSentenceSplitter(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	// testSentSplitterSimple
	assert.True(mat.Transduce(strings.NewReader("Der alte Mann."), w))
	sentences = strings.Split(w.String(), "\n\n")

	assert.Equal("Der\nalte\nMann\n.\n\n\n", w.String())
	assert.Equal("Der\nalte\nMann\n.", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der F.D.P. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nF.D.P.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der Abk. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nAbk.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("", sentences[0])
	assert.Equal("", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Gefunden auf wikipedia.org."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich bin unter korap@ids-mannheim.de erreichbar."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unsere Website ist https://korap.ids-mannheim.de/?q=Baum"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("Unsere\nWebsite\nist\nhttps://korap.ids-mannheim.de/?q=Baum", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unser Server ist 10.0.10.51."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Zu 50.4% ist es sicher"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Termin ist am 5.9.2018"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich habe die readme.txt heruntergeladen"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Ich\nhabe\ndie\nreadme.txt\nheruntergeladen", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ausschalten!!! Hast Du nicht gehört???"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("Ausschalten\n!!!", sentences[0])
	assert.Equal("Hast\nDu\nnicht\ngehört\n???", sentences[1])
	assert.Equal("\n", sentences[2])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich wohne in der Weststr. und Du?"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"Alter!\", sagte er: \"Komm nicht wieder!\" Geh!!! \"Lass!\" Dann ging er."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 5)
	assert.Equal("\"\nAlter\n!\n\"\n,\nsagte\ner\n:\n\"\nKomm\nnicht\nwieder\n!\n\"", sentences[0])
	assert.Equal("Geh\n!!!", sentences[1])
	assert.Equal("\"\nLass\n!\n\"", sentences[2])
	assert.Equal("Dann\nging\ner\n.", sentences[3])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("\"Ausschalten!!!\", sagte er. \"Hast Du nicht gehört???\""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("\"\nAusschalten\n!!!\n\"\n,\nsagte\ner\n.", sentences[0])
	assert.Equal("\"\nHast\nDu\nnicht\ngehört\n???\n\"", sentences[1])

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("“Ausschalten!!!”, sagte er. «Hast Du nicht gehört???»"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("“\nAusschalten\n!!!\n”\n,\nsagte\ner\n.", sentences[0])
	assert.Equal("«\nHast\nDu\nnicht\ngehört\n???\n»", sentences[1])

	text := `»Meinetwegen. Denkst du, daß ich darauf warte? Das fehlte noch.
Übrigens, ich kriege schon einen und vielleicht bald. Da ist mir nicht
bange. Neulich erst hat mir der kleine Ventivegni von drüben gesagt:
'Fräulein Effi, was gilt die Wette, wir sind hier noch in diesem Jahre
zu Polterabend und Hochzeit.'«

»Und was sagtest du da?«`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 8)
	assert.Equal("Neulich\nerst\nhat\nmir\nder\nkleine\nVentivegni\nvon\ndrüben\ngesagt\n:\n'\nFräulein\nEffi\n,\nwas\ngilt\ndie\nWette\n,\nwir\nsind\nhier\nnoch\nin\ndiesem\nJahre\nzu\nPolterabend\nund\nHochzeit\n.\n'\n«", sentences[5])
	assert.Equal("»\nUnd\nwas\nsagtest\ndu\nda\n?\n«", sentences[6])

	text = `»Nun, gib dich zufrieden, ich fange schon an ... Also Baron
Innstetten!`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("»\nNun\n,\ngib\ndich\nzufrieden\n,\nich\nfange\nschon\nan\n...", sentences[0])
	assert.Equal("Also\nBaron\nInnstetten\n!", sentences[1])
}
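// TestMatrixFullTokenizerMatrixSentenceSplitterBug1 is a regression
// test for sentence splitting in quoted passages.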
func TestMatrixFullTokenizerMatrixSentenceSplitterBug1(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	text := `Wüllersdorf war aufgestanden. »Ich finde es furchtbar, daß Sie recht haben, aber Sie haben recht. Ich quäle Sie nicht länger mit meinem 'Muß es sein?'. Die Welt ist einmal, wie sie ist, und die Dinge verlaufen nicht, wie wir wollen, sondern wie die andern wollen. Das mit dem 'Gottesgericht', wie manche hochtrabend versichern, ist freilich ein Unsinn, nichts davon, umgekehrt, unser Ehrenkultus ist ein Götzendienst, aber wir müssen uns ihm unterwerfen, solange der Götze gilt.«`

	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(text), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 5)
	assert.Equal("Wüllersdorf\nwar\naufgestanden\n.", sentences[0])
	assert.Equal("»\nIch\nfinde\nes\nfurchtbar\n,\ndaß\nSie\nrecht\nhaben\n,\naber\nSie\nhaben\nrecht\n.", sentences[1])
	assert.Equal("Ich\nquäle\nSie\nnicht\nlänger\nmit\nmeinem\n'\nMuß\nes\nsein\n?\n'\n.\n \nDie\nWelt\nist\neinmal\n,\nwie\nsie\nist\n,\nund\ndie\nDinge\nverlaufen\nnicht\n,\nwie\nwir\nwollen\n,\nsondern\nwie\ndie\nandern\nwollen\n.", sentences[2])
	assert.Equal("Das\nmit\ndem\n'\nGottesgericht\n'\n,\nwie\nmanche\nhochtrabend\nversichern\n,\nist\nfreilich\nein\nUnsinn\n,\nnichts\ndavon\n,\numgekehrt\n,\nunser\nEhrenkultus\nist\nein\nGötzendienst\n,\naber\nwir\nmüssen\nuns\nihm\nunterwerfen\n,\nsolange\nder\nGötze\ngilt\n.\n«", sentences[3])
}
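// TestMatrixFullTokenizerTokenSplitter largely mirrors the
// KorAP-Tokenizer test suite for token boundaries; deviations from
// KorAP-Tokenizer are marked inline.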
func TestMatrixFullTokenizerTokenSplitter(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// testTokenizerSimple
	tokens = ttokenize(mat, w, "Der alte Mann")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(len(tokens), 3)

	tokens = ttokenize(mat, w, "Der alte Mann.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerAbbr
	tokens = ttokenize(mat, w, "Der Vorsitzende der F.D.P. hat gewählt")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Vorsitzende")
	assert.Equal(tokens[2], "der")
	assert.Equal(tokens[3], "F.D.P.")
	assert.Equal(tokens[4], "hat")
	assert.Equal(tokens[5], "gewählt")
	assert.Equal(len(tokens), 6)
	// Ignored in KorAP-Tokenizer

	// testTokenizerHost1
	tokens = ttokenize(mat, w, "Gefunden auf wikipedia.org")
	assert.Equal(tokens[0], "Gefunden")
	assert.Equal(tokens[1], "auf")
	assert.Equal(tokens[2], "wikipedia.org")
	assert.Equal(len(tokens), 3)

	// testTokenizerWwwHost
	tokens = ttokenize(mat, w, "Gefunden auf www.wikipedia.org")
	assert.Equal("Gefunden", tokens[0])
	assert.Equal("auf", tokens[1])
	assert.Equal("www.wikipedia.org", tokens[2])
	assert.Equal(3, len(tokens))

	// testTokenizerWwwUrl
	tokens = ttokenize(mat, w, "Weitere Infos unter www.info.biz/info")
	assert.Equal("www.info.biz/info", tokens[3])

	// testTokenizerFtpHost
	/*
		tokens = tokenize(dat, w, "Kann von ftp.download.org heruntergeladen werden")
		assert.Equal("Kann", tokens[0])
		assert.Equal("von", tokens[1])
		assert.Equal("ftp.download.org", tokens[2])
		assert.Equal(5, len(tokens))
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerDash
	tokens = ttokenize(mat, w, "Das war -- spitze")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "--")
	assert.Equal(tokens[3], "spitze")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail1
	tokens = ttokenize(mat, w, "Ich bin unter korap@ids-mannheim.de erreichbar.")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "bin")
	assert.Equal(tokens[2], "unter")
	assert.Equal(tokens[3], "korap@ids-mannheim.de")
	assert.Equal(tokens[4], "erreichbar")
	assert.Equal(tokens[5], ".")
	assert.Equal(len(tokens), 6)

	// testTokenizerEmail2
	tokens = ttokenize(mat, w, "Oder unter korap[at]ids-mannheim[dot]de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap[at]ids-mannheim[dot]de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail3
	tokens = ttokenize(mat, w, "Oder unter korap(at)ids-mannheim(dot)de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap(at)ids-mannheim(dot)de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)
	// Ignored in KorAP-Tokenizer

	// testTokenizerDoNotAcceptQuotedEmailNames
	tokens = ttokenize(mat, w, "\"John Doe\"@xx.com")
	assert.Equal("\"", tokens[0])
	assert.Equal("John", tokens[1])
	assert.Equal("Doe", tokens[2])
	assert.Equal("\"", tokens[3])
	assert.Equal("@xx", tokens[4])
	assert.Equal(".", tokens[5]) // Differs - as the sentence splitter splits here!
	assert.Equal("com", tokens[6])
	assert.Equal(7, len(tokens))

	// testTokenizerTwitter
	tokens = ttokenize(mat, w, "Folgt @korap und #korap")
	assert.Equal(tokens[0], "Folgt")
	assert.Equal(tokens[1], "@korap")
	assert.Equal(tokens[2], "und")
	assert.Equal(tokens[3], "#korap")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb1
	tokens = ttokenize(mat, w, "Unsere Website ist https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[0], "Unsere")
	assert.Equal(tokens[1], "Website")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb2
	tokens = ttokenize(mat, w, "Wir sind auch im Internet (https://korap.ids-mannheim.de/?q=Baum)")
	assert.Equal(tokens[0], "Wir")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "auch")
	assert.Equal(tokens[3], "im")
	assert.Equal(tokens[4], "Internet")
	assert.Equal(tokens[5], "(")
	assert.Equal(tokens[6], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[7], ")")
	assert.Equal(len(tokens), 8)
	// Ignored in KorAP-Tokenizer

	// testTokenizerWeb3
	tokens = ttokenize(mat, w, "Die Adresse ist https://korap.ids-mannheim.de/?q=Baum.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "Adresse")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)
	// Ignored in KorAP-Tokenizer

	// testTokenizerServer
	tokens = ttokenize(mat, w, "Unser Server ist 10.0.10.51.")
	assert.Equal(tokens[0], "Unser")
	assert.Equal(tokens[1], "Server")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "10.0.10.51")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)

	// testTokenizerNum
	tokens = ttokenize(mat, w, "Zu 50,4% ist es sicher")
	assert.Equal(tokens[0], "Zu")
	assert.Equal(tokens[1], "50,4%")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "es")
	assert.Equal(tokens[4], "sicher")
	assert.Equal(len(tokens), 5)
	// Differs from KorAP-Tokenizer

	// testTokenizerDate
	tokens = ttokenize(mat, w, "Der Termin ist am 5.9.2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5.9.2018")
	assert.Equal(len(tokens), 5)

	tokens = ttokenize(mat, w, "Der Termin ist am 5/9/2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5/9/2018")
	assert.Equal(len(tokens), 5)

	// testTokenizerDateRange
	/*
		tokens = tokenize(dat, w, "Der Termin war vom 4.-5.9.2018")
		assert.Equal(tokens[0], "Der")
		assert.Equal(tokens[1], "Termin")
		assert.Equal(tokens[2], "war")
		assert.Equal(tokens[3], "vom")
		assert.Equal(tokens[4], "4.")
		assert.Equal(tokens[5], "-")
		assert.Equal(tokens[6], "5.9.2018")
		assert.Equal(len(tokens), 7)
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerEmoji1
	tokens = ttokenize(mat, w, "Das ist toll! ;)")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "toll")
	assert.Equal(tokens[3], "!")
	assert.Equal(tokens[4], ";)")
	assert.Equal(len(tokens), 5)

	// testTokenizerRef1
	tokens = ttokenize(mat, w, "Kupietz und Schmidt (2018): Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "(2018)")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerRef2
	tokens = ttokenize(mat, w, "Kupietz und Schmidt [2018]: Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "[2018]")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerOmission1
	tokens = ttokenize(mat, w, "Er ist ein A****loch!")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "ein")
	assert.Equal(tokens[3], "A****loch")
	assert.Equal(tokens[4], "!")
	assert.Equal(len(tokens), 5)

	// testTokenizerOmission2
	tokens = ttokenize(mat, w, "F*ck!")
	assert.Equal(tokens[0], "F*ck")
	assert.Equal(tokens[1], "!")
	assert.Equal(len(tokens), 2)

	// testTokenizerOmission3
	tokens = ttokenize(mat, w, "Dieses verf***** Kleid!")
	assert.Equal(tokens[0], "Dieses")
	assert.Equal(tokens[1], "verf*****")
	assert.Equal(tokens[2], "Kleid")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension1
	tokens = ttokenize(mat, w, "Ich habe die readme.txt heruntergeladen")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "habe")
	assert.Equal(tokens[2], "die")
	assert.Equal(tokens[3], "readme.txt")
	assert.Equal(tokens[4], "heruntergeladen")
	assert.Equal(len(tokens), 5)

	// Probably interpreted as HOST
	// testTokenizerFileExtension2
	tokens = ttokenize(mat, w, "Nimm die README.TXT!")
	assert.Equal(tokens[0], "Nimm")
	assert.Equal(tokens[1], "die")
	assert.Equal(tokens[2], "README.TXT")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension3
	tokens = ttokenize(mat, w, "Zeig mir profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "profile.jpeg")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile1
	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.docx")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile2
	tokens = ttokenize(mat, w, "Gehe zu /Dokumente/profile.docx")
	assert.Equal(tokens[0], "Gehe")
	assert.Equal(tokens[1], "zu")
	assert.Equal(tokens[2], "/Dokumente/profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile3
	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.jpeg")
	assert.Equal(len(tokens), 3)
	// Ignored in KorAP-Tokenizer

	// testTokenizerPunct
	tokens = ttokenize(mat, w, "Er sagte: \"Es geht mir gut!\", daraufhin ging er.")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "sagte")
	assert.Equal(tokens[2], ":")
	assert.Equal(tokens[3], "\"")
	assert.Equal(tokens[4], "Es")
	assert.Equal(tokens[5], "geht")
	assert.Equal(tokens[6], "mir")
	assert.Equal(tokens[7], "gut")
	assert.Equal(tokens[8], "!")
	assert.Equal(tokens[9], "\"")
	assert.Equal(tokens[10], ",")
	assert.Equal(tokens[11], "daraufhin")
	assert.Equal(tokens[12], "ging")
	assert.Equal(tokens[13], "er")
	assert.Equal(tokens[14], ".")
	assert.Equal(len(tokens), 15)

	// testTokenizerPlusAmpersand
	tokens = ttokenize(mat, w, "&quot;Das ist von C&A!&quot;")
	assert.Equal(tokens[0], "&quot;")
	assert.Equal(tokens[1], "Das")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "von")
	assert.Equal(tokens[4], "C&A")
	assert.Equal(tokens[5], "!")
	assert.Equal(tokens[6], "&quot;")
	assert.Equal(len(tokens), 7)

	// testTokenizerLongEnd
	tokens = ttokenize(mat, w, "Siehst Du?!!?")
	assert.Equal(tokens[0], "Siehst")
	assert.Equal(tokens[1], "Du")
	assert.Equal(tokens[2], "?!!?")
	assert.Equal(len(tokens), 3)

	// testTokenizerIrishO
	tokens = ttokenize(mat, w, "Peter O'Toole")
	assert.Equal(tokens[0], "Peter")
	assert.Equal(tokens[1], "O'Toole")
	assert.Equal(len(tokens), 2)

	// testTokenizerAbr
	tokens = ttokenize(mat, w, "Früher bzw. später ...")
	assert.Equal(tokens[0], "Früher")
	assert.Equal(tokens[1], "bzw.")
	assert.Equal(tokens[2], "später")
	assert.Equal(tokens[3], "...")
	assert.Equal(len(tokens), 4)

	// testTokenizerUppercaseRule
	tokens = ttokenize(mat, w, "Es war spät.Morgen ist es früh.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "spät")
	assert.Equal(tokens[3], ".")
	assert.Equal(tokens[4], "Morgen")
	assert.Equal(tokens[5], "ist")
	assert.Equal(tokens[6], "es")
	assert.Equal(tokens[7], "früh")
	assert.Equal(tokens[8], ".")
	assert.Equal(len(tokens), 9)
	// Ignored in KorAP-Tokenizer

	// testTokenizerOrd
	tokens = ttokenize(mat, w, "Sie erreichte den 1. Platz!")
	assert.Equal(tokens[0], "Sie")
	assert.Equal(tokens[1], "erreichte")
	assert.Equal(tokens[2], "den")
	assert.Equal(tokens[3], "1.")
	assert.Equal(tokens[4], "Platz")
	assert.Equal(tokens[5], "!")
	assert.Equal(len(tokens), 6)

	// testNoZipOuputArchive
	tokens = ttokenize(mat, w, "Archive: Ich bin kein zip\n")
	assert.Equal(tokens[0], "Archive")
	assert.Equal(tokens[1], ":")
	assert.Equal(tokens[2], "Ich")
	assert.Equal(tokens[3], "bin")
	assert.Equal(tokens[4], "kein")
	assert.Equal(tokens[5], "zip")
	assert.Equal(6, len(tokens))

	// testTokenizerStrasse
	tokens = ttokenize(mat, w, "Ich wohne in der Weststr. und Du?")
	assert.Equal(tokens[4], "Weststr.")
	assert.Equal(8, len(tokens))

	// germanTokenizerKnowsGermanOmissionWords
	tokens = ttokenize(mat, w, "D'dorf Ku'damm Lu'hafen M'gladbach W'schaft")
	assert.Equal("D'dorf", tokens[0])
	assert.Equal("Ku'damm", tokens[1])
	assert.Equal("Lu'hafen", tokens[2])
	assert.Equal("M'gladbach", tokens[3])
	assert.Equal("W'schaft", tokens[4])
	assert.Equal(5, len(tokens))

	// germanTokenizerDoesNOTSeparateGermanContractions
	tokens = ttokenize(mat, w, "mach's macht's was'n ist's haste willste kannste biste kriegste")
	assert.Equal("mach's", tokens[0])
	assert.Equal("macht's", tokens[1])
	assert.Equal("was'n", tokens[2])
	assert.Equal("ist's", tokens[3])
	assert.Equal("haste", tokens[4])
	assert.Equal("willste", tokens[5])
	assert.Equal("kannste", tokens[6])
	assert.Equal("biste", tokens[7])
	assert.Equal("kriegste", tokens[8])
	assert.Equal(9, len(tokens))

	tokens = ttokenize(mat, w, "Es ist gleich 2:30 Uhr.")
	assert.Equal("Es", tokens[0])
	assert.Equal("ist", tokens[1])
	assert.Equal("gleich", tokens[2])
	assert.Equal("2:30", tokens[3])
	assert.Equal("Uhr", tokens[4])
	assert.Equal(".", tokens[5])
	assert.Equal(6, len(tokens))

	tokens = ttokenize(mat, w, "Sie schwamm die Strecke in 00:00:57,34 00:57,341 0:57 Stunden.")
	assert.Equal("Sie", tokens[0])
	assert.Equal("schwamm", tokens[1])
	assert.Equal("die", tokens[2])
	assert.Equal("Strecke", tokens[3])
	assert.Equal("in", tokens[4])
	assert.Equal("00:00:57,34", tokens[5])
	assert.Equal("00:57,341", tokens[6])
	assert.Equal("0:57", tokens[7])
	assert.Equal("Stunden", tokens[8])
	assert.Equal(".", tokens[9])
	assert.Equal(10, len(tokens))

	// waste example
	tokens = ttokenize(mat, w, "Am 24.1.1806 feierte E. T. A. Hoffmann seinen 30. Geburtstag.")
	assert.Equal(tokens[0], "Am")
	assert.Equal(tokens[1], "24.1.1806")
	assert.Equal(tokens[2], "feierte")
	assert.Equal(tokens[3], "E.")
	assert.Equal(tokens[4], "T.")
	assert.Equal(tokens[5], "A.")
	assert.Equal(tokens[6], "Hoffmann")
	assert.Equal(tokens[7], "seinen")
	assert.Equal(tokens[8], "30.")
	assert.Equal(tokens[9], "Geburtstag")
	assert.Equal(tokens[10], ".")
	assert.Equal(11, len(tokens))

	// IPtest
	tokens = ttokenize(mat, w, "Meine IP ist 192.178.168.55.")
	assert.Equal(tokens[0], "Meine")
	assert.Equal(tokens[1], "IP")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "192.178.168.55")
	assert.Equal(tokens[4], ".")
	assert.Equal(5, len(tokens))

	// XML entities
	tokens = ttokenize(mat, w, "Das ist&nbsp;1:30 Stunden&20 Minuten zu spät &GT;.")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "&nbsp;")
	assert.Equal(tokens[3], "1:30")
	assert.Equal(tokens[4], "Stunden")
	assert.Equal(tokens[5], "&")
	assert.Equal(tokens[6], "20")
	assert.Equal(tokens[7], "Minuten")
	assert.Equal(tokens[8], "zu")
	assert.Equal(tokens[9], "spät")
	assert.Equal(tokens[10], "&GT;")
	assert.Equal(tokens[11], ".")
	assert.Equal(12, len(tokens))

	// Plusampersand compounds (1)
	tokens = ttokenize(mat, w, "Die 2G+-Regel soll weitere Covid-19-Erkrankungen reduzieren.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "2G+-Regel")
	assert.Equal(tokens[2], "soll")
	assert.Equal(tokens[3], "weitere")
	assert.Equal(tokens[4], "Covid-19-Erkrankungen")
	assert.Equal(tokens[5], "reduzieren")
	assert.Equal(tokens[6], ".")
	assert.Equal(7, len(tokens))

	// Plusampersand compounds (2)
	tokens = ttokenize(mat, w, "Der Neu-C++-Programmierer.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Neu-C++-Programmierer")
	assert.Equal(tokens[2], ".")
	assert.Equal(3, len(tokens))

	// z.B.
	tokens = ttokenize(mat, w, "Dies sind z.B. zwei Wörter - z. B. auch.")
	assert.Equal(tokens[0], "Dies")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "z.")
	assert.Equal(tokens[3], "B.")
	assert.Equal(tokens[4], "zwei")
	assert.Equal(tokens[5], "Wörter")
	assert.Equal(tokens[6], "-")
	assert.Equal(tokens[7], "z.")
	assert.Equal(tokens[8], "B.")
	assert.Equal(tokens[9], "auch")
	assert.Equal(tokens[10], ".")
	assert.Equal(11, len(tokens))

	// Single quote handling
	tokens = ttokenize(mat, w, "Es heißt 'Leitungssportteams' und nicht anders.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "heißt")
	assert.Equal(tokens[2], "'")
	assert.Equal(tokens[3], "Leitungssportteams")
	assert.Equal(tokens[4], "'")
	assert.Equal(tokens[5], "und")
	assert.Equal(tokens[6], "nicht")
	assert.Equal(tokens[7], "anders")
	assert.Equal(tokens[8], ".")
	assert.Equal(9, len(tokens))

	// Apostrophe handling
	tokens = ttokenize(mat, w, "Das ist Nils’ Einkaufskorb bei McDonald's.")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "Nils’")
	assert.Equal(tokens[3], "Einkaufskorb")
	assert.Equal(tokens[4], "bei")
	assert.Equal(tokens[5], "McDonald's")
	assert.Equal(tokens[6], ".")
	assert.Equal(7, len(tokens))

	/*
		@Test
		public void englishTokenizerSeparatesEnglishContractionsAndClitics () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "I've we'll you'd I'm we're Peter's isn't")
			assert.Equal("'ve", tokens[1]);
			assert.Equal("'ll", tokens[3]);
			assert.Equal("'d", tokens[5]);
			assert.Equal("'m", tokens[7]);
			assert.Equal("'re", tokens[9]);
			assert.Equal("'s", tokens[11]);
			assert.Equal("is", tokens[12]);
			assert.Equal("n't", tokens[13]);
			assert.Equal(14, len(tokens));
		}

		@Test
		public void frenchTokenizerKnowsFrenchAbbreviations () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "Approx. en juill. 2004 mon prof. M. Foux m'a dit qu'il faut faire exerc. no. 4, et lire pp. 27-30.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("juill.", tokens[2]);
			assert.Equal("prof.", tokens[5]);
			assert.Equal("exerc.", tokens[15]);
			assert.Equal("no.", tokens[16]);
			assert.Equal("pp.", tokens[21]);
		}

		@Test
		public void frenchTokenizerKnowsFrenchContractions () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "J'ai j'habite qu'il d'un jusqu'à Aujourd'hui D'accord Quelqu'un Presqu'île")
			assert.Equal("J'", tokens[0]);
			assert.Equal("j'", tokens[2]);
			assert.Equal("qu'", tokens[4]);
			assert.Equal("d'", tokens[6]);
			assert.Equal("jusqu'", tokens[8]);
			assert.Equal("Aujourd'hui", tokens[10]);
			assert.Equal("D'", tokens[11]); // ’
			assert.Equal("Quelqu'un", tokens[13]); // ’
			assert.Equal("Presqu'île", tokens[14]); // ’
		}

		@Test
		public void frenchTokenizerKnowsFrenchClitics () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "suis-je sont-elles ")
			assert.Equal("suis", tokens[0]);
			assert.Equal("-je", tokens[1]);
			assert.Equal("sont", tokens[2]);
			assert.Equal("-elles", tokens[3]);
		}

		@Test
		public void testEnglishTokenizerScienceAbbreviations () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "Approx. in Sept. 1954, Assoc. Prof. Dr. R. J. Ewing reviewed articles on Enzymol. Bacteriol. effects later published in Nutr. Rheumatol. No. 12 and Nº. 13., pp. 17-18.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("in", tokens[1]);
			assert.Equal("Sept.", tokens[2]);
			assert.Equal("1954", tokens[3]);
			assert.Equal(",", tokens[4]);
			assert.Equal("Assoc.", tokens[5]);
			assert.Equal("Prof.", tokens[6]);
			assert.Equal("Dr.", tokens[7]);
			assert.Equal("R.", tokens[8]);
			assert.Equal("J.", tokens[9]);
			assert.Equal("Ewing", tokens[10]);
			assert.Equal("reviewed", tokens[11]);
			assert.Equal("articles", tokens[12]);
			assert.Equal("on", tokens[13]);
			assert.Equal("Enzymol.", tokens[14]);
			assert.Equal("Bacteriol.", tokens[15]);
			assert.Equal("effects", tokens[16]);
			assert.Equal("later", tokens[17]);
			assert.Equal("published", tokens[18]);
			assert.Equal("in", tokens[19]);
			assert.Equal("Nutr.", tokens[20]);
			assert.Equal("Rheumatol.", tokens[21]);
			assert.Equal("No.", tokens[22]);
			assert.Equal("12", tokens[23]);
			assert.Equal("and", tokens[24]);
			assert.Equal("Nº.", tokens[25]);
			assert.Equal("13.", tokens[26]);
			assert.Equal(",", tokens[27]);
			assert.Equal("pp.", tokens[28]);
			assert.Equal("17-18", tokens[29]);
			assert.Equal(".", tokens[30]);
		}

		@Test
		public void englishTokenizerCanGuessWhetherIIsAbbrev () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "M. I. Baxter was born during World War I. So was I. He went to the Peter I. Hardy school. So did I.")
			assert.Equal("I.", tokens[1]);
			assert.Equal("I", tokens[8]);
			assert.Equal(".", tokens[9]);
			assert.Equal("I", tokens[12]);
			assert.Equal(".", tokens[13]);
		}

		@Test
		public void testZipOuputArchive () {

			final ByteArrayOutputStream clearOut = new ByteArrayOutputStream();
			System.setOut(new PrintStream(clearOut));
			tokens = tokenize(dat, w, "Archive: ich/bin/ein.zip\n")
			assert.Equal(0, len(tokens));
		}
	*/
	/*

		@Test
		public void testTextBreakOutputArchive () throws InstantiationException, IllegalAccessException, ClassNotFoundException {
			DerekoDfaTokenizer_de tok = (DerekoDfaTokenizer_de) new KorapTokenizer.Builder()
				.tokenizerClassName(DerekoDfaTokenizer_de.class.getName())
				.printOffsets(true)
				.build();
			Span[] tokens = tok.tokenizePos("Text1\004\nText2 Hallo\004Rumsdibums\004Das freut mich sehr.\n");
			assert.Equal("Text1", tokens[0].getType());
			assert.Equal(len(tokens), 9);
		}
	*/
}
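// TestMatrixFullTokenizerXML checks that XML tags, including tags
// with attributes and self-closing tags, are kept as single tokens.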
func TestMatrixFullTokenizerXML(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(mat, w, "Das <b>beste</b> Fußballspiel")
	assert.Equal("Das", tokens[0])
	assert.Equal("<b>", tokens[1])
	assert.Equal("beste", tokens[2])
	assert.Equal("</b>", tokens[3])
	assert.Equal("Fußballspiel", tokens[4])
	assert.Equal(5, len(tokens))

	tokens = ttokenize(mat, w, "Das <b class=\"c\">beste</b> Fußballspiel")
	assert.Equal("Das", tokens[0])
	assert.Equal("<b class=\"c\">", tokens[1])
	assert.Equal("beste", tokens[2])
	assert.Equal("</b>", tokens[3])
	assert.Equal("Fußballspiel", tokens[4])
	assert.Equal(5, len(tokens))

	tokens = ttokenize(mat, w, "der<x y=\"alte \"> <x x> alte</x> etc. et. Mann.")
	assert.Equal("der", tokens[0])
	assert.Equal("<x y=\"alte \">", tokens[1])
	assert.Equal("<x x>", tokens[2])
	assert.Equal("alte", tokens[3])
	assert.Equal("</x>", tokens[4])
	assert.Equal("etc.", tokens[5])
	assert.Equal("et", tokens[6])
	assert.Equal(".", tokens[7])
	assert.Equal("Mann", tokens[8])
	assert.Equal(".", tokens[9])
	assert.Equal(10, len(tokens))

	tokens = ttokenize(mat, w, "das<br class=\"br\" />ging.")
	assert.Equal("das", tokens[0])
	assert.Equal("<br class=\"br\" />", tokens[1])
	assert.Equal("ging", tokens[2])
	assert.Equal(".", tokens[3])
	assert.Equal(4, len(tokens))
}
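// TestMatokDatokEquivalence checks that the matrix (.matok) and the
// double array (.datok) representations produce identical output for
// the sample text s.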
func TestMatokDatokEquivalence(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}
	dat := LoadDatokFile("testdata/tokenizer.datok")

	r := strings.NewReader(s)

	tb := make([]byte, 0, 2048)
	w := bytes.NewBuffer(tb)

	// Transduce with double array representation
	dat.Transduce(r, w)

	datStr := w.String()

	r.Reset(s)
	w.Reset()

	// Transduce with matrix representation
	mat.Transduce(r, w)

	matStr := w.String()

	assert.Equal(datStr, matStr)
}
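// TestMatrixFullTokenizerCallbackTransduce checks transduction of a
// two-sentence input, asserting the full newline-separated output.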
Akronc9c0eae2021-10-22 19:49:43 +02001157func TestMatrixFullTokenizerCallbackTransduce(t *testing.T) {
Akrone396a932021-10-19 01:06:13 +02001158 assert := assert.New(t)
1159
Akron9fb63af2021-10-28 01:15:53 +02001160 if mat == nil {
1161 mat = LoadMatrixFile("testdata/tokenizer.matok")
1162 }
Akrone396a932021-10-19 01:06:13 +02001163
1164 assert.NotNil(mat)
1165
1166 b := make([]byte, 0, 2048)
1167 w := bytes.NewBuffer(b)
Akrone396a932021-10-19 01:06:13 +02001168
1169 assert.True(mat.Transduce(strings.NewReader("Der alte Baum. Er war schon alt."), w))
Akrona854faa2021-10-22 19:31:08 +02001170
1171 matStr := w.String()
1172
1173 assert.Equal("Der\nalte\nBaum\n.\n\nEr\nwar\nschon\nalt\n.\n\n\n", matStr)
1174}
1175
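// TestMatrixFullTokenizerTextTreatment checks the treatment of
// end-of-text markers (\x04) and runs of empty lines.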
func TestMatrixFullTokenizerTextTreatment(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	assert.True(mat.Transduce(strings.NewReader("Erste.\n\n\n\n\x04\x0aNächst.\x04"), w))
	matStr := w.String()
	assert.Equal("Erste\n.\n\n\nNächst\n.\n\n\n", matStr)
}
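// TestMatrixFullTokenizerLongText checks that a longer,
// multi-paragraph input is tokenized completely.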
func TestMatrixFullTokenizerLongText(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	text := `The Project Gutenberg EBook of Effi Briest, by Theodor Fontane

Copyright laws are changing all over the world. Be sure to check the
copyright laws for your country before downloading or redistributing
this or any other Project Gutenberg eBook.

This header should be the first thing seen when viewing this Project
Gutenberg file. Please do not remove it. Do not change or edit the
header without written permission.

Please read the "legal small print," and other information about the
eBook and Project Gutenberg at the bottom of this file. Included is
important information about your specific rights and restrictions in
how the file may be used. You can also find out about how to make a
donation to Project Gutenberg, and how to get involved.


**Welcome To The World of Free Plain Vanilla Electronic Texts**

**eBooks Readable By Both Humans and By Computers, Since 1971**

*****These eBooks Were Prepared By Thousands of Volunteers!*****


Title: Effi Briest

Author: Theodor Fontane

Release Date: March, 2004 [EBook #5323]
`

	assert.True(mat.Transduce(strings.NewReader(text), w))

	assert.True(strings.Contains(w.String(), "Release"))
}
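// TestMatrixTrimming checks that leading whitespace does not produce
// empty tokens.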
func TestMatrixTrimming(t *testing.T) {
	assert := assert.New(t)

	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)

	assert.True(mat.Transduce(strings.NewReader(" Erste."), w))
	matStr := w.String()
	assert.Equal("Erste\n.\n\n\n", matStr)
}
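// BenchmarkMatrixTransduce measures transduction throughput on the
// sample text s; run it with `go test -bench=MatrixTransduce`.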
func BenchmarkMatrixTransduce(b *testing.B) {
	bu := make([]byte, 0, 2048)
	w := bytes.NewBuffer(bu)

	r := strings.NewReader(s)

	mat := LoadMatrixFile("testdata/tokenizer.matok")

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		w.Reset()
		r.Reset(s)
		ok := mat.Transduce(r, w)
		if !ok {
			fmt.Println("Fail!")
			fmt.Println(w.String())
			os.Exit(1)
		}
	}
}