blob: 697e5647350e8f0dbc32fd652642850613795e72 [file] [log] [blame]
Akron1c34ce62021-09-23 23:27:39 +02001package datok
2
3import (
4 "bytes"
Akron28031b72021-10-02 13:07:25 +02005 "fmt"
6 "os"
Akron1c34ce62021-09-23 23:27:39 +02007 "strings"
8 "testing"
9
10 "github.com/stretchr/testify/assert"
11)
12
// s is the shared test corpus used by TestMatokDatokEquivalence and
// BenchmarkTransduceMatrix. It bundles the tricky tokenization cases
// (abbreviations, URLs, e-mail addresses, IPs, dates, omission words,
// contractions) that the tests below also exercise individually.
// NOTE: this is a raw string literal, so the \" sequences are literal
// backslash-plus-quote characters in the transducer input.
var s string = `Der Vorsitzende der Abk. hat gewählt. Gefunden auf wikipedia.org. Ich bin unter korap@ids-mannheim.de erreichbar.
Unsere Website ist https://korap.ids-mannheim.de/?q=Baum. Unser Server ist 10.0.10.51. Zu 50.4% ist es sicher.
Der Termin ist am 5.9.2018.
Ich habe die readme.txt heruntergeladen.
Ausschalten!!! Hast Du nicht gehört???
Ich wohne in der Weststr. und Du? Kupietz und Schmidt [2018]: Korpuslinguistik. Dieses verf***** Kleid! Ich habe die readme.txt heruntergeladen.
Er sagte: \"Es geht mir gut!\", daraufhin ging er. "Das ist von C&A!" Früher bzw. später ... Sie erreichte den 1. Platz!
Archive: Ich bin kein zip. D'dorf Ku'damm Lu'hafen M'gladbach W'schaft.
Mach's macht's was'n ist's haste willste kannste biste kriegste.`
22
Akron1c34ce62021-09-23 23:27:39 +020023func TestFullTokenizerMatrix(t *testing.T) {
24 assert := assert.New(t)
25 foma := LoadFomaFile("testdata/simpletok.fst")
26 assert.NotNil(foma)
27
28 mat := foma.ToMatrix()
29
30 r := strings.NewReader(" wald gehen Da kann\t man was \"erleben\"!")
31 b := make([]byte, 0, 2048)
32 w := bytes.NewBuffer(b)
33 var tokens []string
34 mat.Transduce(r, w)
35 tokens = strings.Split(w.String(), "\n")
Akrona854faa2021-10-22 19:31:08 +020036 assert.Equal(len(tokens), 11)
Akron1c34ce62021-09-23 23:27:39 +020037 assert.Equal("wald", tokens[0])
38 assert.Equal("gehen", tokens[1])
39 assert.Equal("Da", tokens[2])
40 assert.Equal("kann", tokens[3])
41 assert.Equal("man", tokens[4])
42 assert.Equal("was", tokens[5])
43 assert.Equal("\"erleben\"", tokens[6])
44 assert.Equal("!", tokens[7])
Akron5c82a922021-09-24 19:11:29 +020045
46 r = strings.NewReader(" In den Wald gehen? -- Da kann\t man was \"erleben\"!")
47 w.Reset()
48 mat.Transduce(r, w)
49 tokens = strings.Split(w.String(), "\n")
50 assert.Equal("In", tokens[0])
51 assert.Equal("den", tokens[1])
52 assert.Equal("Wald", tokens[2])
53 assert.Equal("gehen", tokens[3])
54 assert.Equal("?", tokens[4])
55 assert.Equal("--", tokens[5])
56
57 r = strings.NewReader(" g? -- D")
58 w.Reset()
59 mat.Transduce(r, w)
60 tokens = strings.Split(w.String(), "\n")
61 assert.Equal("g", tokens[0])
62 assert.Equal("?", tokens[1])
63 assert.Equal("--", tokens[2])
64 assert.Equal("D", tokens[3])
65 assert.Equal("", tokens[4])
66 assert.Equal("", tokens[5])
Akrona854faa2021-10-22 19:31:08 +020067 assert.Equal("", tokens[6])
68 assert.Equal(7, len(tokens))
Akron5c82a922021-09-24 19:11:29 +020069}
70
Akron16c312e2021-09-26 13:11:12 +020071func TestReadWriteMatrixTokenizer(t *testing.T) {
72 assert := assert.New(t)
73 foma := LoadFomaFile("testdata/simpletok.fst")
74 assert.NotNil(foma)
75
76 mat := foma.ToMatrix()
Akron28031b72021-10-02 13:07:25 +020077 assert.NotNil(mat)
Akron16c312e2021-09-26 13:11:12 +020078
79 assert.True(tmatch(mat, "bau"))
80 assert.True(tmatch(mat, "bad"))
81 assert.True(tmatch(mat, "wald gehen"))
82 b := make([]byte, 0, 1024)
83 buf := bytes.NewBuffer(b)
84 n, err := mat.WriteTo(buf)
85 assert.Nil(err)
Akron28031b72021-10-02 13:07:25 +020086 assert.Equal(int64(230), n)
Akron16c312e2021-09-26 13:11:12 +020087 mat2 := ParseMatrix(buf)
88 assert.NotNil(mat2)
89 assert.Equal(mat.sigma, mat2.sigma)
90 assert.Equal(mat.epsilon, mat2.epsilon)
91 assert.Equal(mat.unknown, mat2.unknown)
92 assert.Equal(mat.identity, mat2.identity)
93 assert.Equal(mat.stateCount, mat2.stateCount)
94 assert.Equal(len(mat.array), len(mat2.array))
95 assert.Equal(mat.array, mat2.array)
96 assert.True(tmatch(mat2, "bau"))
97 assert.True(tmatch(mat2, "bad"))
98 assert.True(tmatch(mat2, "wald gehen"))
99}
100
// TestMatrixIgnorableMCS checks that a multi-character symbol (MCS)
// that appears in sigma but not in the net is passed through intact
// ("<ab>" is not split up).
func TestMatrixIgnorableMCS(t *testing.T) {
	assert := assert.New(t)

	// This test relies on final states. That's why it is
	// not working correctly anymore.

	// File has MCS in sigma but not in net
	tok := LoadFomaFile("testdata/ignorable_mcs.fst")
	assert.NotNil(tok)
	mat := tok.ToMatrix()
	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// Is only unambiguous when transducing strictly greedy!
	assert.True(mat.Transduce(strings.NewReader("ab<ab>a"), w))
	tokens = strings.Split(w.String(), "\n")
	// Two trailing newlines mark token/text end, hence 6 fields.
	assert.Equal("a\nb\n<ab>a\n\n\n", w.String())
	assert.Equal("a", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("<ab>a", tokens[2])
	assert.Equal(6, len(tokens))
}
126
// xTestReadWriteMatrixFullTokenizer round-trips the FULL tokenizer
// matrix through WriteTo/ParseMatrix and compares fields and behavior.
// It is deliberately disabled: the leading "x" keeps the Go test
// runner from picking it up (the full matrix round-trip is expensive
// and the byte-size assertion below is commented out).
func xTestReadWriteMatrixFullTokenizer(t *testing.T) {
	assert := assert.New(t)
	foma := LoadFomaFile("testdata/tokenizer.fst")
	assert.NotNil(foma)

	mat := foma.ToMatrix()
	// NOTE(review): this re-checks foma; it was presumably meant to be
	// assert.NotNil(mat) — confirm before re-enabling the test.
	assert.NotNil(foma)

	tb := make([]byte, 0, 2048)
	w := bytes.NewBuffer(tb)

	assert.True(mat.Transduce(strings.NewReader("der alte baum"), w))
	assert.Equal("der\nalte\nbaum\n\n\n", w.String())

	// Serialize and re-parse the matrix.
	b := make([]byte, 0, 1024)
	buf := bytes.NewBuffer(b)
	_, err := mat.WriteTo(buf)
	assert.Nil(err)
	w.Reset()
	// assert.Equal(int64(248), n)

	mat2 := ParseMatrix(buf)
	assert.NotNil(mat2)
	assert.Equal(mat.sigma, mat2.sigma)
	assert.Equal(mat.epsilon, mat2.epsilon)
	assert.Equal(mat.unknown, mat2.unknown)
	assert.Equal(mat.identity, mat2.identity)
	assert.Equal(mat.stateCount, mat2.stateCount)
	assert.Equal(len(mat.array), len(mat2.array))
	// assert.Equal(mat.array, mat2.array)

	// The reconstructed tokenizer must produce identical output.
	assert.True(mat2.Transduce(strings.NewReader("der alte baum"), w))
	assert.Equal("der\nalte\nbaum\n\n\n", w.String())
}
161
162func TestFullTokenizerMatrixTransduce(t *testing.T) {
163 assert := assert.New(t)
164
Akron094a4e82021-10-02 18:37:00 +0200165 mat := LoadMatrixFile("testdata/tokenizer.matok")
Akron28031b72021-10-02 13:07:25 +0200166
167 assert.NotNil(mat)
168
169 b := make([]byte, 0, 2048)
170 w := bytes.NewBuffer(b)
171 var tokens []string
172
173 assert.True(mat.Transduce(strings.NewReader("tra. u Du?"), w))
174
175 tokens = strings.Split(w.String(), "\n")
Akrona854faa2021-10-22 19:31:08 +0200176 assert.Equal("tra\n.\n\nu\nDu\n?\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200177 assert.Equal("tra", tokens[0])
178 assert.Equal(".", tokens[1])
179 assert.Equal("", tokens[2])
180 assert.Equal("u", tokens[3])
181 assert.Equal("Du", tokens[4])
182 assert.Equal("?", tokens[5])
183 assert.Equal("", tokens[6])
184 assert.Equal("", tokens[7])
Akrona854faa2021-10-22 19:31:08 +0200185 assert.Equal(9, len(tokens))
Akron28031b72021-10-02 13:07:25 +0200186
187 w.Reset()
188 assert.True(mat.Transduce(strings.NewReader("\"John Doe\"@xx.com"), w))
Akrona854faa2021-10-22 19:31:08 +0200189 assert.Equal("\"\nJohn\nDoe\n\"\n@xx\n.\n\ncom\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200190}
191
// TestFullTokenizerMatrixSentenceSplitter checks sentence boundary
// detection: sentences are separated by an empty line ("\n\n") in the
// transducer output, so splitting on "\n\n" yields one element per
// sentence plus a final "\n" remainder from the text-end marker.
func TestFullTokenizerMatrixSentenceSplitter(t *testing.T) {
	assert := assert.New(t)
	mat := LoadMatrixFile("testdata/tokenizer.matok")

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	// testSentSplitterSimple
	assert.True(mat.Transduce(strings.NewReader("Der alte Mann."), w))
	sentences = strings.Split(w.String(), "\n\n")

	assert.Equal("Der\nalte\nMann\n.\n\n\n", w.String())
	assert.Equal("Der\nalte\nMann\n.", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// Abbreviation with internal dots must not end the sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der F.D.P. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nF.D.P.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	// Known abbreviation ("Abk.") must not end the sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der Abk. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nAbk.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	// Empty input still produces the end markers.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("", sentences[0])
	assert.Equal("", sentences[1])

	// Host name followed by a real sentence-final period.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Gefunden auf wikipedia.org."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// E-mail address must not trigger a boundary.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich bin unter korap@ids-mannheim.de erreichbar."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// URL must stay a single token and a single sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unsere Website ist https://korap.ids-mannheim.de/?q=Baum"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("Unsere\nWebsite\nist\nhttps://korap.ids-mannheim.de/?q=Baum", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// IP address dots must not split the sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unser Server ist 10.0.10.51."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// Decimal number with a dot.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Zu 50.4% ist es sicher"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// Date with dots.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Termin ist am 5.9.2018"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// File name with extension.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich habe die readme.txt heruntergeladen"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Ich\nhabe\ndie\nreadme.txt\nheruntergeladen", sentences[0])
	assert.Equal("\n", sentences[1])

	// Multiple exclamation/question marks end their sentences.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ausschalten!!! Hast Du nicht gehört???"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("Ausschalten\n!!!", sentences[0])
	assert.Equal("Hast\nDu\nnicht\ngehört\n???", sentences[1])
	assert.Equal("\n", sentences[2])

	// Street-name abbreviation ("Weststr.") must not split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich wohne in der Weststr. und Du?"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	/*
		Test:
		"\"Ausschalten!!!\", sagte er. \"Hast Du nicht gehört???\""), w))
	*/
}
Akron28031b72021-10-02 13:07:25 +0200287
// TestFullTokenizerMatrixTokenSplitter is the main token-level test
// suite, ported scenario by scenario from the KorAP-Tokenizer Java
// tests (the "// test..." comments carry the upstream test names).
// Scenarios marked "Ignored in KorAP-Tokenizer" or "Differs ..."
// document known deviations from the upstream behavior. The trailing
// commented-out blocks are upstream Java tests not (yet) ported.
func TestFullTokenizerMatrixTokenSplitter(t *testing.T) {
	assert := assert.New(t)

	mat := LoadMatrixFile("testdata/tokenizer.matok")

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// testTokenizerSimple
	tokens = ttokenize(mat, w, "Der alte Mann")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(len(tokens), 3)

	tokens = ttokenize(mat, w, "Der alte Mann.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerAbbr
	tokens = ttokenize(mat, w, "Der Vorsitzende der F.D.P. hat gewählt")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Vorsitzende")
	assert.Equal(tokens[2], "der")
	assert.Equal(tokens[3], "F.D.P.")
	assert.Equal(tokens[4], "hat")
	assert.Equal(tokens[5], "gewählt")
	assert.Equal(len(tokens), 6)
	// Ignored in KorAP-Tokenizer

	// testTokenizerHost1
	tokens = ttokenize(mat, w, "Gefunden auf wikipedia.org")
	assert.Equal(tokens[0], "Gefunden")
	assert.Equal(tokens[1], "auf")
	assert.Equal(tokens[2], "wikipedia.org")
	assert.Equal(len(tokens), 3)

	// testTokenizerWwwHost
	tokens = ttokenize(mat, w, "Gefunden auf www.wikipedia.org")
	assert.Equal("Gefunden", tokens[0])
	assert.Equal("auf", tokens[1])
	assert.Equal("www.wikipedia.org", tokens[2])
	assert.Equal(3, len(tokens))

	// testTokenizerWwwUrl
	tokens = ttokenize(mat, w, "Weitere Infos unter www.info.biz/info")
	assert.Equal("www.info.biz/info", tokens[3])

	// testTokenizerFtpHost
	/*
		tokens = tokenize(dat, w, "Kann von ftp.download.org heruntergeladen werden")
		assert.Equal("Kann", tokens[0])
		assert.Equal("von", tokens[1])
		assert.Equal("ftp.download.org", tokens[2])
		assert.Equal(5, len(tokens))
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerDash
	tokens = ttokenize(mat, w, "Das war -- spitze")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "--")
	assert.Equal(tokens[3], "spitze")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail1
	tokens = ttokenize(mat, w, "Ich bin unter korap@ids-mannheim.de erreichbar.")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "bin")
	assert.Equal(tokens[2], "unter")
	assert.Equal(tokens[3], "korap@ids-mannheim.de")
	assert.Equal(tokens[4], "erreichbar")
	assert.Equal(tokens[5], ".")
	assert.Equal(len(tokens), 6)

	// testTokenizerEmail2
	tokens = ttokenize(mat, w, "Oder unter korap[at]ids-mannheim[dot]de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap[at]ids-mannheim[dot]de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail3
	tokens = ttokenize(mat, w, "Oder unter korap(at)ids-mannheim(dot)de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap(at)ids-mannheim(dot)de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)
	// Ignored in KorAP-Tokenizer

	// testTokenizerDoNotAcceptQuotedEmailNames
	tokens = ttokenize(mat, w, "\"John Doe\"@xx.com")
	assert.Equal("\"", tokens[0])
	assert.Equal("John", tokens[1])
	assert.Equal("Doe", tokens[2])
	assert.Equal("\"", tokens[3])
	assert.Equal("@xx", tokens[4])
	assert.Equal(".", tokens[5]) // Differs - as the sentence splitter splits here!
	assert.Equal("com", tokens[6])
	assert.Equal(7, len(tokens))

	// testTokenizerTwitter
	tokens = ttokenize(mat, w, "Folgt @korap und #korap")
	assert.Equal(tokens[0], "Folgt")
	assert.Equal(tokens[1], "@korap")
	assert.Equal(tokens[2], "und")
	assert.Equal(tokens[3], "#korap")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb1
	tokens = ttokenize(mat, w, "Unsere Website ist https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[0], "Unsere")
	assert.Equal(tokens[1], "Website")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb2
	tokens = ttokenize(mat, w, "Wir sind auch im Internet (https://korap.ids-mannheim.de/?q=Baum)")
	assert.Equal(tokens[0], "Wir")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "auch")
	assert.Equal(tokens[3], "im")
	assert.Equal(tokens[4], "Internet")
	assert.Equal(tokens[5], "(")
	assert.Equal(tokens[6], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[7], ")")
	assert.Equal(len(tokens), 8)
	// Ignored in KorAP-Tokenizer

	// testTokenizerWeb3
	tokens = ttokenize(mat, w, "Die Adresse ist https://korap.ids-mannheim.de/?q=Baum.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "Adresse")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)
	// Ignored in KorAP-Tokenizer

	// testTokenizerServer
	tokens = ttokenize(mat, w, "Unser Server ist 10.0.10.51.")
	assert.Equal(tokens[0], "Unser")
	assert.Equal(tokens[1], "Server")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "10.0.10.51")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)

	// testTokenizerNum
	tokens = ttokenize(mat, w, "Zu 50,4% ist es sicher")
	assert.Equal(tokens[0], "Zu")
	assert.Equal(tokens[1], "50,4%")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "es")
	assert.Equal(tokens[4], "sicher")
	assert.Equal(len(tokens), 5)
	// Differs from KorAP-Tokenizer

	// testTokenizerDate
	tokens = ttokenize(mat, w, "Der Termin ist am 5.9.2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5.9.2018")
	assert.Equal(len(tokens), 5)

	tokens = ttokenize(mat, w, "Der Termin ist am 5/9/2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5/9/2018")
	assert.Equal(len(tokens), 5)

	// testTokenizerDateRange
	/*
		tokens = tokenize(dat, w, "Der Termin war vom 4.-5.9.2018")
		assert.Equal(tokens[0], "Der")
		assert.Equal(tokens[1], "Termin")
		assert.Equal(tokens[2], "war")
		assert.Equal(tokens[3], "vom")
		assert.Equal(tokens[4], "4.")
		assert.Equal(tokens[5], "-")
		assert.Equal(tokens[6], "5.9.2018")
		assert.Equal(len(tokens), 7)
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerEmoji1
	tokens = ttokenize(mat, w, "Das ist toll! ;)")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "toll")
	assert.Equal(tokens[3], "!")
	assert.Equal(tokens[4], ";)")
	assert.Equal(len(tokens), 5)

	// testTokenizerRef1
	tokens = ttokenize(mat, w, "Kupietz und Schmidt (2018): Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "(2018)")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerRef2 () {
	tokens = ttokenize(mat, w, "Kupietz und Schmidt [2018]: Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "[2018]")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerOmission1 () {
	tokens = ttokenize(mat, w, "Er ist ein A****loch!")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "ein")
	assert.Equal(tokens[3], "A****loch")
	assert.Equal(tokens[4], "!")
	assert.Equal(len(tokens), 5)

	// testTokenizerOmission2
	tokens = ttokenize(mat, w, "F*ck!")
	assert.Equal(tokens[0], "F*ck")
	assert.Equal(tokens[1], "!")
	assert.Equal(len(tokens), 2)

	// testTokenizerOmission3 () {
	tokens = ttokenize(mat, w, "Dieses verf***** Kleid!")
	assert.Equal(tokens[0], "Dieses")
	assert.Equal(tokens[1], "verf*****")
	assert.Equal(tokens[2], "Kleid")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension1
	tokens = ttokenize(mat, w, "Ich habe die readme.txt heruntergeladen")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "habe")
	assert.Equal(tokens[2], "die")
	assert.Equal(tokens[3], "readme.txt")
	assert.Equal(tokens[4], "heruntergeladen")
	assert.Equal(len(tokens), 5)

	// Probably interpreted as HOST
	// testTokenizerFileExtension2
	tokens = ttokenize(mat, w, "Nimm die README.TXT!")
	assert.Equal(tokens[0], "Nimm")
	assert.Equal(tokens[1], "die")
	assert.Equal(tokens[2], "README.TXT")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension3
	tokens = ttokenize(mat, w, "Zeig mir profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "profile.jpeg")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile1

	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.docx")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile2
	tokens = ttokenize(mat, w, "Gehe zu /Dokumente/profile.docx")
	assert.Equal(tokens[0], "Gehe")
	assert.Equal(tokens[1], "zu")
	assert.Equal(tokens[2], "/Dokumente/profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile3
	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.jpeg")
	assert.Equal(len(tokens), 3)
	// Ignored in KorAP-Tokenizer

	// testTokenizerPunct
	tokens = ttokenize(mat, w, "Er sagte: \"Es geht mir gut!\", daraufhin ging er.")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "sagte")
	assert.Equal(tokens[2], ":")
	assert.Equal(tokens[3], "\"")
	assert.Equal(tokens[4], "Es")
	assert.Equal(tokens[5], "geht")
	assert.Equal(tokens[6], "mir")
	assert.Equal(tokens[7], "gut")
	assert.Equal(tokens[8], "!")
	assert.Equal(tokens[9], "\"")
	assert.Equal(tokens[10], ",")
	assert.Equal(tokens[11], "daraufhin")
	assert.Equal(tokens[12], "ging")
	assert.Equal(tokens[13], "er")
	assert.Equal(tokens[14], ".")
	assert.Equal(len(tokens), 15)

	// testTokenizerPlusAmpersand
	tokens = ttokenize(mat, w, "&quot;Das ist von C&A!&quot;")
	assert.Equal(tokens[0], "&quot;")
	assert.Equal(tokens[1], "Das")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "von")
	assert.Equal(tokens[4], "C&A")
	assert.Equal(tokens[5], "!")
	assert.Equal(tokens[6], "&quot;")
	assert.Equal(len(tokens), 7)

	// testTokenizerLongEnd
	tokens = ttokenize(mat, w, "Siehst Du?!!?")
	assert.Equal(tokens[0], "Siehst")
	assert.Equal(tokens[1], "Du")
	assert.Equal(tokens[2], "?!!?")
	assert.Equal(len(tokens), 3)

	// testTokenizerIrishO
	tokens = ttokenize(mat, w, "Peter O'Toole")
	assert.Equal(tokens[0], "Peter")
	assert.Equal(tokens[1], "O'Toole")
	assert.Equal(len(tokens), 2)

	// testTokenizerAbr
	tokens = ttokenize(mat, w, "Früher bzw. später ...")
	assert.Equal(tokens[0], "Früher")
	assert.Equal(tokens[1], "bzw.")
	assert.Equal(tokens[2], "später")
	assert.Equal(tokens[3], "...")
	assert.Equal(len(tokens), 4)

	// testTokenizerUppercaseRule
	tokens = ttokenize(mat, w, "Es war spät.Morgen ist es früh.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "spät")
	assert.Equal(tokens[3], ".")
	assert.Equal(tokens[4], "Morgen")
	assert.Equal(tokens[5], "ist")
	assert.Equal(tokens[6], "es")
	assert.Equal(tokens[7], "früh")
	assert.Equal(tokens[8], ".")
	assert.Equal(len(tokens), 9)
	// Ignored in KorAP-Tokenizer

	// testTokenizerOrd
	tokens = ttokenize(mat, w, "Sie erreichte den 1. Platz!")
	assert.Equal(tokens[0], "Sie")
	assert.Equal(tokens[1], "erreichte")
	assert.Equal(tokens[2], "den")
	assert.Equal(tokens[3], "1.")
	assert.Equal(tokens[4], "Platz")
	assert.Equal(tokens[5], "!")
	assert.Equal(len(tokens), 6)

	// testNoZipOuputArchive
	tokens = ttokenize(mat, w, "Archive: Ich bin kein zip\n")
	assert.Equal(tokens[0], "Archive")
	assert.Equal(tokens[1], ":")
	assert.Equal(tokens[2], "Ich")
	assert.Equal(tokens[3], "bin")
	assert.Equal(tokens[4], "kein")
	assert.Equal(tokens[5], "zip")
	assert.Equal(6, len(tokens))

	// testTokenizerStrasse
	tokens = ttokenize(mat, w, "Ich wohne in der Weststr. und Du?")
	assert.Equal(tokens[4], "Weststr.")
	assert.Equal(8, len(tokens))

	// germanTokenizerKnowsGermanOmissionWords
	tokens = ttokenize(mat, w, "D'dorf Ku'damm Lu'hafen M'gladbach W'schaft")
	assert.Equal("D'dorf", tokens[0])
	assert.Equal("Ku'damm", tokens[1])
	assert.Equal("Lu'hafen", tokens[2])
	assert.Equal("M'gladbach", tokens[3])
	assert.Equal("W'schaft", tokens[4])
	assert.Equal(5, len(tokens))

	// germanTokenizerDoesNOTSeparateGermanContractions
	tokens = ttokenize(mat, w, "mach's macht's was'n ist's haste willste kannste biste kriegste")
	assert.Equal("mach's", tokens[0])
	assert.Equal("macht's", tokens[1])
	assert.Equal("was'n", tokens[2])
	assert.Equal("ist's", tokens[3])
	assert.Equal("haste", tokens[4])
	assert.Equal("willste", tokens[5])
	assert.Equal("kannste", tokens[6])
	assert.Equal("biste", tokens[7])
	assert.Equal("kriegste", tokens[8])
	assert.Equal(9, len(tokens))

	// The following upstream KorAP-Tokenizer (Java) tests have not been
	// ported yet and are kept here verbatim for reference.
	/*
		@Test
		public void englishTokenizerSeparatesEnglishContractionsAndClitics () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "I've we'll you'd I'm we're Peter's isn't")
			assert.Equal("'ve", tokens[1]);
			assert.Equal("'ll", tokens[3]);
			assert.Equal("'d", tokens[5]);
			assert.Equal("'m", tokens[7]);
			assert.Equal("'re", tokens[9]);
			assert.Equal("'s", tokens[11]);
			assert.Equal("is", tokens[12]);
			assert.Equal("n't", tokens[13]);
			assert.Equal(14, len(tokens));
		}

		@Test
		public void frenchTokenizerKnowsFrenchAbbreviations () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "Approx. en juill. 2004 mon prof. M. Foux m'a dit qu'il faut faire exerc. no. 4, et lire pp. 27-30.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("juill.", tokens[2]);
			assert.Equal("prof.", tokens[5]);
			assert.Equal("exerc.", tokens[15]);
			assert.Equal("no.", tokens[16]);
			assert.Equal("pp.", tokens[21]);
		}

		@Test
		public void frenchTokenizerKnowsFrenchContractions () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "J'ai j'habite qu'il d'un jusqu'à Aujourd'hui D'accord Quelqu'un Presqu'île")
			assert.Equal("J'", tokens[0]);
			assert.Equal("j'", tokens[2]);
			assert.Equal("qu'", tokens[4]);
			assert.Equal("d'", tokens[6]);
			assert.Equal("jusqu'", tokens[8]);
			assert.Equal("Aujourd'hui", tokens[10]);
			assert.Equal("D'", tokens[11]); // ’
			assert.Equal("Quelqu'un", tokens[13]); // ’
			assert.Equal("Presqu'île", tokens[14]); // ’
		}

		@Test
		public void frenchTokenizerKnowsFrenchClitics () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "suis-je sont-elles ")
			assert.Equal("suis", tokens[0]);
			assert.Equal("-je", tokens[1]);
			assert.Equal("sont", tokens[2]);
			assert.Equal("-elles", tokens[3]);
		}

		@Test
		public void testEnglishTokenizerScienceAbbreviations () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "Approx. in Sept. 1954, Assoc. Prof. Dr. R. J. Ewing reviewed articles on Enzymol. Bacteriol. effects later published in Nutr. Rheumatol. No. 12 and Nº. 13., pp. 17-18.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("in", tokens[1]);
			assert.Equal("Sept.", tokens[2]);
			assert.Equal("1954", tokens[3]);
			assert.Equal(",", tokens[4]);
			assert.Equal("Assoc.", tokens[5]);
			assert.Equal("Prof.", tokens[6]);
			assert.Equal("Dr.", tokens[7]);
			assert.Equal("R.", tokens[8]);
			assert.Equal("J.", tokens[9]);
			assert.Equal("Ewing", tokens[10]);
			assert.Equal("reviewed", tokens[11]);
			assert.Equal("articles", tokens[12]);
			assert.Equal("on", tokens[13]);
			assert.Equal("Enzymol.", tokens[14]);
			assert.Equal("Bacteriol.", tokens[15]);
			assert.Equal("effects", tokens[16]);
			assert.Equal("later", tokens[17]);
			assert.Equal("published", tokens[18]);
			assert.Equal("in", tokens[19]);
			assert.Equal("Nutr.", tokens[20]);
			assert.Equal("Rheumatol.", tokens[21]);
			assert.Equal("No.", tokens[22]);
			assert.Equal("12", tokens[23]);
			assert.Equal("and", tokens[24]);
			assert.Equal("Nº.", tokens[25]);
			assert.Equal("13.", tokens[26]);
			assert.Equal(",", tokens[27]);
			assert.Equal("pp.", tokens[28]);
			assert.Equal("17-18", tokens[29]);
			assert.Equal(".", tokens[30]);
		}

		@Test
		public void englishTokenizerCanGuessWhetherIIsAbbrev () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "M. I. Baxter was born during World War I. So was I. He went to the Peter I. Hardy school. So did I.")
			assert.Equal("I.", tokens[1]);
			assert.Equal("I", tokens[8]);
			assert.Equal(".", tokens[9]);
			assert.Equal("I", tokens[12]);
			assert.Equal(".", tokens[13]);
		}

		@Test
		public void testZipOuputArchive () {

			final ByteArrayOutputStream clearOut = new ByteArrayOutputStream();
			System.setOut(new PrintStream(clearOut));
			tokens = tokenize(dat, w, "Archive: ich/bin/ein.zip\n")
			assert.Equal(0, len(tokens));
		}
	*/
	/*

		@Test
		public void testTextBreakOutputArchive () throws InstantiationException, IllegalAccessException, ClassNotFoundException {
			DerekoDfaTokenizer_de tok = (DerekoDfaTokenizer_de) new KorapTokenizer.Builder()
				.tokenizerClassName(DerekoDfaTokenizer_de.class.getName())
				.printOffsets(true)
				.build();
			Span[] tokens = tok.tokenizePos("Text1\004\nText2 Hallo\004Rumsdibums\004Das freut mich sehr.\n");
			assert.Equal("Text1", tokens[0].getType());
			assert.Equal(len(tokens), 9 );
		}
	*/
}
825
826func TestFullTokenizerMatrixXML(t *testing.T) {
827 assert := assert.New(t)
828
Akron094a4e82021-10-02 18:37:00 +0200829 mat := LoadMatrixFile("testdata/tokenizer.matok")
Akron28031b72021-10-02 13:07:25 +0200830
Akron28031b72021-10-02 13:07:25 +0200831 assert.NotNil(mat)
832
833 b := make([]byte, 0, 2048)
834 w := bytes.NewBuffer(b)
835 var tokens []string
836
837 tokens = ttokenize(mat, w, "Das <b>beste</b> Fußballspiel")
838 assert.Equal("Das", tokens[0])
839 assert.Equal("<b>", tokens[1])
840 assert.Equal("beste", tokens[2])
841 assert.Equal("</b>", tokens[3])
842 assert.Equal("Fußballspiel", tokens[4])
843 assert.Equal(5, len(tokens))
844
845 tokens = ttokenize(mat, w, "Das <b class=\"c\">beste</b> Fußballspiel")
846 assert.Equal("Das", tokens[0])
847 assert.Equal("<b class=\"c\">", tokens[1])
848 assert.Equal("beste", tokens[2])
849 assert.Equal("</b>", tokens[3])
850 assert.Equal("Fußballspiel", tokens[4])
851 assert.Equal(5, len(tokens))
852
853 tokens = ttokenize(mat, w, "der<x y=\"alte \"> <x x> alte</x> etc. et. Mann.")
854 assert.Equal("der", tokens[0])
855 assert.Equal("<x y=\"alte \">", tokens[1])
856 assert.Equal("<x x>", tokens[2])
857 assert.Equal("alte", tokens[3])
858 assert.Equal("</x>", tokens[4])
859 assert.Equal("etc.", tokens[5])
860 assert.Equal("et", tokens[6])
861 assert.Equal(".", tokens[7])
862 assert.Equal("Mann", tokens[8])
863 assert.Equal(".", tokens[9])
864 assert.Equal(10, len(tokens))
865}
866
Akronabcb6a52021-10-09 15:52:08 +0200867func TestMatokDatokEquivalence(t *testing.T) {
868 assert := assert.New(t)
869
870 mat := LoadMatrixFile("testdata/tokenizer.matok")
871 dat := LoadDatokFile("testdata/tokenizer.datok")
872
873 r := strings.NewReader(s)
874
875 tb := make([]byte, 0, 2048)
876 w := bytes.NewBuffer(tb)
877
878 // Transduce with double array representation
879 dat.Transduce(r, w)
880
881 datStr := w.String()
882
883 r.Reset(s)
884 w.Reset()
885
886 // Transduce with matrix representation
887 mat.Transduce(r, w)
888
889 matStr := w.String()
890
891 assert.Equal(datStr, matStr)
892}
893
Akrone396a932021-10-19 01:06:13 +0200894func TestFullTokenizerMatrixCallbackTransduce(t *testing.T) {
895 assert := assert.New(t)
896
897 mat := LoadMatrixFile("testdata/tokenizer.matok")
898
899 assert.NotNil(mat)
900
901 b := make([]byte, 0, 2048)
902 w := bytes.NewBuffer(b)
Akrone396a932021-10-19 01:06:13 +0200903
904 assert.True(mat.Transduce(strings.NewReader("Der alte Baum. Er war schon alt."), w))
Akrona854faa2021-10-22 19:31:08 +0200905
906 matStr := w.String()
907
908 assert.Equal("Der\nalte\nBaum\n.\n\nEr\nwar\nschon\nalt\n.\n\n\n", matStr)
909}
910
911func TestFullTokenizerMatrixTextTreatment(t *testing.T) {
912 assert := assert.New(t)
913
914 mat := LoadMatrixFile("testdata/tokenizer.matok")
915
916 assert.NotNil(mat)
917
918 b := make([]byte, 0, 2048)
919 w := bytes.NewBuffer(b)
920
921 assert.True(mat.Transduce(strings.NewReader("Erste.\n\n\n\n\x04\x0aNächst.\x04"), w))
922 matStr := w.String()
923 assert.Equal("Erste\n.\n\n\nNächst\n.\n\n\n", matStr)
924
Akrone396a932021-10-19 01:06:13 +0200925}
926
Akron28031b72021-10-02 13:07:25 +0200927func BenchmarkTransduceMatrix(b *testing.B) {
928 bu := make([]byte, 0, 2048)
929 w := bytes.NewBuffer(bu)
930
Akron28031b72021-10-02 13:07:25 +0200931 r := strings.NewReader(s)
932
Akron094a4e82021-10-02 18:37:00 +0200933 mat := LoadMatrixFile("testdata/tokenizer.matok")
Akron28031b72021-10-02 13:07:25 +0200934
935 b.ResetTimer()
936
937 for i := 0; i < b.N; i++ {
938 w.Reset()
939 r.Reset(s)
940 ok := mat.Transduce(r, w)
941 if !ok {
942 fmt.Println("Fail!")
943 fmt.Println(w.String())
944 os.Exit(1)
945 }
946 }
947}