blob: f9857366874378f32adf3972c2b29fca7b28eed1 [file] [log] [blame]
Akron1c34ce62021-09-23 23:27:39 +02001package datok
2
3import (
4 "bytes"
Akron28031b72021-10-02 13:07:25 +02005 "fmt"
6 "os"
Akron1c34ce62021-09-23 23:27:39 +02007 "strings"
8 "testing"
9
10 "github.com/stretchr/testify/assert"
11)
12
// s is a multi-line German sample text covering many tokenizer edge
// cases at once: abbreviations (Abk., bzw.), hostnames, e-mail
// addresses, URLs, IP addresses, percentages, dates, file names,
// repeated sentence-final punctuation, omission words (verf*****),
// and contractions (Mach's, haste, …).
// NOTE(review): s is not referenced by any test visible in this chunk —
// presumably consumed by a benchmark elsewhere in the file; verify.
var s string = `Der Vorsitzende der Abk. hat gewählt. Gefunden auf wikipedia.org. Ich bin unter korap@ids-mannheim.de erreichbar.
Unsere Website ist https://korap.ids-mannheim.de/?q=Baum. Unser Server ist 10.0.10.51. Zu 50.4% ist es sicher.
Der Termin ist am 5.9.2018.
Ich habe die readme.txt heruntergeladen.
Ausschalten!!! Hast Du nicht gehört???
Ich wohne in der Weststr. und Du? Kupietz und Schmidt [2018]: Korpuslinguistik. Dieses verf***** Kleid! Ich habe die readme.txt heruntergeladen.
Er sagte: \"Es geht mir gut!\", daraufhin ging er. "Das ist von C&A!" Früher bzw. später ... Sie erreichte den 1. Platz!
Archive: Ich bin kein zip. D'dorf Ku'damm Lu'hafen M'gladbach W'schaft.
Mach's macht's was'n ist's haste willste kannste biste kriegste.`
22
Akron9fb63af2021-10-28 01:15:53 +020023var mat *MatrixTokenizer
24
Akronc9c0eae2021-10-22 19:49:43 +020025func TestMatrixFullTokenizer(t *testing.T) {
Akron1c34ce62021-09-23 23:27:39 +020026 assert := assert.New(t)
27 foma := LoadFomaFile("testdata/simpletok.fst")
28 assert.NotNil(foma)
29
30 mat := foma.ToMatrix()
31
32 r := strings.NewReader(" wald gehen Da kann\t man was \"erleben\"!")
33 b := make([]byte, 0, 2048)
34 w := bytes.NewBuffer(b)
35 var tokens []string
36 mat.Transduce(r, w)
37 tokens = strings.Split(w.String(), "\n")
Akrona854faa2021-10-22 19:31:08 +020038 assert.Equal(len(tokens), 11)
Akron1c34ce62021-09-23 23:27:39 +020039 assert.Equal("wald", tokens[0])
40 assert.Equal("gehen", tokens[1])
41 assert.Equal("Da", tokens[2])
42 assert.Equal("kann", tokens[3])
43 assert.Equal("man", tokens[4])
44 assert.Equal("was", tokens[5])
45 assert.Equal("\"erleben\"", tokens[6])
46 assert.Equal("!", tokens[7])
Akron5c82a922021-09-24 19:11:29 +020047
48 r = strings.NewReader(" In den Wald gehen? -- Da kann\t man was \"erleben\"!")
49 w.Reset()
50 mat.Transduce(r, w)
51 tokens = strings.Split(w.String(), "\n")
52 assert.Equal("In", tokens[0])
53 assert.Equal("den", tokens[1])
54 assert.Equal("Wald", tokens[2])
55 assert.Equal("gehen", tokens[3])
56 assert.Equal("?", tokens[4])
57 assert.Equal("--", tokens[5])
58
59 r = strings.NewReader(" g? -- D")
60 w.Reset()
61 mat.Transduce(r, w)
62 tokens = strings.Split(w.String(), "\n")
63 assert.Equal("g", tokens[0])
64 assert.Equal("?", tokens[1])
65 assert.Equal("--", tokens[2])
66 assert.Equal("D", tokens[3])
67 assert.Equal("", tokens[4])
68 assert.Equal("", tokens[5])
Akrona854faa2021-10-22 19:31:08 +020069 assert.Equal("", tokens[6])
70 assert.Equal(7, len(tokens))
Akron5c82a922021-09-24 19:11:29 +020071}
72
Akronc9c0eae2021-10-22 19:49:43 +020073func TestMatrixReadWriteTokenizer(t *testing.T) {
Akron16c312e2021-09-26 13:11:12 +020074 assert := assert.New(t)
75 foma := LoadFomaFile("testdata/simpletok.fst")
76 assert.NotNil(foma)
77
78 mat := foma.ToMatrix()
Akron28031b72021-10-02 13:07:25 +020079 assert.NotNil(mat)
Akron16c312e2021-09-26 13:11:12 +020080
81 assert.True(tmatch(mat, "bau"))
82 assert.True(tmatch(mat, "bad"))
83 assert.True(tmatch(mat, "wald gehen"))
84 b := make([]byte, 0, 1024)
85 buf := bytes.NewBuffer(b)
86 n, err := mat.WriteTo(buf)
87 assert.Nil(err)
Akron28031b72021-10-02 13:07:25 +020088 assert.Equal(int64(230), n)
Akron16c312e2021-09-26 13:11:12 +020089 mat2 := ParseMatrix(buf)
90 assert.NotNil(mat2)
91 assert.Equal(mat.sigma, mat2.sigma)
92 assert.Equal(mat.epsilon, mat2.epsilon)
93 assert.Equal(mat.unknown, mat2.unknown)
94 assert.Equal(mat.identity, mat2.identity)
95 assert.Equal(mat.stateCount, mat2.stateCount)
96 assert.Equal(len(mat.array), len(mat2.array))
97 assert.Equal(mat.array, mat2.array)
98 assert.True(tmatch(mat2, "bau"))
99 assert.True(tmatch(mat2, "bad"))
100 assert.True(tmatch(mat2, "wald gehen"))
101}
102
Akrone396a932021-10-19 01:06:13 +0200103func TestMatrixIgnorableMCS(t *testing.T) {
104 assert := assert.New(t)
105
106 // This test relies on final states. That's why it is
107 // not working correctly anymore.
108
109 // File has MCS in sigma but not in net
110 tok := LoadFomaFile("testdata/ignorable_mcs.fst")
111 assert.NotNil(tok)
112 mat := tok.ToMatrix()
113 assert.NotNil(mat)
114
115 b := make([]byte, 0, 2048)
116 w := bytes.NewBuffer(b)
117 var tokens []string
118
119 // Is only unambigous when transducing strictly greedy!
120 assert.True(mat.Transduce(strings.NewReader("ab<ab>a"), w))
121 tokens = strings.Split(w.String(), "\n")
Akrona854faa2021-10-22 19:31:08 +0200122 assert.Equal("a\nb\n<ab>a\n\n\n", w.String())
Akrone396a932021-10-19 01:06:13 +0200123 assert.Equal("a", tokens[0])
124 assert.Equal("b", tokens[1])
125 assert.Equal("<ab>a", tokens[2])
Akrona854faa2021-10-22 19:31:08 +0200126 assert.Equal(6, len(tokens))
Akrone396a932021-10-19 01:06:13 +0200127}
128
Akronc9c0eae2021-10-22 19:49:43 +0200129func xTestMatrixReadWriteFullTokenizer(t *testing.T) {
Akron28031b72021-10-02 13:07:25 +0200130 assert := assert.New(t)
131 foma := LoadFomaFile("testdata/tokenizer.fst")
132 assert.NotNil(foma)
133
134 mat := foma.ToMatrix()
135 assert.NotNil(foma)
136
137 tb := make([]byte, 0, 2048)
138 w := bytes.NewBuffer(tb)
139
140 assert.True(mat.Transduce(strings.NewReader("der alte baum"), w))
Akrona854faa2021-10-22 19:31:08 +0200141 assert.Equal("der\nalte\nbaum\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200142
143 b := make([]byte, 0, 1024)
144 buf := bytes.NewBuffer(b)
145 _, err := mat.WriteTo(buf)
146 assert.Nil(err)
147 w.Reset()
148 // assert.Equal(int64(248), n)
149
150 mat2 := ParseMatrix(buf)
151 assert.NotNil(mat2)
152 assert.Equal(mat.sigma, mat2.sigma)
153 assert.Equal(mat.epsilon, mat2.epsilon)
154 assert.Equal(mat.unknown, mat2.unknown)
155 assert.Equal(mat.identity, mat2.identity)
156 assert.Equal(mat.stateCount, mat2.stateCount)
157 assert.Equal(len(mat.array), len(mat2.array))
158 // assert.Equal(mat.array, mat2.array)
159
160 assert.True(mat2.Transduce(strings.NewReader("der alte baum"), w))
Akrona854faa2021-10-22 19:31:08 +0200161 assert.Equal("der\nalte\nbaum\n\n\n", w.String())
Akron28031b72021-10-02 13:07:25 +0200162}
163
// TestMatrixFullTokenizerTransduce checks raw transduction output of
// the full tokenizer matrix: one token per line, an extra empty line
// after a sentence boundary, and two trailing newlines at end of text.
func TestMatrixFullTokenizerTransduce(t *testing.T) {
	assert := assert.New(t)

	// Lazily load the shared matrix on first use.
	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	assert.NotNil(mat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	assert.True(mat.Transduce(strings.NewReader("tra. u Du?"), w))

	tokens = strings.Split(w.String(), "\n")
	// "tra." is split with a sentence break after the period
	// (empty line), then "u Du?" follows as a second sentence.
	assert.Equal("tra\n.\n\nu\nDu\n?\n\n\n", w.String())
	assert.Equal("tra", tokens[0])
	assert.Equal(".", tokens[1])
	assert.Equal("", tokens[2])
	assert.Equal("u", tokens[3])
	assert.Equal("Du", tokens[4])
	assert.Equal("?", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal("", tokens[7])
	assert.Equal(9, len(tokens))

	w.Reset()
	// Quoted e-mail names are not accepted as a single token.
	assert.True(mat.Transduce(strings.NewReader("\"John Doe\"@xx.com"), w))
	assert.Equal("\"\nJohn\nDoe\n\"\n@xx\n.\n\ncom\n\n\n", w.String())
}
195
// TestMatrixFullTokenizerMatrixSentenceSplitter checks sentence
// splitting: sentences are separated by an empty line ("\n\n") in the
// transducer output, so splitting on "\n\n" yields one element per
// sentence plus a final "\n" remainder from the text-end marker.
func TestMatrixFullTokenizerMatrixSentenceSplitter(t *testing.T) {
	assert := assert.New(t)

	// Lazily load the shared matrix on first use.
	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	// testSentSplitterSimple
	assert.True(mat.Transduce(strings.NewReader("Der alte Mann."), w))
	sentences = strings.Split(w.String(), "\n\n")

	assert.Equal("Der\nalte\nMann\n.\n\n\n", w.String())
	assert.Equal("Der\nalte\nMann\n.", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// Abbreviation with internal periods must not split the sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der F.D.P. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nF.D.P.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	// Known abbreviation ("Abk.") must not split the sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Vorsitzende der Abk. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Der\nVorsitzende\nder\nAbk.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	// Empty input still yields the end-of-text marker.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader(""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("", sentences[0])
	assert.Equal("", sentences[1])

	// Hostname followed by a sentence-final period.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Gefunden auf wikipedia.org."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// E-mail address must not trigger a sentence split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich bin unter korap@ids-mannheim.de erreichbar."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// URL with query string stays one token / one sentence.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unsere Website ist https://korap.ids-mannheim.de/?q=Baum"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("Unsere\nWebsite\nist\nhttps://korap.ids-mannheim.de/?q=Baum", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// IP address with trailing sentence-final period.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Unser Server ist 10.0.10.51."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("\n", sentences[1])
	assert.Equal(len(sentences), 2)

	// Decimal percentage must not split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Zu 50.4% ist es sicher"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// Dotted date must not split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Der Termin ist am 5.9.2018"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	// File name with extension must not split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich habe die readme.txt heruntergeladen"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	assert.Equal("Ich\nhabe\ndie\nreadme.txt\nheruntergeladen", sentences[0])
	assert.Equal("\n", sentences[1])

	// Repeated terminal punctuation splits into two sentences.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ausschalten!!! Hast Du nicht gehört???"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 3)
	assert.Equal("Ausschalten\n!!!", sentences[0])
	assert.Equal("Hast\nDu\nnicht\ngehört\n???", sentences[1])
	assert.Equal("\n", sentences[2])

	// Street-name abbreviation ("Weststr.") must not split.
	w.Reset()
	assert.True(mat.Transduce(strings.NewReader("Ich wohne in der Weststr. und Du?"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)
	/*
		Test:
		"\"Ausschalten!!!\", sagte er. \"Hast Du nicht gehört???\""), w))
	*/
}
Akron28031b72021-10-02 13:07:25 +0200294
// TestMatrixFullTokenizerTokenSplitter exercises the token-boundary
// rules of the full tokenizer matrix: abbreviations, hostnames, URLs,
// e-mail variants, social-media handles, numbers, dates, emoticons,
// references, censored omission words, file names and paths,
// punctuation, contractions, and German omission words. Each section
// is labelled with the name of the corresponding KorAP-Tokenizer test;
// deviations from that tokenizer are noted inline.
// NOTE(review): most assertions here use assert.Equal(actual,
// expected) — reversed relative to testify's documented
// (expected, actual) order. Pass/fail is unaffected, but failure
// messages swap the roles; consider normalizing.
func TestMatrixFullTokenizerTokenSplitter(t *testing.T) {
	assert := assert.New(t)

	// Lazily load the shared matrix on first use.
	if mat == nil {
		mat = LoadMatrixFile("testdata/tokenizer.matok")
	}

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// testTokenizerSimple
	tokens = ttokenize(mat, w, "Der alte Mann")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(len(tokens), 3)

	tokens = ttokenize(mat, w, "Der alte Mann.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerAbbr
	tokens = ttokenize(mat, w, "Der Vorsitzende der F.D.P. hat gewählt")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Vorsitzende")
	assert.Equal(tokens[2], "der")
	assert.Equal(tokens[3], "F.D.P.")
	assert.Equal(tokens[4], "hat")
	assert.Equal(tokens[5], "gewählt")
	assert.Equal(len(tokens), 6)
	// Ignored in KorAP-Tokenizer

	// testTokenizerHost1
	tokens = ttokenize(mat, w, "Gefunden auf wikipedia.org")
	assert.Equal(tokens[0], "Gefunden")
	assert.Equal(tokens[1], "auf")
	assert.Equal(tokens[2], "wikipedia.org")
	assert.Equal(len(tokens), 3)

	// testTokenizerWwwHost
	tokens = ttokenize(mat, w, "Gefunden auf www.wikipedia.org")
	assert.Equal("Gefunden", tokens[0])
	assert.Equal("auf", tokens[1])
	assert.Equal("www.wikipedia.org", tokens[2])
	assert.Equal(3, len(tokens))

	// testTokenizerWwwUrl
	tokens = ttokenize(mat, w, "Weitere Infos unter www.info.biz/info")
	assert.Equal("www.info.biz/info", tokens[3])

	// testTokenizerFtpHost
	/*
		tokens = tokenize(dat, w, "Kann von ftp.download.org heruntergeladen werden")
		assert.Equal("Kann", tokens[0])
		assert.Equal("von", tokens[1])
		assert.Equal("ftp.download.org", tokens[2])
		assert.Equal(5, len(tokens))
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerDash
	tokens = ttokenize(mat, w, "Das war -- spitze")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "--")
	assert.Equal(tokens[3], "spitze")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail1
	tokens = ttokenize(mat, w, "Ich bin unter korap@ids-mannheim.de erreichbar.")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "bin")
	assert.Equal(tokens[2], "unter")
	assert.Equal(tokens[3], "korap@ids-mannheim.de")
	assert.Equal(tokens[4], "erreichbar")
	assert.Equal(tokens[5], ".")
	assert.Equal(len(tokens), 6)

	// testTokenizerEmail2
	tokens = ttokenize(mat, w, "Oder unter korap[at]ids-mannheim[dot]de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap[at]ids-mannheim[dot]de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail3
	tokens = ttokenize(mat, w, "Oder unter korap(at)ids-mannheim(dot)de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap(at)ids-mannheim(dot)de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)
	// Ignored in KorAP-Tokenizer

	// testTokenizerDoNotAcceptQuotedEmailNames
	tokens = ttokenize(mat, w, "\"John Doe\"@xx.com")
	assert.Equal("\"", tokens[0])
	assert.Equal("John", tokens[1])
	assert.Equal("Doe", tokens[2])
	assert.Equal("\"", tokens[3])
	assert.Equal("@xx", tokens[4])
	assert.Equal(".", tokens[5]) // Differs - as the sentence splitter splits here!
	assert.Equal("com", tokens[6])
	assert.Equal(7, len(tokens))

	// testTokenizerTwitter
	tokens = ttokenize(mat, w, "Folgt @korap und #korap")
	assert.Equal(tokens[0], "Folgt")
	assert.Equal(tokens[1], "@korap")
	assert.Equal(tokens[2], "und")
	assert.Equal(tokens[3], "#korap")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb1
	tokens = ttokenize(mat, w, "Unsere Website ist https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[0], "Unsere")
	assert.Equal(tokens[1], "Website")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb2
	tokens = ttokenize(mat, w, "Wir sind auch im Internet (https://korap.ids-mannheim.de/?q=Baum)")
	assert.Equal(tokens[0], "Wir")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "auch")
	assert.Equal(tokens[3], "im")
	assert.Equal(tokens[4], "Internet")
	assert.Equal(tokens[5], "(")
	assert.Equal(tokens[6], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[7], ")")
	assert.Equal(len(tokens), 8)
	// Ignored in KorAP-Tokenizer

	// testTokenizerWeb3
	tokens = ttokenize(mat, w, "Die Adresse ist https://korap.ids-mannheim.de/?q=Baum.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "Adresse")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)
	// Ignored in KorAP-Tokenizer

	// testTokenizerServer
	tokens = ttokenize(mat, w, "Unser Server ist 10.0.10.51.")
	assert.Equal(tokens[0], "Unser")
	assert.Equal(tokens[1], "Server")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "10.0.10.51")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)

	// testTokenizerNum
	tokens = ttokenize(mat, w, "Zu 50,4% ist es sicher")
	assert.Equal(tokens[0], "Zu")
	assert.Equal(tokens[1], "50,4%")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "es")
	assert.Equal(tokens[4], "sicher")
	assert.Equal(len(tokens), 5)
	// Differs from KorAP-Tokenizer

	// testTokenizerDate
	tokens = ttokenize(mat, w, "Der Termin ist am 5.9.2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5.9.2018")
	assert.Equal(len(tokens), 5)

	tokens = ttokenize(mat, w, "Der Termin ist am 5/9/2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5/9/2018")
	assert.Equal(len(tokens), 5)

	// testTokenizerDateRange
	/*
		tokens = tokenize(dat, w, "Der Termin war vom 4.-5.9.2018")
		assert.Equal(tokens[0], "Der")
		assert.Equal(tokens[1], "Termin")
		assert.Equal(tokens[2], "war")
		assert.Equal(tokens[3], "vom")
		assert.Equal(tokens[4], "4.")
		assert.Equal(tokens[5], "-")
		assert.Equal(tokens[6], "5.9.2018")
		assert.Equal(len(tokens), 7)
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerEmoji1
	tokens = ttokenize(mat, w, "Das ist toll! ;)")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "toll")
	assert.Equal(tokens[3], "!")
	assert.Equal(tokens[4], ";)")
	assert.Equal(len(tokens), 5)

	// testTokenizerRef1
	tokens = ttokenize(mat, w, "Kupietz und Schmidt (2018): Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "(2018)")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerRef2 () {
	tokens = ttokenize(mat, w, "Kupietz und Schmidt [2018]: Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "[2018]")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerOmission1 () {
	tokens = ttokenize(mat, w, "Er ist ein A****loch!")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "ein")
	assert.Equal(tokens[3], "A****loch")
	assert.Equal(tokens[4], "!")
	assert.Equal(len(tokens), 5)

	// testTokenizerOmission2
	tokens = ttokenize(mat, w, "F*ck!")
	assert.Equal(tokens[0], "F*ck")
	assert.Equal(tokens[1], "!")
	assert.Equal(len(tokens), 2)

	// testTokenizerOmission3 () {
	tokens = ttokenize(mat, w, "Dieses verf***** Kleid!")
	assert.Equal(tokens[0], "Dieses")
	assert.Equal(tokens[1], "verf*****")
	assert.Equal(tokens[2], "Kleid")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension1
	tokens = ttokenize(mat, w, "Ich habe die readme.txt heruntergeladen")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "habe")
	assert.Equal(tokens[2], "die")
	assert.Equal(tokens[3], "readme.txt")
	assert.Equal(tokens[4], "heruntergeladen")
	assert.Equal(len(tokens), 5)

	// Probably interpreted as HOST
	// testTokenizerFileExtension2
	tokens = ttokenize(mat, w, "Nimm die README.TXT!")
	assert.Equal(tokens[0], "Nimm")
	assert.Equal(tokens[1], "die")
	assert.Equal(tokens[2], "README.TXT")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension3
	tokens = ttokenize(mat, w, "Zeig mir profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "profile.jpeg")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile1

	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.docx")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile2
	tokens = ttokenize(mat, w, "Gehe zu /Dokumente/profile.docx")
	assert.Equal(tokens[0], "Gehe")
	assert.Equal(tokens[1], "zu")
	assert.Equal(tokens[2], "/Dokumente/profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile3
	tokens = ttokenize(mat, w, "Zeig mir c:\\Dokumente\\profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.jpeg")
	assert.Equal(len(tokens), 3)
	// Ignored in KorAP-Tokenizer

	// testTokenizerPunct
	tokens = ttokenize(mat, w, "Er sagte: \"Es geht mir gut!\", daraufhin ging er.")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "sagte")
	assert.Equal(tokens[2], ":")
	assert.Equal(tokens[3], "\"")
	assert.Equal(tokens[4], "Es")
	assert.Equal(tokens[5], "geht")
	assert.Equal(tokens[6], "mir")
	assert.Equal(tokens[7], "gut")
	assert.Equal(tokens[8], "!")
	assert.Equal(tokens[9], "\"")
	assert.Equal(tokens[10], ",")
	assert.Equal(tokens[11], "daraufhin")
	assert.Equal(tokens[12], "ging")
	assert.Equal(tokens[13], "er")
	assert.Equal(tokens[14], ".")
	assert.Equal(len(tokens), 15)

	// testTokenizerPlusAmpersand
	tokens = ttokenize(mat, w, "&quot;Das ist von C&A!&quot;")
	assert.Equal(tokens[0], "&quot;")
	assert.Equal(tokens[1], "Das")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "von")
	assert.Equal(tokens[4], "C&A")
	assert.Equal(tokens[5], "!")
	assert.Equal(tokens[6], "&quot;")
	assert.Equal(len(tokens), 7)

	// testTokenizerLongEnd
	tokens = ttokenize(mat, w, "Siehst Du?!!?")
	assert.Equal(tokens[0], "Siehst")
	assert.Equal(tokens[1], "Du")
	assert.Equal(tokens[2], "?!!?")
	assert.Equal(len(tokens), 3)

	// testTokenizerIrishO
	tokens = ttokenize(mat, w, "Peter O'Toole")
	assert.Equal(tokens[0], "Peter")
	assert.Equal(tokens[1], "O'Toole")
	assert.Equal(len(tokens), 2)

	// testTokenizerAbr
	tokens = ttokenize(mat, w, "Früher bzw. später ...")
	assert.Equal(tokens[0], "Früher")
	assert.Equal(tokens[1], "bzw.")
	assert.Equal(tokens[2], "später")
	assert.Equal(tokens[3], "...")
	assert.Equal(len(tokens), 4)

	// testTokenizerUppercaseRule
	tokens = ttokenize(mat, w, "Es war spät.Morgen ist es früh.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "spät")
	assert.Equal(tokens[3], ".")
	assert.Equal(tokens[4], "Morgen")
	assert.Equal(tokens[5], "ist")
	assert.Equal(tokens[6], "es")
	assert.Equal(tokens[7], "früh")
	assert.Equal(tokens[8], ".")
	assert.Equal(len(tokens), 9)
	// Ignored in KorAP-Tokenizer

	// testTokenizerOrd
	tokens = ttokenize(mat, w, "Sie erreichte den 1. Platz!")
	assert.Equal(tokens[0], "Sie")
	assert.Equal(tokens[1], "erreichte")
	assert.Equal(tokens[2], "den")
	assert.Equal(tokens[3], "1.")
	assert.Equal(tokens[4], "Platz")
	assert.Equal(tokens[5], "!")
	assert.Equal(len(tokens), 6)

	// testNoZipOuputArchive
	tokens = ttokenize(mat, w, "Archive:  Ich bin kein zip\n")
	assert.Equal(tokens[0], "Archive")
	assert.Equal(tokens[1], ":")
	assert.Equal(tokens[2], "Ich")
	assert.Equal(tokens[3], "bin")
	assert.Equal(tokens[4], "kein")
	assert.Equal(tokens[5], "zip")
	assert.Equal(6, len(tokens))

	// testTokenizerStrasse
	tokens = ttokenize(mat, w, "Ich wohne in der Weststr. und Du?")
	assert.Equal(tokens[4], "Weststr.")
	assert.Equal(8, len(tokens))

	// germanTokenizerKnowsGermanOmissionWords
	tokens = ttokenize(mat, w, "D'dorf Ku'damm Lu'hafen M'gladbach W'schaft")
	assert.Equal("D'dorf", tokens[0])
	assert.Equal("Ku'damm", tokens[1])
	assert.Equal("Lu'hafen", tokens[2])
	assert.Equal("M'gladbach", tokens[3])
	assert.Equal("W'schaft", tokens[4])
	assert.Equal(5, len(tokens))

	// germanTokenizerDoesNOTSeparateGermanContractions
	tokens = ttokenize(mat, w, "mach's macht's was'n ist's haste willste kannste biste kriegste")
	assert.Equal("mach's", tokens[0])
	assert.Equal("macht's", tokens[1])
	assert.Equal("was'n", tokens[2])
	assert.Equal("ist's", tokens[3])
	assert.Equal("haste", tokens[4])
	assert.Equal("willste", tokens[5])
	assert.Equal("kannste", tokens[6])
	assert.Equal("biste", tokens[7])
	assert.Equal("kriegste", tokens[8])
	assert.Equal(9, len(tokens))

	/*
		@Test
		public void englishTokenizerSeparatesEnglishContractionsAndClitics () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "I've we'll you'd I'm we're Peter's isn't")
			assert.Equal("'ve", tokens[1]);
			assert.Equal("'ll", tokens[3]);
			assert.Equal("'d", tokens[5]);
			assert.Equal("'m", tokens[7]);
			assert.Equal("'re", tokens[9]);
			assert.Equal("'s", tokens[11]);
			assert.Equal("is", tokens[12]);
			assert.Equal("n't", tokens[13]);
			assert.Equal(14, len(tokens));
		}

		@Test
		public void frenchTokenizerKnowsFrenchAbbreviations () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "Approx. en juill. 2004 mon prof. M. Foux m'a dit qu'il faut faire exerc. no. 4, et lire pp. 27-30.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("juill.", tokens[2]);
			assert.Equal("prof.", tokens[5]);
			assert.Equal("exerc.", tokens[15]);
			assert.Equal("no.", tokens[16]);
			assert.Equal("pp.", tokens[21]);
		}

		@Test
		public void frenchTokenizerKnowsFrenchContractions () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "J'ai j'habite qu'il d'un jusqu'à Aujourd'hui D'accord Quelqu'un Presqu'île")
			assert.Equal("J'", tokens[0]);
			assert.Equal("j'", tokens[2]);
			assert.Equal("qu'", tokens[4]);
			assert.Equal("d'", tokens[6]);
			assert.Equal("jusqu'", tokens[8]);
			assert.Equal("Aujourd'hui", tokens[10]);
			assert.Equal("D'", tokens[11]); // ’
			assert.Equal("Quelqu'un", tokens[13]); // ’
			assert.Equal("Presqu'île", tokens[14]); // ’
		}

		@Test
		public void frenchTokenizerKnowsFrenchClitics () {
			DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr();
			tokens = tokenize(dat, w, "suis-je sont-elles ")
			assert.Equal("suis", tokens[0]);
			assert.Equal("-je", tokens[1]);
			assert.Equal("sont", tokens[2]);
			assert.Equal("-elles", tokens[3]);
		}

		@Test
		public void testEnglishTokenizerScienceAbbreviations () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "Approx. in Sept. 1954, Assoc. Prof. Dr. R. J. Ewing reviewed articles on Enzymol. Bacteriol. effects later published in Nutr. Rheumatol. No. 12 and Nº. 13., pp. 17-18.")
			assert.Equal("Approx.", tokens[0]);
			assert.Equal("in", tokens[1]);
			assert.Equal("Sept.", tokens[2]);
			assert.Equal("1954", tokens[3]);
			assert.Equal(",", tokens[4]);
			assert.Equal("Assoc.", tokens[5]);
			assert.Equal("Prof.", tokens[6]);
			assert.Equal("Dr.", tokens[7]);
			assert.Equal("R.", tokens[8]);
			assert.Equal("J.", tokens[9]);
			assert.Equal("Ewing", tokens[10]);
			assert.Equal("reviewed", tokens[11]);
			assert.Equal("articles", tokens[12]);
			assert.Equal("on", tokens[13]);
			assert.Equal("Enzymol.", tokens[14]);
			assert.Equal("Bacteriol.", tokens[15]);
			assert.Equal("effects", tokens[16]);
			assert.Equal("later", tokens[17]);
			assert.Equal("published", tokens[18]);
			assert.Equal("in", tokens[19]);
			assert.Equal("Nutr.", tokens[20]);
			assert.Equal("Rheumatol.", tokens[21]);
			assert.Equal("No.", tokens[22]);
			assert.Equal("12", tokens[23]);
			assert.Equal("and", tokens[24]);
			assert.Equal("Nº.", tokens[25]);
			assert.Equal("13.", tokens[26]);
			assert.Equal(",", tokens[27]);
			assert.Equal("pp.", tokens[28]);
			assert.Equal("17-18", tokens[29]);
			assert.Equal(".", tokens[30]);
		}

		@Test
		public void englishTokenizerCanGuessWhetherIIsAbbrev () {
			DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en();
			tokens = tokenize(dat, w, "M. I. Baxter was born during World War I. So was I. He went to the Peter I. Hardy school. So did I.")
			assert.Equal("I.", tokens[1]);
			assert.Equal("I", tokens[8]);
			assert.Equal(".", tokens[9]);
			assert.Equal("I", tokens[12]);
			assert.Equal(".", tokens[13]);
		}

		@Test
		public void testZipOuputArchive () {

			final ByteArrayOutputStream clearOut = new ByteArrayOutputStream();
			System.setOut(new PrintStream(clearOut));
			tokens = tokenize(dat, w, "Archive:  ich/bin/ein.zip\n")
			assert.Equal(0, len(tokens));
		}
	*/
	/*

		@Test
		public void testTextBreakOutputArchive () throws InstantiationException, IllegalAccessException, ClassNotFoundException {
			DerekoDfaTokenizer_de tok = (DerekoDfaTokenizer_de) new KorapTokenizer.Builder()
					.tokenizerClassName(DerekoDfaTokenizer_de.class.getName())
					.printOffsets(true)
					.build();
			Span[] tokens = tok.tokenizePos("Text1\004\nText2 Hallo\004Rumsdibums\004Das freut mich sehr.\n");
			assert.Equal("Text1", tokens[0].getType());
			assert.Equal(len(tokens), 9 );
		}
	*/
}
834
Akronc9c0eae2021-10-22 19:49:43 +0200835func TestMatrixFullTokenizerXML(t *testing.T) {
Akron28031b72021-10-02 13:07:25 +0200836 assert := assert.New(t)
837
Akron9fb63af2021-10-28 01:15:53 +0200838 if mat == nil {
839 mat = LoadMatrixFile("testdata/tokenizer.matok")
840 }
Akron28031b72021-10-02 13:07:25 +0200841
Akron28031b72021-10-02 13:07:25 +0200842 assert.NotNil(mat)
843
844 b := make([]byte, 0, 2048)
845 w := bytes.NewBuffer(b)
846 var tokens []string
847
848 tokens = ttokenize(mat, w, "Das <b>beste</b> Fußballspiel")
849 assert.Equal("Das", tokens[0])
850 assert.Equal("<b>", tokens[1])
851 assert.Equal("beste", tokens[2])
852 assert.Equal("</b>", tokens[3])
853 assert.Equal("Fußballspiel", tokens[4])
854 assert.Equal(5, len(tokens))
855
856 tokens = ttokenize(mat, w, "Das <b class=\"c\">beste</b> Fußballspiel")
857 assert.Equal("Das", tokens[0])
858 assert.Equal("<b class=\"c\">", tokens[1])
859 assert.Equal("beste", tokens[2])
860 assert.Equal("</b>", tokens[3])
861 assert.Equal("Fußballspiel", tokens[4])
862 assert.Equal(5, len(tokens))
863
864 tokens = ttokenize(mat, w, "der<x y=\"alte \"> <x x> alte</x> etc. et. Mann.")
865 assert.Equal("der", tokens[0])
866 assert.Equal("<x y=\"alte \">", tokens[1])
867 assert.Equal("<x x>", tokens[2])
868 assert.Equal("alte", tokens[3])
869 assert.Equal("</x>", tokens[4])
870 assert.Equal("etc.", tokens[5])
871 assert.Equal("et", tokens[6])
872 assert.Equal(".", tokens[7])
873 assert.Equal("Mann", tokens[8])
874 assert.Equal(".", tokens[9])
875 assert.Equal(10, len(tokens))
Akron066d99c2021-10-28 19:04:59 +0200876
877 tokens = ttokenize(mat, w, "das<br class=\"br\" />ging.")
878 assert.Equal("das", tokens[0])
879 assert.Equal("<br class=\"br\" />", tokens[1])
880 assert.Equal("ging", tokens[2])
881 assert.Equal(".", tokens[3])
882 assert.Equal(4, len(tokens))
Akron28031b72021-10-02 13:07:25 +0200883}
884
Akronabcb6a52021-10-09 15:52:08 +0200885func TestMatokDatokEquivalence(t *testing.T) {
886 assert := assert.New(t)
887
Akron9fb63af2021-10-28 01:15:53 +0200888 if mat == nil {
889 mat = LoadMatrixFile("testdata/tokenizer.matok")
890 }
Akronabcb6a52021-10-09 15:52:08 +0200891 dat := LoadDatokFile("testdata/tokenizer.datok")
892
893 r := strings.NewReader(s)
894
895 tb := make([]byte, 0, 2048)
896 w := bytes.NewBuffer(tb)
897
898 // Transduce with double array representation
899 dat.Transduce(r, w)
900
901 datStr := w.String()
902
903 r.Reset(s)
904 w.Reset()
905
906 // Transduce with matrix representation
907 mat.Transduce(r, w)
908
909 matStr := w.String()
910
911 assert.Equal(datStr, matStr)
912}
913
Akronc9c0eae2021-10-22 19:49:43 +0200914func TestMatrixFullTokenizerCallbackTransduce(t *testing.T) {
Akrone396a932021-10-19 01:06:13 +0200915 assert := assert.New(t)
916
Akron9fb63af2021-10-28 01:15:53 +0200917 if mat == nil {
918 mat = LoadMatrixFile("testdata/tokenizer.matok")
919 }
Akrone396a932021-10-19 01:06:13 +0200920
921 assert.NotNil(mat)
922
923 b := make([]byte, 0, 2048)
924 w := bytes.NewBuffer(b)
Akrone396a932021-10-19 01:06:13 +0200925
926 assert.True(mat.Transduce(strings.NewReader("Der alte Baum. Er war schon alt."), w))
Akrona854faa2021-10-22 19:31:08 +0200927
928 matStr := w.String()
929
930 assert.Equal("Der\nalte\nBaum\n.\n\nEr\nwar\nschon\nalt\n.\n\n\n", matStr)
931}
932
Akronc9c0eae2021-10-22 19:49:43 +0200933func TestMatrixFullTokenizerTextTreatment(t *testing.T) {
Akrona854faa2021-10-22 19:31:08 +0200934 assert := assert.New(t)
935
Akron9fb63af2021-10-28 01:15:53 +0200936 if mat == nil {
937 mat = LoadMatrixFile("testdata/tokenizer.matok")
938 }
Akrona854faa2021-10-22 19:31:08 +0200939
940 assert.NotNil(mat)
941
942 b := make([]byte, 0, 2048)
943 w := bytes.NewBuffer(b)
944
945 assert.True(mat.Transduce(strings.NewReader("Erste.\n\n\n\n\x04\x0aNächst.\x04"), w))
946 matStr := w.String()
947 assert.Equal("Erste\n.\n\n\nNächst\n.\n\n\n", matStr)
Akronf6bdfdb2021-10-23 15:56:53 +0200948}
Akrona854faa2021-10-22 19:31:08 +0200949
Akronf6bdfdb2021-10-23 15:56:53 +0200950func TestMatrixTrimming(t *testing.T) {
951 assert := assert.New(t)
952
Akron9fb63af2021-10-28 01:15:53 +0200953 if mat == nil {
954 mat = LoadMatrixFile("testdata/tokenizer.matok")
955 }
Akronf6bdfdb2021-10-23 15:56:53 +0200956
957 assert.NotNil(mat)
958
959 b := make([]byte, 0, 2048)
960 w := bytes.NewBuffer(b)
961
962 assert.True(mat.Transduce(strings.NewReader(" Erste."), w))
963 matStr := w.String()
964 assert.Equal("Erste\n.\n\n\n", matStr)
Akrone396a932021-10-19 01:06:13 +0200965}
966
Akronc9c0eae2021-10-22 19:49:43 +0200967func BenchmarkMatrixTransduce(b *testing.B) {
Akron28031b72021-10-02 13:07:25 +0200968 bu := make([]byte, 0, 2048)
969 w := bytes.NewBuffer(bu)
970
Akron28031b72021-10-02 13:07:25 +0200971 r := strings.NewReader(s)
972
Akron094a4e82021-10-02 18:37:00 +0200973 mat := LoadMatrixFile("testdata/tokenizer.matok")
Akron28031b72021-10-02 13:07:25 +0200974
975 b.ResetTimer()
976
977 for i := 0; i < b.N; i++ {
978 w.Reset()
979 r.Reset(s)
980 ok := mat.Transduce(r, w)
981 if !ok {
982 fmt.Println("Fail!")
983 fmt.Println(w.String())
984 os.Exit(1)
985 }
986 }
987}