package datok

import (
	"bufio"
	"bytes"
	"fmt"
	"os"
	"regexp"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
)

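// dat caches the double array tokenizer between tests, so the large
// automaton in testdata/tokenizer_de.datok is only loaded once.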
var dat *DaTokenizer

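// ttokenizeStr transduces str with tok and returns the resulting tokens
// joined by newlines.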
func ttokenizeStr(tok Tokenizer, str string) string {
	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	return strings.Join(ttokenize(tok, w, str), "\n")
}

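// ttokenize transduces str with tok into w and returns the tokens as a
// slice, dropping the empty trailing entry produced by the final newlines.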
func ttokenize(tok Tokenizer, w *bytes.Buffer, str string) []string {
	w.Reset()
	ok := tok.Transduce(strings.NewReader(str), w)
	if !ok {
		return []string{}
	}
	obj := regexp.MustCompile("\n+")

	tokens := obj.Split(w.String(), -1)
	return tokens[:len(tokens)-1]
}

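// ttokenLines reads path line by line and returns all non-empty lines
// that are not comments (prefixed with "#"), failing the test on I/O
// errors.
//
// A minimal usage sketch (the fixture name is hypothetical):
//
//	for _, line := range ttokenLines(t, "testdata/examples.txt") {
//		assert.NotEmpty(t, ttokenizeStr(dat, line))
//	}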
func ttokenLines(t *testing.T, path string) []string {
	f, err := os.Open(path)
	if err != nil {
		t.Fatalf("failed to open %s: %v", path, err)
	}
	defer f.Close()

	lines := []string{}
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		if line == "" || strings.HasPrefix(line, "#") {
			continue
		}
		lines = append(lines, line)
	}
	if err := scanner.Err(); err != nil {
		t.Fatalf("failed to read %s: %v", path, err)
	}
	return lines
}

func TestDoubleArraySimpleString(t *testing.T) {
	assert := assert.New(t)
	// bau | bauamt
	tok := LoadFomaFile("testdata/bauamt.fst")
	dat := tok.ToDoubleArray()

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(dat, w, "ibauamt")
	assert.Equal("i", tokens[0])
	assert.Equal("bauamt", tokens[1])

	tokens = ttokenize(dat, w, "ibbauamt")
	assert.Equal("i", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("bauamt", tokens[2])

	tokens = ttokenize(dat, w, "bau")
	assert.Equal("bau", tokens[0])

	tokens = ttokenize(dat, w, "baum")
	assert.Equal("bau", tokens[0])
	assert.Equal("m", tokens[1])

	tokens = ttokenize(dat, w, "baudibauamt")
	assert.Equal("bau", tokens[0])
	assert.Equal("d", tokens[1])
	assert.Equal("i", tokens[2])
	assert.Equal("bauamt", tokens[3])
}

func TestDoubleArraySimpleBranches(t *testing.T) {
	assert := assert.New(t)
	// (bau | wahl) (amt | en)
	tok := LoadFomaFile("testdata/wahlamt.fst")
	dat := tok.ToDoubleArray()

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	tokens = ttokenize(dat, w, "bau")
	assert.Equal("bau", tokens[0])

	tokens = ttokenize(dat, w, "bauamt")
	assert.Equal("bauamt", tokens[0])

	tokens = ttokenize(dat, w, "wahlamt")
	assert.Equal("wahlamt", tokens[0])

	tokens = ttokenize(dat, w, "bauen")
	assert.Equal("bauen", tokens[0])

	tokens = ttokenize(dat, w, "wahlen")
	assert.Equal("wahlen", tokens[0])

	tokens = ttokenize(dat, w, "baum")
	assert.Equal("bau", tokens[0])
	assert.Equal("m", tokens[1])
}

func TestDoubleArraySimpleTokenizer(t *testing.T) {
	assert := assert.New(t)
	tok := LoadFomaFile("testdata/simpletok.fst")
	dat := tok.ToDoubleArray()
	assert.Equal(ttokenizeStr(dat, "bau"), "bau")
	assert.Equal(ttokenizeStr(dat, "bad"), "bad")
	assert.Equal(ttokenizeStr(dat, "wald gehen"), "wald\ngehen")
}

func TestDoubleArraySimpleTokenizerTransduce(t *testing.T) {
	assert := assert.New(t)
	tok := LoadFomaFile("testdata/simpletok.fst")
	dat := tok.ToDoubleArray()

	r := strings.NewReader(" wald gehen Da kann\t man was \"erleben\"!")
	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string
	dat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal(len(tokens), 11)
	assert.Equal("wald", tokens[0])
	assert.Equal("gehen", tokens[1])
	assert.Equal("Da", tokens[2])
	assert.Equal("kann", tokens[3])
	assert.Equal("man", tokens[4])
	assert.Equal("was", tokens[5])
	assert.Equal("\"erleben\"", tokens[6])

	r = strings.NewReader(" In den Wald gehen? -- Da kann\t man was \"erleben\"!")
	w.Reset()
	dat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("In", tokens[0])
	assert.Equal("den", tokens[1])
	assert.Equal("Wald", tokens[2])
	assert.Equal("gehen", tokens[3])
	assert.Equal("?", tokens[4])
	assert.Equal("--", tokens[5])

	r = strings.NewReader(" g? -- D")
	w.Reset()
	dat.Transduce(r, w)
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("g", tokens[0])
	assert.Equal("?", tokens[1])
	assert.Equal("--", tokens[2])
	assert.Equal("D", tokens[3])
	assert.Equal("", tokens[4])
	assert.Equal("", tokens[5])
	assert.Equal(7, len(tokens))
}

func TestDoubleArrayReadWriteTokenizer(t *testing.T) {
	assert := assert.New(t)
	tok := LoadFomaFile("testdata/simpletok.fst")
	dat := tok.ToDoubleArray()
	assert.Equal(ttokenizeStr(dat, "bau"), "bau")
	assert.Equal(ttokenizeStr(dat, "bad"), "bad")
	assert.Equal(ttokenizeStr(dat, "wald gehen"), "wald\ngehen")

	b := make([]byte, 0, 1024)
	buf := bytes.NewBuffer(b)
	n, err := dat.WriteTo(buf)
	assert.Nil(err)
	assert.Equal(int64(296), n)

	dat2 := ParseDatok(buf)
	assert.NotNil(dat2)
	assert.Equal(dat.array, dat2.array)
	assert.Equal(dat.sigma, dat2.sigma)
	assert.Equal(dat.epsilon, dat2.epsilon)
	assert.Equal(dat.unknown, dat2.unknown)
	assert.Equal(dat.identity, dat2.identity)
	assert.Equal(dat.final, dat2.final)
	assert.Equal(dat.LoadFactor(), dat2.LoadFactor())
	assert.Equal(ttokenizeStr(dat2, "bau"), "bau")
	assert.Equal(ttokenizeStr(dat2, "bad"), "bad")
	assert.Equal(ttokenizeStr(dat2, "wald gehen"), "wald\ngehen")

	assert.Equal(dat.TransCount(), 17)
	assert.Equal(dat2.TransCount(), 17)
}

func TestDoubleArrayIgnorableMCS(t *testing.T) {

	// This test relies on final states, which is why it no longer
	// works correctly.

	assert := assert.New(t)
	// File has MCS in sigma but not in net
	tok := LoadFomaFile("testdata/ignorable_mcs.fst")
	assert.NotNil(tok)
	dat := tok.ToDoubleArray()
	assert.NotNil(dat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// This is only unambiguous when transducing strictly greedily!
	assert.True(dat.Transduce(strings.NewReader("ab<ab>a"), w))
	tokens = strings.Split(w.String(), "\n")
	assert.Equal("a\nb\n<ab>a\n\n\n", w.String())
	assert.Equal("a", tokens[0])
	assert.Equal("b", tokens[1])
	assert.Equal("<ab>a", tokens[2])
	assert.Equal(6, len(tokens))
	assert.Equal(dat.TransCount(), 15)
}

func TestDoubleArrayFullTokenizer(t *testing.T) {
	assert := assert.New(t)

	if dat == nil {
		dat = LoadDatokFile("testdata/tokenizer_de.datok")
	}
	assert.NotNil(dat)
	assert.True(dat.LoadFactor() >= 60)
	assert.Equal(dat.epsilon, 1)
	assert.Equal(dat.unknown, 2)
	assert.Equal(dat.identity, 3)
	// assert.Equal(dat.final, 142)
	// assert.Equal(len(dat.sigma), 137)
	// assert.True(len(dat.array) > 3000000)
	// assert.True(dat.maxSize > 3000000)
	assert.Equal(ttokenizeStr(dat, "bau"), "bau")
	assert.Equal(ttokenizeStr(dat, "bad"), "bad")
	assert.Equal(ttokenizeStr(dat, "wald gehen"), "wald\ngehen")
}

func TestDoubleArrayTokenizerBranch(t *testing.T) {
	assert := assert.New(t)
	tok := LoadTokenizerFile("testdata/simpletok.datok")
	assert.NotNil(tok)
	assert.Equal(tok.Type(), "DATOK")

	tok = LoadTokenizerFile("testdata/simpletok.matok")
	assert.NotNil(tok)
	assert.Equal(tok.Type(), "MATOK")
}

func XTestDoubleArrayFullTokenizerBuild(t *testing.T) {
	assert := assert.New(t)
	tok := LoadFomaFile("testdata/tokenizer_de.fst")
	dat := tok.ToDoubleArray()
	assert.NotNil(dat)
	// n, err := dat.Save("testdata/tokenizer_de.datok")
	// assert.Nil(err)
	// assert.True(n > 500)
}

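// In the transducer output checked below, every token ends with a newline;
// a sentence boundary is marked by an additional empty line, and the end
// of the text by one more.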
func TestDoubleArrayFullTokenizerTransduce(t *testing.T) {
	assert := assert.New(t)

	if dat == nil {
		dat = LoadDatokFile("testdata/tokenizer_de.datok")
	}

	assert.NotNil(dat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	assert.True(dat.Transduce(strings.NewReader("tra. u Du?"), w))

	tokens = strings.Split(w.String(), "\n")
	assert.Equal("tra\n.\n\nu\nDu\n?\n\n\n", w.String())
	assert.Equal("tra", tokens[0])
	assert.Equal(".", tokens[1])
	assert.Equal("", tokens[2])
	assert.Equal("u", tokens[3])
	assert.Equal("Du", tokens[4])
	assert.Equal("?", tokens[5])
	assert.Equal("", tokens[6])
	assert.Equal("", tokens[7])
	assert.Equal("", tokens[8])
	assert.Equal(9, len(tokens))

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("\"John Doe\"@xx.com"), w))
	assert.Equal("\"\nJohn\nDoe\n\"\n@xx\n.\n\ncom\n\n\n", w.String())
}

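// Since a sentence boundary is an empty line in the output, splitting on
// "\n\n" yields one entry per sentence plus a trailing "\n" remnant from
// the end-of-text marker.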
func TestDoubleArrayFullTokenizerSentenceSplitter(t *testing.T) {
	assert := assert.New(t)

	if dat == nil {
		dat = LoadDatokFile("testdata/tokenizer_de.datok")
	}

	assert.NotNil(dat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var sentences []string

	// testSentSplitterSimple
	assert.True(dat.Transduce(strings.NewReader("Der alte Mann."), w))
	sentences = strings.Split(w.String(), "\n\n")

	assert.Equal("Der\nalte\nMann\n.\n\n\n", w.String())
	assert.Equal("Der\nalte\nMann\n.", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(2, len(sentences))

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Der Vorsitzende der Abk. hat gewählt."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(2, len(sentences))
	assert.Equal("Der\nVorsitzende\nder\nAbk.\nhat\ngewählt\n.", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader(""), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(2, len(sentences))
	assert.Equal("", sentences[0])

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Gefunden auf wikipedia.org."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Ich bin unter korap@ids-mannheim.de erreichbar."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Unsere Website ist https://korap.ids-mannheim.de/?q=Baum"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("Unsere\nWebsite\nist\nhttps://korap.ids-mannheim.de/?q=Baum", sentences[0])
	assert.Equal("\n", sentences[1])
	assert.Equal(2, len(sentences))

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Unser Server ist 10.0.10.51."), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal("\n", sentences[1])
	assert.Equal(2, len(sentences))

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Zu 50.4% ist es sicher"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Der Termin ist am 5.9.2018"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Ich habe die readme.txt heruntergeladen"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(2, len(sentences))
	assert.Equal("Ich\nhabe\ndie\nreadme.txt\nheruntergeladen", sentences[0])
	assert.Equal("\n", sentences[1])

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Ausschalten!!! Hast Du nicht gehört???"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(3, len(sentences))
	assert.Equal("Ausschalten\n!!!", sentences[0])
	assert.Equal("Hast\nDu\nnicht\ngehört\n???", sentences[1])
	assert.Equal("\n", sentences[2])

	w.Reset()
	assert.True(dat.Transduce(strings.NewReader("Ich wohne in der Weststr. und Du?"), w))
	sentences = strings.Split(w.String(), "\n\n")
	assert.Equal(len(sentences), 2)

	/*
		Test:
		"\"Ausschalten!!!\", sagte er. \"Hast Du nicht gehört???\""), w))
	*/
}

func TestDoubleArrayFullTokenizerTokenSplitter(t *testing.T) {
	assert := assert.New(t)

	if dat == nil {
		dat = LoadDatokFile("testdata/tokenizer_de.datok")
	}

	assert.NotNil(dat)

	b := make([]byte, 0, 2048)
	w := bytes.NewBuffer(b)
	var tokens []string

	// testTokenizerSimple
	tokens = ttokenize(dat, w, "Der alte Mann")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(len(tokens), 3)

	tokens = ttokenize(dat, w, "Der alte Mann.")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "alte")
	assert.Equal(tokens[2], "Mann")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerAbbr
	tokens = ttokenize(dat, w, "Der Vorsitzende der F.D.P. hat gewählt")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Vorsitzende")
	assert.Equal(tokens[2], "der")
	assert.Equal(tokens[3], "F.D.P.")
	assert.Equal(tokens[4], "hat")
	assert.Equal(tokens[5], "gewählt")
	assert.Equal(len(tokens), 6)
	// Ignored in KorAP-Tokenizer

	// testTokenizerHost1
	tokens = ttokenize(dat, w, "Gefunden auf wikipedia.org")
	assert.Equal(tokens[0], "Gefunden")
	assert.Equal(tokens[1], "auf")
	assert.Equal(tokens[2], "wikipedia.org")
	assert.Equal(len(tokens), 3)

	// testTokenizerWwwHost
	tokens = ttokenize(dat, w, "Gefunden auf www.wikipedia.org")
	assert.Equal("Gefunden", tokens[0])
	assert.Equal("auf", tokens[1])
	assert.Equal("www.wikipedia.org", tokens[2])
	assert.Equal(3, len(tokens))

	// testTokenizerWwwUrl
	tokens = ttokenize(dat, w, "Weitere Infos unter www.info.biz/info")
	assert.Equal("www.info.biz/info", tokens[3])

	// testTokenizerFtpHost
	/*
		tokens = tokenize(dat, w, "Kann von ftp.download.org heruntergeladen werden")
		assert.Equal("Kann", tokens[0])
		assert.Equal("von", tokens[1])
		assert.Equal("ftp.download.org", tokens[2])
		assert.Equal(5, len(tokens))
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerDash
	tokens = ttokenize(dat, w, "Das war -- spitze")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "--")
	assert.Equal(tokens[3], "spitze")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail1
	tokens = ttokenize(dat, w, "Ich bin unter korap@ids-mannheim.de erreichbar.")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "bin")
	assert.Equal(tokens[2], "unter")
	assert.Equal(tokens[3], "korap@ids-mannheim.de")
	assert.Equal(tokens[4], "erreichbar")
	assert.Equal(tokens[5], ".")
	assert.Equal(len(tokens), 6)

	// testTokenizerEmail2
	tokens = ttokenize(dat, w, "Oder unter korap[at]ids-mannheim[dot]de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap[at]ids-mannheim[dot]de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)

	// testTokenizerEmail3
	tokens = ttokenize(dat, w, "Oder unter korap(at)ids-mannheim(dot)de.")
	assert.Equal(tokens[0], "Oder")
	assert.Equal(tokens[1], "unter")
	assert.Equal(tokens[2], "korap(at)ids-mannheim(dot)de")
	assert.Equal(tokens[3], ".")
	assert.Equal(len(tokens), 4)
	// Ignored in KorAP-Tokenizer

	// testTokenizerDoNotAcceptQuotedEmailNames
	tokens = ttokenize(dat, w, "\"John Doe\"@xx.com")
	assert.Equal("\"", tokens[0])
	assert.Equal("John", tokens[1])
	assert.Equal("Doe", tokens[2])
	assert.Equal("\"", tokens[3])
	assert.Equal("@xx", tokens[4])
	assert.Equal(".", tokens[5]) // Differs - as the sentence splitter splits here!
	assert.Equal("com", tokens[6])
	assert.Equal(7, len(tokens))

	// testTokenizerTwitter
	tokens = ttokenize(dat, w, "Folgt @korap und #korap")
	assert.Equal(tokens[0], "Folgt")
	assert.Equal(tokens[1], "@korap")
	assert.Equal(tokens[2], "und")
	assert.Equal(tokens[3], "#korap")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb1
	tokens = ttokenize(dat, w, "Unsere Website ist https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[0], "Unsere")
	assert.Equal(tokens[1], "Website")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(len(tokens), 4)

	// testTokenizerWeb2
	tokens = ttokenize(dat, w, "Wir sind auch im Internet (https://korap.ids-mannheim.de/?q=Baum)")
	assert.Equal(tokens[0], "Wir")
	assert.Equal(tokens[1], "sind")
	assert.Equal(tokens[2], "auch")
	assert.Equal(tokens[3], "im")
	assert.Equal(tokens[4], "Internet")
	assert.Equal(tokens[5], "(")
	assert.Equal(tokens[6], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[7], ")")
	assert.Equal(len(tokens), 8)
	// Ignored in KorAP-Tokenizer

	// testTokenizerWeb3
	tokens = ttokenize(dat, w, "Die Adresse ist https://korap.ids-mannheim.de/?q=Baum.")
	assert.Equal(tokens[0], "Die")
	assert.Equal(tokens[1], "Adresse")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "https://korap.ids-mannheim.de/?q=Baum")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)
	// Ignored in KorAP-Tokenizer

	// testTokenizerServer
	tokens = ttokenize(dat, w, "Unser Server ist 10.0.10.51.")
	assert.Equal(tokens[0], "Unser")
	assert.Equal(tokens[1], "Server")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "10.0.10.51")
	assert.Equal(tokens[4], ".")
	assert.Equal(len(tokens), 5)

	// testTokenizerNum
	tokens = ttokenize(dat, w, "Zu 50,4% ist es sicher")
	assert.Equal(tokens[0], "Zu")
	assert.Equal(tokens[1], "50,4%")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "es")
	assert.Equal(tokens[4], "sicher")
	assert.Equal(len(tokens), 5)
	// Differs from KorAP-Tokenizer

	// testTokenizerDate
	tokens = ttokenize(dat, w, "Der Termin ist am 5.9.2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5.9.2018")
	assert.Equal(len(tokens), 5)

	tokens = ttokenize(dat, w, "Der Termin ist am 5/9/2018")
	assert.Equal(tokens[0], "Der")
	assert.Equal(tokens[1], "Termin")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "am")
	assert.Equal(tokens[4], "5/9/2018")
	assert.Equal(len(tokens), 5)

	// testTokenizerDateRange
	/*
		tokens = tokenize(dat, w, "Der Termin war vom 4.-5.9.2018")
		assert.Equal(tokens[0], "Der")
		assert.Equal(tokens[1], "Termin")
		assert.Equal(tokens[2], "war")
		assert.Equal(tokens[3], "vom")
		assert.Equal(tokens[4], "4.")
		assert.Equal(tokens[5], "-")
		assert.Equal(tokens[6], "5.9.2018")
		assert.Equal(len(tokens), 7)
		// Ignored in KorAP-Tokenizer
	*/

	// testTokenizerEmoji1
	tokens = ttokenize(dat, w, "Das ist toll! ;)")
	assert.Equal(tokens[0], "Das")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "toll")
	assert.Equal(tokens[3], "!")
	assert.Equal(tokens[4], ";)")
	assert.Equal(len(tokens), 5)

	// testTokenizerRef1
	tokens = ttokenize(dat, w, "Kupietz und Schmidt (2018): Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "(2018)")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerRef2
	tokens = ttokenize(dat, w, "Kupietz und Schmidt [2018]: Korpuslinguistik")
	assert.Equal(tokens[0], "Kupietz")
	assert.Equal(tokens[1], "und")
	assert.Equal(tokens[2], "Schmidt")
	assert.Equal(tokens[3], "[2018]")
	assert.Equal(tokens[4], ":")
	assert.Equal(tokens[5], "Korpuslinguistik")
	assert.Equal(len(tokens), 6)
	// Differs from KorAP-Tokenizer!

	// testTokenizerOmission1
	tokens = ttokenize(dat, w, "Er ist ein A****loch!")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "ist")
	assert.Equal(tokens[2], "ein")
	assert.Equal(tokens[3], "A****loch")
	assert.Equal(tokens[4], "!")
	assert.Equal(len(tokens), 5)

	// testTokenizerOmission2
	tokens = ttokenize(dat, w, "F*ck!")
	assert.Equal(tokens[0], "F*ck")
	assert.Equal(tokens[1], "!")
	assert.Equal(len(tokens), 2)

	// testTokenizerOmission3
	tokens = ttokenize(dat, w, "Dieses verf***** Kleid!")
	assert.Equal(tokens[0], "Dieses")
	assert.Equal(tokens[1], "verf*****")
	assert.Equal(tokens[2], "Kleid")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension1
	tokens = ttokenize(dat, w, "Ich habe die readme.txt heruntergeladen")
	assert.Equal(tokens[0], "Ich")
	assert.Equal(tokens[1], "habe")
	assert.Equal(tokens[2], "die")
	assert.Equal(tokens[3], "readme.txt")
	assert.Equal(tokens[4], "heruntergeladen")
	assert.Equal(len(tokens), 5)

	// Probably interpreted as HOST
	// testTokenizerFileExtension2
	tokens = ttokenize(dat, w, "Nimm die README.TXT!")
	assert.Equal(tokens[0], "Nimm")
	assert.Equal(tokens[1], "die")
	assert.Equal(tokens[2], "README.TXT")
	assert.Equal(tokens[3], "!")
	assert.Equal(len(tokens), 4)

	// Probably interpreted as HOST
	// testTokenizerFileExtension3
	tokens = ttokenize(dat, w, "Zeig mir profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "profile.jpeg")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile1
	tokens = ttokenize(dat, w, "Zeig mir c:\\Dokumente\\profile.docx")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile2
	tokens = ttokenize(dat, w, "Gehe zu /Dokumente/profile.docx")
	assert.Equal(tokens[0], "Gehe")
	assert.Equal(tokens[1], "zu")
	assert.Equal(tokens[2], "/Dokumente/profile.docx")
	assert.Equal(len(tokens), 3)

	// testTokenizerFile3
	tokens = ttokenize(dat, w, "Zeig mir c:\\Dokumente\\profile.jpeg")
	assert.Equal(tokens[0], "Zeig")
	assert.Equal(tokens[1], "mir")
	assert.Equal(tokens[2], "c:\\Dokumente\\profile.jpeg")
	assert.Equal(len(tokens), 3)
	// Ignored in KorAP-Tokenizer

	// testTokenizerPunct
	tokens = ttokenize(dat, w, "Er sagte: \"Es geht mir gut!\", daraufhin ging er.")
	assert.Equal(tokens[0], "Er")
	assert.Equal(tokens[1], "sagte")
	assert.Equal(tokens[2], ":")
	assert.Equal(tokens[3], "\"")
	assert.Equal(tokens[4], "Es")
	assert.Equal(tokens[5], "geht")
	assert.Equal(tokens[6], "mir")
	assert.Equal(tokens[7], "gut")
	assert.Equal(tokens[8], "!")
	assert.Equal(tokens[9], "\"")
	assert.Equal(tokens[10], ",")
	assert.Equal(tokens[11], "daraufhin")
	assert.Equal(tokens[12], "ging")
	assert.Equal(tokens[13], "er")
	assert.Equal(tokens[14], ".")
	assert.Equal(len(tokens), 15)

	// testTokenizerPlusAmpersand
	tokens = ttokenize(dat, w, "&quot;Das ist von C&A!&quot;")
	assert.Equal(tokens[0], "&quot;")
	assert.Equal(tokens[1], "Das")
	assert.Equal(tokens[2], "ist")
	assert.Equal(tokens[3], "von")
	assert.Equal(tokens[4], "C&A")
	assert.Equal(tokens[5], "!")
	assert.Equal(tokens[6], "&quot;")
	assert.Equal(len(tokens), 7)

	// testTokenizerLongEnd
	tokens = ttokenize(dat, w, "Siehst Du?!!?")
	assert.Equal(tokens[0], "Siehst")
	assert.Equal(tokens[1], "Du")
	assert.Equal(tokens[2], "?!!?")
	assert.Equal(len(tokens), 3)

	// testTokenizerIrishO
	tokens = ttokenize(dat, w, "Peter O'Toole")
	assert.Equal(tokens[0], "Peter")
	assert.Equal(tokens[1], "O'Toole")
	assert.Equal(len(tokens), 2)

	// testTokenizerAbr
	tokens = ttokenize(dat, w, "Früher bzw. später ...")
	assert.Equal(tokens[0], "Früher")
	assert.Equal(tokens[1], "bzw.")
	assert.Equal(tokens[2], "später")
	assert.Equal(tokens[3], "...")
	assert.Equal(len(tokens), 4)

	// testTokenizerUppercaseRule
	tokens = ttokenize(dat, w, "Es war spät.Morgen ist es früh.")
	assert.Equal(tokens[0], "Es")
	assert.Equal(tokens[1], "war")
	assert.Equal(tokens[2], "spät")
	assert.Equal(tokens[3], ".")
	assert.Equal(tokens[4], "Morgen")
	assert.Equal(tokens[5], "ist")
	assert.Equal(tokens[6], "es")
	assert.Equal(tokens[7], "früh")
	assert.Equal(tokens[8], ".")
	assert.Equal(len(tokens), 9)
	// Ignored in KorAP-Tokenizer

	// testTokenizerOrd
	tokens = ttokenize(dat, w, "Sie erreichte den 1. Platz!")
	assert.Equal(tokens[0], "Sie")
	assert.Equal(tokens[1], "erreichte")
	assert.Equal(tokens[2], "den")
	assert.Equal(tokens[3], "1.")
	assert.Equal(tokens[4], "Platz")
	assert.Equal(tokens[5], "!")
	assert.Equal(len(tokens), 6)

	// testNoZipOuputArchive
	tokens = ttokenize(dat, w, "Archive: Ich bin kein zip\n")
	assert.Equal(tokens[0], "Archive")
	assert.Equal(tokens[1], ":")
	assert.Equal(tokens[2], "Ich")
	assert.Equal(tokens[3], "bin")
	assert.Equal(tokens[4], "kein")
	assert.Equal(tokens[5], "zip")
	assert.Equal(6, len(tokens))

	// testTokenizerStrasse
	tokens = ttokenize(dat, w, "Ich wohne in der Weststr. und Du?")
	assert.Equal(tokens[4], "Weststr.")
	assert.Equal(8, len(tokens))

	// germanTokenizerKnowsGermanOmissionWords
	tokens = ttokenize(dat, w, "D'dorf Ku'damm Lu'hafen M'gladbach W'schaft")
	assert.Equal("D'dorf", tokens[0])
	assert.Equal("Ku'damm", tokens[1])
	assert.Equal("Lu'hafen", tokens[2])
	assert.Equal("M'gladbach", tokens[3])
	assert.Equal("W'schaft", tokens[4])
	assert.Equal(5, len(tokens))

	// germanTokenizerDoesNOTSeparateGermanContractions
	tokens = ttokenize(dat, w, "mach's macht's was'n ist's haste willste kannste biste kriegste")
	assert.Equal("mach's", tokens[0])
	assert.Equal("macht's", tokens[1])
	assert.Equal("was'n", tokens[2])
	assert.Equal("ist's", tokens[3])
	assert.Equal("haste", tokens[4])
	assert.Equal("willste", tokens[5])
	assert.Equal("kannste", tokens[6])
	assert.Equal("biste", tokens[7])
	assert.Equal("kriegste", tokens[8])
	assert.Equal(9, len(tokens))

| Akron | d8d8895 | 2026-02-04 09:02:09 +0100 | [diff] [blame] | 817 | // Regression test for hyphenated abbreviations from Wiktionary (2024-12) |
| 818 | tokens = ttokenize(dat, w, "Ich wohne in Ba.-Wü. und bin Dipl.-Ing. bei Reg.-Bez. Karlsruhe.") |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 819 | assert.Equal("Ich", tokens[0]) |
| 820 | assert.Equal("wohne", tokens[1]) |
| 821 | assert.Equal("in", tokens[2]) |
| 822 | assert.Equal("Ba.-Wü.", tokens[3]) |
| 823 | assert.Equal("und", tokens[4]) |
| 824 | assert.Equal("bin", tokens[5]) |
| 825 | assert.Equal("Dipl.-Ing.", tokens[6]) |
| 826 | assert.Equal("bei", tokens[7]) |
| 827 | assert.Equal("Reg.-Bez.", tokens[8]) |
| 828 | assert.Equal("Karlsruhe", tokens[9]) |
| 829 | assert.Equal(".", tokens[10]) |
| 830 | assert.Equal(11, len(tokens)) |
| Akron | d8d8895 | 2026-02-04 09:02:09 +0100 | [diff] [blame] | 831 | |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 832 | // Regression test for https://github.com/KorAP/KorAP-Tokenizer/issues/131 |
| 833 | tokens = ttokenize(dat, w, "Donau\u00ADdampf\u00ADschiff") |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 834 | assert.Equal("Donau\u00ADdampf\u00ADschiff", tokens[0]) |
| 835 | assert.Equal(1, len(tokens)) |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 836 | |
| 837 | // Regression test for https://github.com/KorAP/KorAP-Tokenizer/issues/115 |
| 838 | tokens = ttokenize(dat, w, "Die Serb*innen wie die Kosovo-Albaner*innen") |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 839 | assert.Equal("Die", tokens[0]) |
| 840 | assert.Equal("Serb*innen", tokens[1]) |
| 841 | assert.Equal("wie", tokens[2]) |
| 842 | assert.Equal("die", tokens[3]) |
| 843 | assert.Equal("Kosovo-Albaner*innen", tokens[4]) |
| 844 | assert.Equal(5, len(tokens)) |
| 845 | |
| 846 | // Test Wikipedia emoji template from the issue |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 847 | tokens = ttokenize(dat, w, "Ein Smiley [_EMOJI:{{S|;)}}_] hier") |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 848 | assert.Equal("Ein", tokens[0]) |
| 849 | assert.Equal("Smiley", tokens[1]) |
| 850 | assert.Equal("[_EMOJI:{{S|;)}}_]", tokens[2]) // Should be one token |
| 851 | assert.Equal("hier", tokens[3]) |
| 852 | assert.Equal(4, len(tokens)) |
| 853 | |
| 854 | // Test simple pragma still works |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 855 | tokens = ttokenize(dat, w, "Name: [_ANONYMIZED_] Ende") |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 856 | assert.Equal("Name", tokens[0]) |
| 857 | assert.Equal(":", tokens[1]) |
| 858 | assert.Equal("[_ANONYMIZED_]", tokens[2]) // Should be one token |
| 859 | assert.Equal("Ende", tokens[3]) |
| 860 | assert.Equal(4, len(tokens)) |
| 861 | |
| 862 | // Gender forms |
| 863 | // Basic colon forms with -in/-innen |
| 864 | tokens = ttokenize(dat, w, "Die Schüler:innen und Lehrer:in kamen.") |
| 865 | assert.Equal("Die", tokens[0]) |
| 866 | assert.Equal("Schüler:innen", tokens[1]) |
| 867 | assert.Equal("und", tokens[2]) |
| 868 | assert.Equal("Lehrer:in", tokens[3]) |
| 869 | assert.Equal("kamen", tokens[4]) |
| 870 | assert.Equal(".", tokens[5]) |
| 871 | assert.Equal(6, len(tokens)) |
| 872 | |
| 873 | // More colon examples |
| 874 | tokens = ttokenize(dat, w, "Künstler:innen Mitarbeiter:innen Bürger:innen") |
| 875 | assert.Equal("Künstler:innen", tokens[0]) |
| 876 | assert.Equal("Mitarbeiter:innen", tokens[1]) |
| 877 | assert.Equal("Bürger:innen", tokens[2]) |
| 878 | assert.Equal(3, len(tokens)) |
| 879 | |
| 880 | // Basic slash forms |
| 881 | tokens = ttokenize(dat, w, "Autor/in Autor/innen Teilnehmer/innen") |
| 882 | assert.Equal("Autor/in", tokens[0]) |
| 883 | assert.Equal("Autor/innen", tokens[1]) |
| 884 | assert.Equal("Teilnehmer/innen", tokens[2]) |
| 885 | assert.Equal(3, len(tokens)) |
| 886 | |
| 887 | // Slash forms with hyphen: /-in, /-innen, /-frau |
| 888 | tokens = ttokenize(dat, w, "Kaufmann/-frau und Fachmann/-frau") |
| 889 | assert.Equal("Kaufmann/-frau", tokens[0]) |
| 890 | assert.Equal("und", tokens[1]) |
| 891 | assert.Equal("Fachmann/-frau", tokens[2]) |
| 892 | assert.Equal(3, len(tokens)) |
| 893 | |
| 894 | // Slash forms without hyphen for frau (lowercase only) |
| 895 | tokens = ttokenize(dat, w, "Kaufmann/frau ist auch korrekt.") |
| 896 | assert.Equal("Kaufmann/frau", tokens[0]) |
| 897 | assert.Equal("ist", tokens[1]) |
| 898 | assert.Equal("auch", tokens[2]) |
| 899 | assert.Equal("korrekt", tokens[3]) |
| 900 | assert.Equal(".", tokens[4]) |
| 901 | assert.Equal(5, len(tokens)) |
| 902 | |
| 903 | // Basic parenthetical forms |
| 904 | tokens = ttokenize(dat, w, "Schüler(innen) und Lehrer(in) kamen.") |
| 905 | assert.Equal("Schüler(innen)", tokens[0]) |
| 906 | assert.Equal("und", tokens[1]) |
| 907 | assert.Equal("Lehrer(in)", tokens[2]) |
| 908 | assert.Equal("kamen", tokens[3]) |
| 909 | assert.Equal(".", tokens[4]) |
| 910 | assert.Equal(5, len(tokens)) |
| 911 | |
| 912 | // Compound words with hyphen + gender ending |
| 913 | tokens = ttokenize(dat, w, "Die Kosovo-Albaner/innen und Kosovo-Albaner:innen trafen sich.") |
| 914 | assert.Equal("Die", tokens[0]) |
| 915 | assert.Equal("Kosovo-Albaner/innen", tokens[1]) |
| 916 | assert.Equal("und", tokens[2]) |
| 917 | assert.Equal("Kosovo-Albaner:innen", tokens[3]) |
| 918 | assert.Equal("trafen", tokens[4]) |
| 919 | assert.Equal("sich", tokens[5]) |
| 920 | assert.Equal(".", tokens[6]) |
| 921 | assert.Equal(7, len(tokens)) |
| 922 | |
| 923 | // With hyphen: Kosovo-Albaner/-innen |
| 924 | tokens = ttokenize(dat, w, "Kosovo-Albaner/-innen kamen.") |
| 925 | assert.Equal("Kosovo-Albaner/-innen", tokens[0]) |
| 926 | assert.Equal("kamen", tokens[1]) |
| 927 | assert.Equal(".", tokens[2]) |
| 928 | assert.Equal(3, len(tokens)) |
| 929 | |
| 930 | // Mann/Frau should be separated (capital F = standalone word, not suffix) |
| 931 | tokens = ttokenize(dat, w, "Ob Mann/Frau das will?") |
| 932 | assert.Equal("Ob", tokens[0]) |
| 933 | assert.Equal("Mann", tokens[1]) |
| 934 | assert.Equal("/", tokens[2]) |
| 935 | assert.Equal("Frau", tokens[3]) |
| 936 | assert.Equal("das", tokens[4]) |
| 937 | assert.Equal("will", tokens[5]) |
| 938 | assert.Equal("?", tokens[6]) |
| 939 | assert.Equal(7, len(tokens)) |
| 940 | |
| 941 | // Also Männer/Frauen |
| 942 | tokens = ttokenize(dat, w, "Männer/Frauen sind willkommen.") |
| 943 | assert.Equal("Männer", tokens[0]) |
| 944 | assert.Equal("/", tokens[1]) |
| 945 | assert.Equal("Frauen", tokens[2]) |
| 946 | assert.Equal("sind", tokens[3]) |
| 947 | assert.Equal("willkommen", tokens[4]) |
| 948 | assert.Equal(".", tokens[5]) |
| 949 | assert.Equal(6, len(tokens)) |
| 950 | |
| 951 | // /frau should only be joined when word ends in "mann" |
| 952 | // "xxx/frau" where xxx doesn't end in "mann" should be SEPARATED |
| 953 | tokens = ttokenize(dat, w, "xxx/frau sollte getrennt sein.") |
| 954 | assert.Equal("xxx", tokens[0]) |
| 955 | assert.Equal("/", tokens[1]) |
| 956 | assert.Equal("frau", tokens[2]) |
| 957 | assert.Equal("sollte", tokens[3]) |
| 958 | assert.Equal("getrennt", tokens[4]) |
| 959 | assert.Equal("sein", tokens[5]) |
| 960 | assert.Equal(".", tokens[6]) |
| 961 | assert.Equal(7, len(tokens)) |
| 962 | |
| 963 | // But Kaufmann/frau should be one token (word ends in "mann") |
| 964 | tokens = ttokenize(dat, w, "Kaufmann/frau ist ein Beruf.") |
| 965 | assert.Equal("Kaufmann/frau", tokens[0]) |
| 966 | assert.Equal("ist", tokens[1]) |
| 967 | assert.Equal("ein", tokens[2]) |
| 968 | assert.Equal("Beruf", tokens[3]) |
| 969 | assert.Equal(".", tokens[4]) |
| 970 | assert.Equal(5, len(tokens)) |
| 971 | |
| 972 | // And Fachmann/-frau should be one token |
| 973 | tokens = ttokenize(dat, w, "Fachmann/-frau gesucht") |
| 974 | assert.Equal("Fachmann/-frau", tokens[0]) |
| 975 | assert.Equal("gesucht", tokens[1]) |
| 976 | assert.Equal(2, len(tokens)) |
| 977 | |
| 978 | // Geschäftsmann/frau should also be one token |
| 979 | tokens = ttokenize(dat, w, "Ein Geschäftsmann/frau wird gesucht.") |
| 980 | assert.Equal("Ein", tokens[0]) |
| 981 | assert.Equal("Geschäftsmann/frau", tokens[1]) |
| 982 | assert.Equal("wird", tokens[2]) |
| 983 | assert.Equal("gesucht", tokens[3]) |
| 984 | assert.Equal(".", tokens[4]) |
| 985 | assert.Equal(5, len(tokens)) |
| 986 | |
| 987 | // Genderstern forms (these should already work via existing rules) |
| 988 | tokens = ttokenize(dat, w, "Schüler*innen und Lehrer*innen") |
| 989 | assert.Equal("Schüler*innen", tokens[0]) |
| 990 | assert.Equal("und", tokens[1]) |
| 991 | assert.Equal("Lehrer*innen", tokens[2]) |
| 992 | assert.Equal(3, len(tokens)) |
| 993 | |
| 994 | // Mixed sentence with various gender forms |
| 995 | tokens = ttokenize(dat, w, "Die Schüler:innen, Lehrer/innen und Mitarbeiter(innen) sowie Kaufmann/-frau trafen sich.") |
| 996 | assert.Equal("Die", tokens[0]) |
| 997 | assert.Equal("Schüler:innen", tokens[1]) |
| 998 | assert.Equal(",", tokens[2]) |
| 999 | assert.Equal("Lehrer/innen", tokens[3]) |
| 1000 | assert.Equal("und", tokens[4]) |
| 1001 | assert.Equal("Mitarbeiter(innen)", tokens[5]) |
| 1002 | assert.Equal("sowie", tokens[6]) |
| 1003 | assert.Equal("Kaufmann/-frau", tokens[7]) |
| 1004 | assert.Equal("trafen", tokens[8]) |
| 1005 | assert.Equal("sich", tokens[9]) |
| 1006 | assert.Equal(".", tokens[10]) |
| 1007 | assert.Equal(11, len(tokens)) |
| 1008 | |
| 1009 | tokens = ttokenize(dat, w, "Nutzer/Innenarchitekt") |
| 1010 | assert.Equal("Nutzer", tokens[0]) |
| 1011 | assert.Equal("/", tokens[1]) |
| 1012 | assert.Equal("Innenarchitekt", tokens[2]) |
| 1013 | assert.Equal(3, len(tokens)) |
| 1014 | |
| 1015 | tokens = ttokenize(dat, w, "Innenminister/in") |
| 1016 | assert.Equal("Innenminister/in", tokens[0]) |
| 1017 | assert.Equal(1, len(tokens)) |
| 1018 | |
| 1019 | tokens = ttokenize(dat, w, "Innenminister/Innenministerinnen") |
| 1020 | assert.Equal("Innenminister", tokens[0]) |
| 1021 | assert.Equal("/", tokens[1]) |
| 1022 | assert.Equal("Innenministerinnen", tokens[2]) |
| 1023 | assert.Equal(3, len(tokens)) |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 1024 | |
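| | 	// The commented-out blocks below keep reference cases: first the
| | 	// expected DeReKo behaviour for English contractions and clitics, then
| | 	// tests ported from KorAP-Tokenizer's Java test suite.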
| 1025 | /* |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1026 | DeReKo-Behaviour |
| 1027 | tokens = ttokenize(dat, w, "I've we'll you'd I'm we're Peter's isn't") |
| 1028 | assert.Equal("'ve", tokens[1]); |
| 1029 | assert.Equal("'ll", tokens[3]); |
| 1030 | assert.Equal("'d", tokens[5]); |
| 1031 | assert.Equal("'m", tokens[7]); |
| 1032 | assert.Equal("'re", tokens[9]); |
| 1033 | assert.Equal("'s", tokens[11]); |
| 1034 | assert.Equal("is", tokens[12]); |
| 1035 | assert.Equal("n't", tokens[13]); |
| 1036 | assert.Equal(14, len(tokens)); |
| Akron | a2f952f | 2026-02-04 09:51:51 +0100 | [diff] [blame] | 1037 | |
| Akron | d8d8895 | 2026-02-04 09:02:09 +0100 | [diff] [blame] | 1038 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1039 | 	tokens = ttokenize(dat, w, "Der alte Mann") // input reconstructed from the assertions below
| 1040 | 	assert.Equal("Der", tokens[0])
| 1041 | 	assert.Equal("alte", tokens[1])
| 1042 | 	assert.Equal("Mann", tokens[2])
| | 	assert.Equal(3, len(tokens))
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1043 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1044 | /* |
| 1045 | @Test |
| 1046 | public void englishTokenizerSeparatesEnglishContractionsAndClitics () { |
| 1047 | DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en(); |
| 1048 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1049 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1050 | @Test |
| 1051 | public void frenchTokenizerKnowsFrenchAbbreviations () { |
| 1052 | DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr(); |
| 1053 | tokens = tokenize(dat, w, "Approx. en juill. 2004 mon prof. M. Foux m'a dit qu'il faut faire exerc. no. 4, et lire pp. 27-30.") |
| 1054 | assert.Equal("Approx.", tokens[0]); |
| 1055 | assert.Equal("juill.", tokens[2]); |
| 1056 | assert.Equal("prof.", tokens[5]); |
| 1057 | assert.Equal("exerc.", tokens[15]); |
| 1058 | assert.Equal("no.", tokens[16]); |
| 1059 | assert.Equal("pp.", tokens[21]); |
| 1060 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1061 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1062 | @Test |
| 1063 | public void frenchTokenizerKnowsFrenchContractions () { |
| 1064 | DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr(); |
| 1065 | tokens = tokenize(dat, w, "J'ai j'habite qu'il d'un jusqu'à Aujourd'hui D'accord Quelqu'un Presqu'île") |
| 1066 | assert.Equal("J'", tokens[0]); |
| 1067 | assert.Equal("j'", tokens[2]); |
| 1068 | assert.Equal("qu'", tokens[4]); |
| 1069 | assert.Equal("d'", tokens[6]); |
| 1070 | assert.Equal("jusqu'", tokens[8]); |
| 1071 | assert.Equal("Aujourd'hui", tokens[10]); |
| 1072 | assert.Equal("D'", tokens[11]); // ’ |
| 1073 | assert.Equal("Quelqu'un", tokens[13]); // ’ |
| 1074 | assert.Equal("Presqu'île", tokens[14]); // ’ |
| 1075 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1076 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1077 | @Test |
| 1078 | public void frenchTokenizerKnowsFrenchClitics () { |
| 1079 | DerekoDfaTokenizer_fr tok = new DerekoDfaTokenizer_fr(); |
| 1080 | tokens = tokenize(dat, w, "suis-je sont-elles ") |
| 1081 | assert.Equal("suis", tokens[0]); |
| 1082 | assert.Equal("-je", tokens[1]); |
| 1083 | assert.Equal("sont", tokens[2]); |
| 1084 | assert.Equal("-elles", tokens[3]); |
| 1085 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1086 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1087 | @Test |
| 1088 | public void testEnglishTokenizerScienceAbbreviations () { |
| 1089 | DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en(); |
| 1090 | tokens = tokenize(dat, w, "Approx. in Sept. 1954, Assoc. Prof. Dr. R. J. Ewing reviewed articles on Enzymol. Bacteriol. effects later published in Nutr. Rheumatol. No. 12 and Nº. 13., pp. 17-18.") |
| 1091 | assert.Equal("Approx.", tokens[0]); |
| 1092 | assert.Equal("in", tokens[1]); |
| 1093 | assert.Equal("Sept.", tokens[2]); |
| 1094 | assert.Equal("1954", tokens[3]); |
| 1095 | assert.Equal(",", tokens[4]); |
| 1096 | assert.Equal("Assoc.", tokens[5]); |
| 1097 | assert.Equal("Prof.", tokens[6]); |
| 1098 | assert.Equal("Dr.", tokens[7]); |
| 1099 | assert.Equal("R.", tokens[8]); |
| 1100 | assert.Equal("J.", tokens[9]); |
| 1101 | assert.Equal("Ewing", tokens[10]); |
| 1102 | assert.Equal("reviewed", tokens[11]); |
| 1103 | assert.Equal("articles", tokens[12]); |
| 1104 | assert.Equal("on", tokens[13]); |
| 1105 | assert.Equal("Enzymol.", tokens[14]); |
| 1106 | assert.Equal("Bacteriol.", tokens[15]); |
| 1107 | assert.Equal("effects", tokens[16]); |
| 1108 | assert.Equal("later", tokens[17]); |
| 1109 | assert.Equal("published", tokens[18]); |
| 1110 | assert.Equal("in", tokens[19]); |
| 1111 | assert.Equal("Nutr.", tokens[20]); |
| 1112 | assert.Equal("Rheumatol.", tokens[21]); |
| 1113 | assert.Equal("No.", tokens[22]); |
| 1114 | assert.Equal("12", tokens[23]); |
| 1115 | assert.Equal("and", tokens[24]); |
| 1116 | assert.Equal("Nº.", tokens[25]); |
| 1117 | assert.Equal("13.", tokens[26]); |
| 1118 | assert.Equal(",", tokens[27]); |
| 1119 | assert.Equal("pp.", tokens[28]); |
| 1120 | assert.Equal("17-18", tokens[29]); |
| 1121 | assert.Equal(".", tokens[30]); |
| 1122 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1123 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1124 | @Test |
| 1125 | public void englishTokenizerCanGuessWhetherIIsAbbrev () { |
| 1126 | DerekoDfaTokenizer_en tok = new DerekoDfaTokenizer_en(); |
| 1127 | tokens = tokenize(dat, w, "M. I. Baxter was born during World War I. So was I. He went to the Peter I. Hardy school. So did I.") |
| 1128 | assert.Equal("I.", tokens[1]); |
| 1129 | assert.Equal("I", tokens[8]); |
| 1130 | assert.Equal(".", tokens[9]); |
| 1131 | assert.Equal("I", tokens[12]); |
| 1132 | assert.Equal(".", tokens[13]); |
| 1133 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1134 | |
| Akron | 3dd560e | 2026-02-04 11:23:08 +0100 | [diff] [blame] | 1135 | @Test |
| 1136 | public void testZipOuputArchive () { |
| 1137 | |
| 1138 | final ByteArrayOutputStream clearOut = new ByteArrayOutputStream(); |
| 1139 | System.setOut(new PrintStream(clearOut)); |
| 1140 | tokens = tokenize(dat, w, "Archive: ich/bin/ein.zip\n") |
| 1141 | assert.Equal(0, len(tokens)); |
| 1142 | } |
| Akron | 03ca425 | 2021-08-11 13:32:53 +0200 | [diff] [blame] | 1143 | */ |
| 1144 | /* |
| 1145 | |
| 1146 | @Test |
| 1147 | public void testTextBreakOutputArchive () throws InstantiationException, IllegalAccessException, ClassNotFoundException { |
| 1148 | DerekoDfaTokenizer_de tok = (DerekoDfaTokenizer_de) new KorapTokenizer.Builder() |
| 1149 | .tokenizerClassName(DerekoDfaTokenizer_de.class.getName()) |
| 1150 | .printOffsets(true) |
| 1151 | .build(); |
| 1152 | Span[] tokens = tok.tokenizePos("Text1\004\nText2 Hallo\004Rumsdibums\004Das freut mich sehr.\n"); |
| 1153 | assert.Equal("Text1", tokens[0].getType()); |
| 1154 | assert.Equal(len(tokens), 9 ); |
| 1155 | } |
| 1156 | */ |
| 1157 | } |
| Akron | bd40680 | 2021-08-11 18:39:13 +0200 | [diff] [blame] | 1158 | |
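| | // Transduce emits one token per line and marks sentence boundaries with
| | // an empty line, so splitting the output on "\n\n" yields the detected
| | // sentences, as exercised below.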
| Akron | df27581 | 2022-03-27 12:54:46 +0200 | [diff] [blame] | 1159 | func TestDoubleArrayFullTokenizerSentenceSplitterBug1(t *testing.T) { |
| 1160 | assert := assert.New(t) |
| 1161 | |
| 1162 | if dat == nil { |
| Akron | 0139bc5 | 2023-08-31 16:35:58 +0200 | [diff] [blame] | 1163 | dat = LoadDatokFile("testdata/tokenizer_de.datok") |
| Akron | df27581 | 2022-03-27 12:54:46 +0200 | [diff] [blame] | 1164 | } |
| 1165 | |
| 1166 | b := make([]byte, 0, 2048) |
| 1167 | w := bytes.NewBuffer(b) |
| 1168 | var sentences []string |
| 1169 | |
| 1170 | text := `Wüllersdorf war aufgestanden. »Ich finde es furchtbar, daß Sie recht haben, aber Sie haben recht. Ich quäle Sie nicht länger mit meinem 'Muß es sein?'. Die Welt ist einmal, wie sie ist, und die Dinge verlaufen nicht, wie wir wollen, sondern wie die andern wollen. Das mit dem 'Gottesgericht', wie manche hochtrabend versichern, ist freilich ein Unsinn, nichts davon, umgekehrt, unser Ehrenkultus ist ein Götzendienst, aber wir müssen uns ihm unterwerfen, solange der Götze gilt.«` |
| 1171 | |
| 1172 | w.Reset() |
| 1173 | assert.True(dat.Transduce(strings.NewReader(text), w)) |
| 1174 | sentences = strings.Split(w.String(), "\n\n") |
| Akron | b428755 | 2022-03-27 14:11:24 +0200 | [diff] [blame] | 1175 | 	assert.Equal(6, len(sentences))
| Akron | df27581 | 2022-03-27 12:54:46 +0200 | [diff] [blame] | 1176 | assert.Equal("Wüllersdorf\nwar\naufgestanden\n.", sentences[0]) |
| 1177 | assert.Equal("»\nIch\nfinde\nes\nfurchtbar\n,\ndaß\nSie\nrecht\nhaben\n,\naber\nSie\nhaben\nrecht\n.", sentences[1]) |
| Akron | b428755 | 2022-03-27 14:11:24 +0200 | [diff] [blame] | 1178 | assert.Equal("Ich\nquäle\nSie\nnicht\nlänger\nmit\nmeinem\n'\nMuß\nes\nsein\n?\n'\n.", sentences[2]) |
| 1179 | assert.Equal("Die\nWelt\nist\neinmal\n,\nwie\nsie\nist\n,\nund\ndie\nDinge\nverlaufen\nnicht\n,\nwie\nwir\nwollen\n,\nsondern\nwie\ndie\nandern\nwollen\n.", sentences[3]) |
| 1180 | assert.Equal("Das\nmit\ndem\n'\nGottesgericht\n'\n,\nwie\nmanche\nhochtrabend\nversichern\n,\nist\nfreilich\nein\nUnsinn\n,\nnichts\ndavon\n,\numgekehrt\n,\nunser\nEhrenkultus\nist\nein\nGötzendienst\n,\naber\nwir\nmüssen\nuns\nihm\nunterwerfen\n,\nsolange\nder\nGötze\ngilt\n.\n«", sentences[4]) |
| Akron | df27581 | 2022-03-27 12:54:46 +0200 | [diff] [blame] | 1181 | } |
| 1182 | |
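| | // A minimal sketch of the assumed testdata/de/dontsplit.txt format: one
| | // surface form per line that must survive tokenization as a single token,
| | // e.g.
| | //
| | //   # gender forms that must not be split
| | //   Schüler*innen
| | //   Kosovo-Albaner/innen
| | //   Kaufmann/-frau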
| Akron | 2f7f6f3 | 2026-02-11 15:12:48 +0100 | [diff] [blame^] | 1183 | func TestDoubleArrayFullTokenizerGenderDontSplitFromFile(t *testing.T) { |
| 1184 | assert := assert.New(t) |
| 1185 | |
| 1186 | if dat == nil { |
| 1187 | dat = LoadDatokFile("testdata/tokenizer_de.datok") |
| 1188 | } |
| 1189 | assert.NotNil(dat) |
| 1190 | |
| 1191 | b := make([]byte, 0, 2048) |
| 1192 | w := bytes.NewBuffer(b) |
| 1193 | |
| 1194 | for _, token := range ttokenLines(t, "testdata/de/dontsplit.txt") { |
| 1195 | tokens := ttokenize(dat, w, token) |
| 1196 | assert.Equalf(1, len(tokens), "should not split %q", token) |
| 1197 | if len(tokens) == 1 { |
| 1198 | assert.Equalf(token, tokens[0], "token surface should match for %q", token) |
| 1199 | } |
| 1200 | } |
| 1201 | } |
| 1202 | |
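| | // Counterpart to the test above: every line of the assumed
| | // testdata/de/split.txt (e.g. "Mann/Frau") must yield more than one token.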
| 1203 | func TestDoubleArrayFullTokenizerGenderSplitFromFile(t *testing.T) { |
| 1204 | assert := assert.New(t) |
| 1205 | |
| 1206 | if dat == nil { |
| 1207 | dat = LoadDatokFile("testdata/tokenizer_de.datok") |
| 1208 | } |
| 1209 | assert.NotNil(dat) |
| 1210 | |
| 1211 | b := make([]byte, 0, 2048) |
| 1212 | w := bytes.NewBuffer(b) |
| 1213 | |
| 1214 | for _, token := range ttokenLines(t, "testdata/de/split.txt") { |
| 1215 | tokens := ttokenize(dat, w, token) |
| 1216 | assert.Greaterf(len(tokens), 1, "should split %q", token) |
| 1217 | } |
| 1218 | } |
| 1219 | |
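| | // LoadFactor is read as a percentage here; the bound of 88 is an
| | // empirical floor for this automaton rather than a hard requirement.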
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1220 | func TestDoubleArrayLoadFactor1(t *testing.T) { |
| Akron | 29e306f | 2021-09-02 18:29:56 +0200 | [diff] [blame] | 1221 | assert := assert.New(t) |
| 1222 | tok := LoadFomaFile("testdata/abbr_bench.fst") |
| 1223 | dat := tok.ToDoubleArray() |
| 1224 | assert.True(dat.LoadFactor() > 88) |
| 1225 | } |
| 1226 | |
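| | // Markup is expected to surface as single tokens, including tags with
| | // attributes, as exercised below.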
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1227 | func TestDoubleArrayFullTokenizerXML(t *testing.T) { |
| Akron | 4c2a1ad | 2021-08-31 00:35:53 +0200 | [diff] [blame] | 1228 | assert := assert.New(t) |
| 1229 | |
| Akron | 9fb63af | 2021-10-28 01:15:53 +0200 | [diff] [blame] | 1230 | if dat == nil { |
| Akron | 0139bc5 | 2023-08-31 16:35:58 +0200 | [diff] [blame] | 1231 | dat = LoadDatokFile("testdata/tokenizer_de.datok") |
| Akron | 9fb63af | 2021-10-28 01:15:53 +0200 | [diff] [blame] | 1232 | } |
| 1233 | |
| Akron | 4c2a1ad | 2021-08-31 00:35:53 +0200 | [diff] [blame] | 1234 | assert.NotNil(dat) |
| 1235 | |
| 1236 | b := make([]byte, 0, 2048) |
| 1237 | w := bytes.NewBuffer(b) |
| 1238 | var tokens []string |
| 1239 | |
| 1240 | tokens = ttokenize(dat, w, "Das <b>beste</b> Fußballspiel") |
| 1241 | assert.Equal("Das", tokens[0]) |
| 1242 | assert.Equal("<b>", tokens[1]) |
| 1243 | assert.Equal("beste", tokens[2]) |
| 1244 | assert.Equal("</b>", tokens[3]) |
| 1245 | assert.Equal("Fußballspiel", tokens[4]) |
| 1246 | assert.Equal(5, len(tokens)) |
| 1247 | |
| 1248 | tokens = ttokenize(dat, w, "Das <b class=\"c\">beste</b> Fußballspiel") |
| 1249 | assert.Equal("Das", tokens[0]) |
| 1250 | assert.Equal("<b class=\"c\">", tokens[1]) |
| 1251 | assert.Equal("beste", tokens[2]) |
| 1252 | assert.Equal("</b>", tokens[3]) |
| 1253 | assert.Equal("Fußballspiel", tokens[4]) |
| 1254 | assert.Equal(5, len(tokens)) |
| 1255 | |
| 1256 | tokens = ttokenize(dat, w, "der<x y=\"alte \"> <x x> alte</x> etc. et. Mann.") |
| 1257 | assert.Equal("der", tokens[0]) |
| 1258 | assert.Equal("<x y=\"alte \">", tokens[1]) |
| 1259 | assert.Equal("<x x>", tokens[2]) |
| 1260 | assert.Equal("alte", tokens[3]) |
| 1261 | assert.Equal("</x>", tokens[4]) |
| 1262 | assert.Equal("etc.", tokens[5]) |
| 1263 | assert.Equal("et", tokens[6]) |
| 1264 | assert.Equal(".", tokens[7]) |
| 1265 | assert.Equal("Mann", tokens[8]) |
| 1266 | assert.Equal(".", tokens[9]) |
| 1267 | assert.Equal(10, len(tokens)) |
| 1268 | } |
| 1269 | |
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1270 | func BenchmarkDoubleArrayTransduce(b *testing.B) { |
| Akron | bd40680 | 2021-08-11 18:39:13 +0200 | [diff] [blame] | 1271 | bu := make([]byte, 0, 2048) |
| 1272 | w := bytes.NewBuffer(bu) |
| 1273 | |
| 1274 | s := `Der Vorsitzende der Abk. hat gewählt. Gefunden auf wikipedia.org. Ich bin unter korap@ids-mannheim.de erreichbar. |
| 1275 | Unsere Website ist https://korap.ids-mannheim.de/?q=Baum. Unser Server ist 10.0.10.51. Zu 50.4% ist es sicher. |
| 1276 | Der Termin ist am 5.9.2018. |
| 1277 | Ich habe die readme.txt heruntergeladen. |
| 1278 | Ausschalten!!! Hast Du nicht gehört??? |
| 1279 | Ich wohne in der Weststr. und Du? Kupietz und Schmidt [2018]: Korpuslinguistik. Dieses verf***** Kleid! Ich habe die readme.txt heruntergeladen. |
| 1280 | Er sagte: \"Es geht mir gut!\", daraufhin ging er. "Das ist von C&A!" Früher bzw. später ... Sie erreichte den 1. Platz! |
| 1281 | Archive: Ich bin kein zip. D'dorf Ku'damm Lu'hafen M'gladbach W'schaft. |
| 1282 | Mach's macht's was'n ist's haste willste kannste biste kriegste.` |
| 1283 | r := strings.NewReader(s) |
| 1284 | |
| Akron | 0139bc5 | 2023-08-31 16:35:58 +0200 | [diff] [blame] | 1285 | dat := LoadDatokFile("testdata/tokenizer_de.datok") |
| Akron | bd40680 | 2021-08-11 18:39:13 +0200 | [diff] [blame] | 1286 | |
| Akron | df37a55 | 2021-09-02 12:16:08 +0200 | [diff] [blame] | 1287 | b.ResetTimer() |
| 1288 | |
| Akron | bd40680 | 2021-08-11 18:39:13 +0200 | [diff] [blame] | 1289 | for i := 0; i < b.N; i++ { |
| 1290 | w.Reset() |
| 1291 | r.Reset(s) |
| 1292 | ok := dat.Transduce(r, w) |
| 1293 | if !ok { |
| 1294 | fmt.Println("Fail!") |
| 1295 | fmt.Println(w.String()) |
| 1296 | os.Exit(1) |
| 1297 | } |
| 1298 | } |
| Akron | bd40680 | 2021-08-11 18:39:13 +0200 | [diff] [blame] | 1299 | } |
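| | 
| | // A typical invocation to reproduce the numbers at the end of this file
| | // (standard go test flags) would be:
| | //
| | //   go test -bench=BenchmarkDoubleArrayTransduce -benchmem -count=5
| | //
| | // Adding -cpuprofile cpu.out writes a profile for inspection with
| | // go tool pprof.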
| Akron | bb4aac5 | 2021-08-13 00:52:27 +0200 | [diff] [blame] | 1300 | |
| Akron | 6f1c16c | 2021-08-17 10:45:42 +0200 | [diff] [blame] | 1301 | // This benchmark is deprecated, as the datok file it loads changes over time
| 1302 | func XBenchmarkLoadDatokFile(b *testing.B) { |
| Akron | bb4aac5 | 2021-08-13 00:52:27 +0200 | [diff] [blame] | 1303 | for i := 0; i < b.N; i++ { |
| Akron | 0139bc5 | 2023-08-31 16:35:58 +0200 | [diff] [blame] | 1304 | dat := LoadDatokFile("testdata/tokenizer_de.datok") |
| Akron | bb4aac5 | 2021-08-13 00:52:27 +0200 | [diff] [blame] | 1305 | if dat == nil { |
| 1306 | fmt.Println("Fail!") |
| 1307 | os.Exit(1) |
| 1308 | } |
| 1309 | } |
| 1310 | } |
| 1311 | |
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1312 | func BenchmarkDoubleArrayConstruction(b *testing.B) { |
| Akron | 6f1c16c | 2021-08-17 10:45:42 +0200 | [diff] [blame] | 1313 | tok := LoadFomaFile("testdata/simple_bench.fst") |
| Akron | df37a55 | 2021-09-02 12:16:08 +0200 | [diff] [blame] | 1314 | b.ResetTimer() |
| Akron | 6f1c16c | 2021-08-17 10:45:42 +0200 | [diff] [blame] | 1315 | for i := 0; i < b.N; i++ { |
| 1316 | dat := tok.ToDoubleArray() |
| 1317 | if dat == nil { |
| 1318 | fmt.Println("Fail!") |
| 1319 | os.Exit(1) |
| 1320 | } |
| 1321 | } |
| 1322 | } |
| 1323 | |
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1324 | func BenchmarkDoubleArrayLarger(b *testing.B) { |
| Akron | 7b1faa6 | 2021-09-02 16:10:21 +0200 | [diff] [blame] | 1325 | tok := LoadFomaFile("testdata/abbr_bench.fst") |
| 1326 | b.ResetTimer() |
| 1327 | for i := 0; i < b.N; i++ { |
| 1328 | dat := tok.ToDoubleArray() |
| 1329 | if dat == nil { |
| 1330 | fmt.Println("Fail!") |
| 1331 | os.Exit(1) |
| 1332 | } |
| 1333 | } |
| 1334 | } |
| 1335 | |
| Akron | bb4aac5 | 2021-08-13 00:52:27 +0200 | [diff] [blame] | 1336 | // 2021-08-11 (go 1.16) |
| 1337 | // go test -bench=. -test.benchmem |
| 1338 | // BenchmarkTransduce-4 19069 60609 ns/op 11048 B/op 137 allocs/op |
| Akron | f1a1650 | 2021-08-16 15:24:38 +0200 | [diff] [blame] | 1339 | // 2021-08-12 (go 1.16) |
| Akron | bb4aac5 | 2021-08-13 00:52:27 +0200 | [diff] [blame] | 1340 | // BenchmarkTransduce-4 20833 55241 ns/op 9676 B/op 3 allocs/op |
| 1341 | // BenchmarkLoadDatokFile-4 4 258418169 ns/op 29916470 B/op 5697 allocs/op |
| 1342 | // BenchmarkTransduce-4 19430 58133 ns/op 18696 B/op 3 allocs/op |
| 1343 | // BenchmarkLoadDatokFile-4 8 139071939 ns/op 203158377 B/op 5742 allocs/op |
| Akron | f1a1650 | 2021-08-16 15:24:38 +0200 | [diff] [blame] | 1344 | // 2021-08-16 |
| 1345 | // BenchmarkTransduce-4 22251 49989 ns/op 17370 B/op 3 allocs/op |
| 1346 | // BenchmarkLoadDatokFile-4 8 138937532 ns/op 203158327 B/op 5742 allocs/op |
| 1347 | // BenchmarkTransduce-4 22005 48665 ns/op 17472 B/op 3 allocs/op |
| 1348 | // BenchmarkLoadDatokFile-4 7 143143934 ns/op 203158450 B/op 5743 allocs/op |
| Akron | ea46e8a | 2021-08-17 00:36:31 +0200 | [diff] [blame] | 1349 | // BenchmarkTransduce-4 34939 34363 ns/op 14056 B/op 3 allocs/op |
| 1350 | // BenchmarkLoadDatokFile-4 7 149511609 ns/op 203217193 B/op 5915 allocs/op |
| Akron | 6f1c16c | 2021-08-17 10:45:42 +0200 | [diff] [blame] | 1351 | // 2021-08-17 |
| 1352 | // BenchmarkTransduce-4 31204 32678 ns/op 14752 B/op 3 allocs/op |
| 1353 | // BenchmarkToDoubleArray-4 44138 26850 ns/op 10704 B/op 29 allocs/op |
| Akron | de18e90 | 2021-08-27 09:34:12 +0200 | [diff] [blame] | 1354 | // BenchmarkTransduce-4 29376 34562 ns/op 15157 B/op 3 allocs/op |
| 1355 | // BenchmarkToDoubleArray-4 54441 21355 ns/op 10704 B/op 29 allocs/op |
| Akron | df37a55 | 2021-09-02 12:16:08 +0200 | [diff] [blame] | 1356 | // 2021-09-02 - New tokenizer - fixed loading |
| Akron | 7b1faa6 | 2021-09-02 16:10:21 +0200 | [diff] [blame] | 1357 | // BenchmarkTransduce-4 40149 31515 ns/op 8240 B/op 3 allocs/op |
| 1358 | // BenchmarkToDoubleArray-4 51043 22586 ns/op 10702 B/op 29 allocs/op |
| 1359 | // BenchmarkToDoubleArrayLarger-4 3 396009639 ns/op 6352293 B/op 2575 allocs/op |
| 1360 | // BenchmarkTransduce-4 38698 31900 ns/op 8240 B/op 3 allocs/op |
| 1361 | // BenchmarkToDoubleArray-4 50644 21569 ns/op 11151 B/op 14 allocs/op |
| 1362 | // BenchmarkToDoubleArrayLarger-4 3 441260766 ns/op 6942336 B/op 30 allocs/op |
| 1363 | // BenchmarkTransduce-4 39966 30835 ns/op 8240 B/op 3 allocs/op |
| 1364 | // BenchmarkToDoubleArray-4 50720 24863 ns/op 11091 B/op 46 allocs/op |
| 1365 | // BenchmarkToDoubleArrayLarger-4 3 432523828 ns/op 6413381 B/op 5122 allocs/op |
| Akron | 679b486 | 2021-09-02 16:59:26 +0200 | [diff] [blame] | 1366 | // 2021-09-02 - xCheckSkip() with .9 |
| 1367 | // BenchmarkTransduce-4 36325 38501 ns/op 8240 B/op 3 allocs/op |
| 1368 | // BenchmarkToDoubleArray-4 66858 19286 ns/op 10607 B/op 29 allocs/op |
| 1369 | // BenchmarkToDoubleArrayLarger-4 18 67428011 ns/op 6360604 B/op 2578 allocs/op |
| Akron | 29e306f | 2021-09-02 18:29:56 +0200 | [diff] [blame] | 1370 | // 2021-09-02 - xCheckSkipNiu() with .9 and >= 3 |
| 1371 | // BenchmarkTransduce-4 37105 27714 ns/op 8240 B/op 3 allocs/op |
| 1372 | // BenchmarkToDoubleArray-4 76600 15973 ns/op 10703 B/op 29 allocs/op |
| 1373 | // BenchmarkToDoubleArrayLarger-4 21 55161934 ns/op 6357889 B/op 2578 allocs/op |
| Akron | 28031b7 | 2021-10-02 13:07:25 +0200 | [diff] [blame] | 1374 | // 2021-09-30 - Go 1.17.1 |
| 1375 | // BenchmarkTransduce-4 47222 25962 ns/op 8240 B/op 3 allocs/op |
| 1376 | // BenchmarkToDoubleArray-4 69192 17355 ns/op 10704 B/op 29 allocs/op |
| 1377 | // BenchmarkToDoubleArrayLarger-4 16 65042885 ns/op 6357794 B/op 2576 allocs/op |
| 1378 | // BenchmarkTransduceMatrix-4 45404 25156 ns/op 8240 B/op 3 allocs/op |
| Akron | 094a4e8 | 2021-10-02 18:37:00 +0200 | [diff] [blame] | 1379 | // 2021-10-02 |
| 1380 | // BenchmarkTransduce-4 47676 25398 ns/op 8240 B/op 3 allocs/op |
| 1381 | // BenchmarkToDoubleArray-4 71919 16083 ns/op 10702 B/op 29 allocs/op |
| 1382 | // BenchmarkToDoubleArrayLarger-4 16 68012819 ns/op 6357920 B/op 2578 allocs/op |
| 1383 | // BenchmarkTransduceMatrix-4 51529 23678 ns/op 8240 B/op 3 allocs/op |
| Akron | e396a93 | 2021-10-19 01:06:13 +0200 | [diff] [blame] | 1384 | // 2021-10-12 - Introduction of Callbacks in Matrix |
| 1385 | // BenchmarkTransduce-4 46947 26043 ns/op 8240 B/op 3 allocs/op |
| 1386 | // BenchmarkToDoubleArray-4 65192 16501 ns/op 10703 B/op 29 allocs/op |
| 1387 | // BenchmarkToDoubleArrayLarger-4 15 69263576 ns/op 6357859 B/op 2577 allocs/op |
| 1388 | // BenchmarkTransduceMatrix-4 49928 26313 ns/op 12408 B/op 6 allocs/op |
| 1389 | // 2021-10-18 - Introduction of Callbacks in DA |
| 1390 | // BenchmarkTransduce-4 41055 30058 ns/op 12408 B/op 6 allocs/op |
| 1391 | // BenchmarkToDoubleArray-4 64672 17659 ns/op 10703 B/op 29 allocs/op |
| 1392 | // BenchmarkToDoubleArrayLarger-4 15 71640553 ns/op 6357865 B/op 2577 allocs/op |
| 1393 | // BenchmarkTransduceMatrix-4 47036 26009 ns/op 12408 B/op 6 allocs/op |
| Akron | a854faa | 2021-10-22 19:31:08 +0200 | [diff] [blame] | 1394 | // 2021-10-21 - Simplify DA code to ignore final states |
| 1395 | // BenchmarkTransduce-4 41365 33766 ns/op 12408 B/op 6 allocs/op |
| 1396 | // BenchmarkToDoubleArray-4 63663 17675 ns/op 10703 B/op 29 allocs/op |
| 1397 | // BenchmarkToDoubleArrayLarger-4 16 83535733 ns/op 6357874 B/op 2577 allocs/op |
| 1398 | // BenchmarkTransduceMatrix-4 45362 25258 ns/op 12408 B/op 6 allocs/op |
| Akron | 98fbfef | 2021-10-23 17:02:11 +0200 | [diff] [blame] | 1399 | // 2021-10-22 - Introduce EOT
| Akron | c9c0eae | 2021-10-22 19:49:43 +0200 | [diff] [blame] | 1400 | // BenchmarkDoubleArrayTransduce-4 43820 27661 ns/op 12408 B/op 6 allocs/op |
| 1401 | // BenchmarkDoubleArrayConstruction-4 68259 16608 ns/op 10703 B/op 29 allocs/op |
| 1402 | // BenchmarkDoubleArrayLarger-4 16 69889532 ns/op 6357901 B/op 2578 allocs/op |
| 1403 | // BenchmarkMatrixTransduce-4 49426 25105 ns/op 12408 B/op 6 allocs/op |
| Akron | 98fbfef | 2021-10-23 17:02:11 +0200 | [diff] [blame] | 1404 | // 2021-10-23 - Improve offset handling |
| 1405 | // BenchmarkDoubleArrayTransduce-4 41890 29729 ns/op 12408 B/op 6 allocs/op |
| 1406 | // BenchmarkDoubleArrayConstruction-4 74510 15879 ns/op 10703 B/op 29 allocs/op |
| 1407 | // BenchmarkDoubleArrayLarger-4 18 73752383 ns/op 6357956 B/op 2579 allocs/op |
| 1408 | // BenchmarkMatrixTransduce-4 46870 27140 ns/op 12408 B/op 6 allocs/op |
| Akron | 04335c6 | 2021-10-28 11:56:00 +0200 | [diff] [blame] | 1409 | // 2021-10-28 - Finalize feature compatibility with KorAP-Tokenizer |
| 1410 | // BenchmarkDoubleArrayTransduce-4 39130 31612 ns/op 28944 B/op 16 allocs/op |
| 1411 | // BenchmarkDoubleArrayConstruction-4 79302 14994 ns/op 10703 B/op 29 allocs/op |
| 1412 | // BenchmarkDoubleArrayLarger-4 18 67942077 ns/op 6357870 B/op 2577 allocs/op |
| 1413 | // BenchmarkMatrixTransduce-4 39536 30510 ns/op 28944 B/op 16 allocs/op |
| Akron | 289414f | 2021-11-09 19:56:42 +0100 | [diff] [blame] | 1414 | // 2021-11-09 - go 1.17.3 |
| 1415 | // BenchmarkDoubleArrayTransduce-4 35067 34192 ns/op 28944 B/op 17 allocs/op |
| 1416 | // BenchmarkDoubleArrayConstruction-4 72446 15614 ns/op 10703 B/op 29 allocs/op |
| 1417 | // BenchmarkDoubleArrayLarger-4 16 71058822 ns/op 6357860 B/op 2577 allocs/op |
| 1418 | // BenchmarkMatrixTransduce-4 36703 31891 ns/op 28944 B/op 17 allocs/op |
| Akron | fac8abc | 2021-11-10 07:19:59 +0100 | [diff] [blame] | 1419 | // 2021-11-10 - rearranged longest match operator |
| Akron | 4880fb6 | 2021-12-05 12:03:05 +0100 | [diff] [blame] | 1420 | // BenchmarkDoubleArrayTransduce-4 34522 33210 ns/op 28944 B/op 17 allocs/op |
| 1421 | // BenchmarkDoubleArrayConstruction-4 66990 16012 ns/op 10703 B/op 29 allocs/op |
| 1422 | // BenchmarkDoubleArrayLarger-4 16 62829878 ns/op 6357823 B/op 2576 allocs/op |
| 1423 | // BenchmarkMatrixTransduce-4 36154 32702 ns/op 28944 B/op 17 allocs/op |
| 1424 | // 2021-12-04 - optimize identity branch |
| 1425 | // BenchmarkDoubleArrayTransduce-4 34903 32255 ns/op 28944 B/op 17 allocs/op |
| 1426 | // BenchmarkDoubleArrayConstruction-4 79394 14561 ns/op 10703 B/op 29 allocs/op |
| 1427 | // BenchmarkDoubleArrayLarger-4 19 60257675 ns/op 6357911 B/op 2577 allocs/op |
| 1428 | // BenchmarkMatrixTransduce-4 35076 30581 ns/op 28944 B/op 17 allocs/op |
| Akron | 00cecd1 | 2021-12-05 13:14:03 +0100 | [diff] [blame] | 1429 | // 2021-12-05 - init identity for sigma < 256 |
| 1430 | // BenchmarkDoubleArrayTransduce-4 35284 31918 ns/op 28944 B/op 17 allocs/op |
| 1431 | // BenchmarkDoubleArrayConstruction-4 80342 14504 ns/op 10703 B/op 29 allocs/op |
| 1432 | // BenchmarkDoubleArrayLarger-4 19 60343253 ns/op 6357789 B/op 2575 allocs/op |
| 1433 | // BenchmarkMatrixTransduce-4 34029 30238 ns/op 28944 B/op 17 allocs/op |