#!/usr/bin/env perl
# source ~/perl5/perlbrew/etc/bashrc
# perlbrew switch perl-blead@korap
use strict;
use warnings;
use utf8;
use Test::More;
use Benchmark ':hireswallclock';
use lib 'lib', '../lib';
use Scalar::Util qw/weaken/;

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::Document');

my $path = catdir(dirname(__FILE__), 'artificial');
ok(my $doc = KorAP::Document->new( path => $path . '/' ), 'Load KorAP::Document');
is($doc->path, $path . '/', 'Path');
ok($doc->parse, 'Parse document');

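# Helper: build a fresh KorAP::Tokenizer for the OpenNLP token layer.
# The document reference is weakened here, presumably to avoid keeping
# a circular reference between document and tokenizer alive.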
sub new_tokenizer {
  my $x = $doc;
  weaken $x;
  return KorAP::Tokenizer->new(
    path => $x->path,
    doc => $x,
    foundry => 'OpenNLP',
    layer => 'Tokens',
    name => 'tokens'
  )
};

is($doc->primary->data,
   'Zum letzten kulturellen Anlass lädt die Leitung des Schulheimes Hofbergli ein, '.
   'bevor der Betrieb Ende Schuljahr eingestellt wird.', 'Primary data');

is($doc->primary->data_length, 129, 'Primary data length');

is($doc->primary->data(0,3), 'Zum', 'Get primary data');
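# Offsets appear to be character-based rather than byte-based:
# the data length of 129 matches the character count of the primary text,
# and 'lädt' (31-35) spans four characters despite its umlaut.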

# Get tokens
use_ok('KorAP::Tokenizer');
# Get tokenization
ok(my $tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => 'OpenNLP',
  layer => 'Tokens',
  name => 'tokens'
), 'New Tokenizer');
ok($tokens->parse, 'Parse');

is($tokens->foundry, 'OpenNLP', 'Foundry');

is($tokens->doc->id, 'ART_00001', 'Doc id');
is($tokens->should, 20, 'Should');
is($tokens->have, 18, 'Have');
is($tokens->name, 'tokens', 'Name');
is($tokens->layer, 'Tokens', 'Layer');

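# As far as these fixtures show, each token is serialized as
# [(start-end)s:Surface|i:lowercased|_position#start-end|...];
# the first token additionally carries the token count of the
# layer (-:tokens$<i>18, matching have() above).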
is($tokens->stream->pos(0)->to_string, '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18]', 'Token is correct');
is($tokens->stream->pos(1)->to_string, '[(4-11)s:letzten|i:letzten|_1#4-11]', 'Token is correct');

my $i = 2;
foreach ([12,23, 'kulturellen'],
         [24,30, 'Anlass'],
         [31,35, 'lädt'],
         [36,39, 'die'],
         [40,47, 'Leitung'],
         [48,51, 'des'],
         [52,63, 'Schulheimes'],
         [64,73, 'Hofbergli'],
         [74,77, 'ein'],
         [79,84, 'bevor'],
         [85,88, 'der'],
         [89,96, 'Betrieb'],
         [97,101, 'Ende'],
         [102,111, 'Schuljahr'],
         [112,123, 'eingestellt'],
         [124,128, 'wird']
  ) {
  is($tokens->stream->pos($i++)->to_string,
     '[('.$_->[0].'-'.$_->[1].')'.
     's:'.$_->[2].'|i:'.lc($_->[2]).'|'.
     '_'.($i-1).'#'.$_->[0].'-'.$_->[1].']',
     'Token is correct');
};

ok(!$tokens->stream->pos($i++), 'No more tokens');

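# add(foundry, layer) is expected to merge the annotations of the given
# foundry/layer pair into the already parsed token stream, so the POS
# values below appear as additional terms (opennlp/p:...) on each token.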
# Add OpenNLP/morpho
ok($tokens->add('OpenNLP', 'Morpho'), 'Add OpenNLP/Morpho');

$i = 0;
foreach (qw/APPRART ADJA ADJA NN VVFIN ART NN ART NN NE PTKVZ KOUS ART NN NN NN VVPP VAFIN/) {
  like($tokens->stream->pos($i++)->to_string,
       qr!\|opennlp/p:$_!,
       'Annotation (OpenNLP/p) is correct: ' . $_
  );
};

# Add OpenNLP/sentences
ok($tokens->add('OpenNLP', 'Sentences'), 'Add OpenNLP/Sentences');

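# Span annotations seem to be anchored at their first token:
# <>:opennlp/s#0-129$<i>17 marks a sentence covering the whole primary
# text and ending at token position 17, while -:opennlp/sentences$<i>1
# counts a single sentence.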
is($tokens->stream->pos(0)->to_string, '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|opennlp/p:APPRART|<>:opennlp/s#0-129$<i>17|-:opennlp/sentences$<i>1]', 'Correct sentence');


# New instantiation
ok($tokens = KorAP::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => 'OpenNLP',
  layer => 'Tokens',
  name => 'tokens'
), 'New Tokenizer');

ok($tokens->parse, 'Parse');

# Add Base/Sentences
ok($tokens->add('Base', 'Sentences'), 'Add Base/Sentences');

# Add Base/Paragraphs
ok($tokens->add('Base', 'Paragraphs'), 'Add Base/Paragraphs');

is($tokens->stream->pos(0)->to_string,
   '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|<>:base/s#0-129$<i>17|<>:base/text#0-129$<i>17|-:base/sentences$<i>1|-:base/paragraphs$<i>0]',
   'Correct base annotation');


# New instantiation
ok($tokens = new_tokenizer->parse, 'Parse');

# Add CoreNLP/NamedEntities
ok($tokens->add('CoreNLP', 'NamedEntities', 'ne_dewac_175m_600'), 'Add CoreNLP/NamedEntities');
ok($tokens->add('CoreNLP', 'NamedEntities', 'ne_hgc_175m_600'), 'Add CoreNLP/NamedEntities');

is($tokens->stream->pos(9)->to_string,
   '[(64-73)s:Hofbergli|i:hofbergli|_9#64-73|corenlp/ne_dewac_175m_600:I-LOC|corenlp/ne_hgc_175m_600:I-LOC]',
   'Correct NamedEntities annotation');


# New instantiation
ok($tokens = new_tokenizer->parse, 'Parse');

# Add CoreNLP/Morpho
ok($tokens->add('CoreNLP', 'Morpho'), 'Add CoreNLP/Morpho');

is($tokens->stream->pos(0)->to_string,
   '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|corenlp/p:APPRART]',
   'Correct corenlp annotation');

$i = 0;
foreach (qw/APPRART ADJ ADJA NN VVFIN ART NN ART NN NE PTKVZ KOUS ART NN NN NN VVPP VAFIN/) {
  like($tokens->stream->pos($i++)->to_string,
       qr!\|corenlp/p:$_!,
       'Annotation (CoreNLP/p) is correct: ' . $_);
};

# Add CoreNLP/Sentences
ok($tokens->add('CoreNLP', 'Sentences'), 'Add CoreNLP/Sentences');

is($tokens->stream->pos(0)->to_string,
   '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|corenlp/p:APPRART|<>:corenlp/s#0-129$<i>17|-:corenlp/sentences$<i>1]',
   'Correct corenlp annotation');


# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add Connexor/Sentences
ok($tokens->add('Connexor', 'Sentences'), 'Add Connexor/Sentences');

is($tokens->stream->pos(0)->to_string,
   '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|<>:cnx/s#0-129$<i>17|-:cnx/sentences$<i>1]',
   'Correct cnx annotation');

# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add Connexor/Morpho
ok($tokens->add('Connexor', 'Morpho'), 'Add Connexor/Morpho');

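# A '!' in the expectation lists below marks token positions that are
# not checked for this layer; they are simply skipped.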
$i = 0;
foreach (qw/! A A N V DET N DET N N NUM CS DET N N N V V/) {
  if ($_ eq '!') {
    $i++;
    next;
  };
  like($tokens->stream->pos($i++)->to_string,
       qr!\|cnx/p:$_!,
       'Annotation (Connexor/p) is correct: ' . $_);
};

$i = 0;
foreach (qw/! ! ! ! IND:PRES ! ! ! ! Prop ! ! ! ! ! ! PCP:PERF IND:PRES/) {
  if ($_ eq '!') {
    $i++;
    next;
  };
  foreach my $f (split(':', $_)) {
    like($tokens->stream->pos($i)->to_string,
         qr!\|cnx/m:$f!,
         'Annotation (Connexor/m) is correct: ' . $f);
  };
  $i++;
};

# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add Connexor/Phrase
ok($tokens->add('Connexor', 'Phrase'), 'Add Connexor/Phrase');
my $stream = $tokens->stream;
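# Phrase annotations follow the same span pattern as sentences:
# <>:cnx/c:np#start-end$<i>N apparently marks a noun phrase covering
# the given character range and ending at token position N.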
like($stream->pos(1)->to_string, qr!\|<>:cnx/c:np#4-30\$<i>4!, 'Annotation (Connexor/c) is correct');
like($stream->pos(6)->to_string, qr!\|<>:cnx/c:np#40-47\$<i>7!, 'Annotation (Connexor/c) is correct');
like($stream->pos(8)->to_string, qr!\|<>:cnx/c:np#52-73\$<i>10!, 'Annotation (Connexor/c) is correct');
like($stream->pos(13)->to_string, qr!\|<>:cnx/c:np#89-111\$<i>16!, 'Annotation (Connexor/c) is correct');

# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add Connexor/Syntax
ok($tokens->add('Connexor', 'Syntax'), 'Add Connexor/Syntax');
$stream = $tokens->stream;

$i = 0;
foreach (qw/! @PREMOD @PREMOD @NH @MAIN @PREMOD @NH @PREMOD
            @PREMOD @NH @NH @PREMARK @PREMOD @PREMOD @NH @NH @MAIN @AUX/) {
  if ($_ eq '!') {
    $i++;
    next;
  };
  like($tokens->stream->pos($i++)->to_string,
       qr!\|cnx/syn:$_!,
       'Annotation (Connexor/syn) is correct: ' . $_);
};

# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add XIP/Sentences
ok($tokens->add('XIP', 'Sentences'), 'Add XIP/Sentences');

is($tokens->stream->pos(0)->to_string, '[(0-3)s:Zum|i:zum|_0#0-3|-:tokens$<i>18|<>:xip/s#0-129$<i>17|-:xip/sentences$<i>1]', 'First sentence');

# Add XIP/Morpho
ok($tokens->add('XIP', 'Morpho'), 'Add XIP/Morpho');
$stream = $tokens->stream;

$i = 0;
foreach (qw/PREP ADJ ADJ NOUN VERB DET NOUN DET NOUN NOUN PTCL CONJ DET NOUN NOUN NOUN VERB VERB/) {
  if ($_ eq '!') {
    $i++;
    next;
  };
  like($tokens->stream->pos($i++)->to_string,
       qr!\|xip/p:$_!,
       'Annotation (xip/p) is correct: ' . $_);
};

$i = 0;
foreach ('zu', 'letzt', 'kulturell', 'Anlass', '=laden:laden', 'die', 'Leitung', 'der', '#schulen:#Heim:schulen#Heim', 'Hofbergli', 'ein', 'bevor', 'der', 'Betrieb', 'Ende', '#schulen:#Jahr:schulen#Jahr') {
  if ($_ eq '!') {
    $i++;
    next;
  };
  foreach my $f (split(':', $_)) {
    like($tokens->stream->pos($i)->to_string,
         qr!\|xip/l:$f!,
         'Annotation (xip/l) is correct: ' . $f);
  };
  $i++;
};

# New instantiation
ok($tokens = new_tokenizer->parse, 'New Tokenizer');

# Add XIP/Dependency
ok($tokens->add('XIP', 'Dependency'), 'Add XIP/Dependency');

$stream = $tokens->stream;
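# In the dependency terms, '>' and '<' appear to encode the direction of
# the relation, with the related token position in the payload:
# pos(1) points to position 3 via >:xip/d:NMOD$<i>3, and pos(3) points
# back via <:xip/d:NMOD$<i>1.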
like($stream->pos(1)->to_string, qr!\|>:xip/d:NMOD\$<i>3!, 'Dependency fine');
like($stream->pos(3)->to_string, qr!\|<:xip/d:NMOD\$<i>1!, 'Dependency fine');
like($stream->pos(3)->to_string, qr!\|<:xip/d:NMOD\$<i>2!, 'Dependency fine');
like($stream->pos(4)->to_string, qr!\|>xip/d:VMAIN\$<i>4!, 'Dependency fine');
like($stream->pos(4)->to_string, qr!\|<:xip/d:SUBJ\$<i>6!, 'Dependency fine');
like($stream->pos(4)->to_string, qr!\|<:xip/d:VPREF\$<i>10!, 'Dependency fine');
like($stream->pos(5)->to_string, qr!\|>:xip/d:DETERM\$<i>6!, 'Dependency fine');
like($stream->pos(6)->to_string, qr!\|<:xip/d:DETERM\$<i>5!, 'Dependency fine');
like($stream->pos(6)->to_string, qr!\|>:xip/d:SUBJ\$<i>4!, 'Dependency fine');
like($stream->pos(6)->to_string, qr!\|<:xip/d:NMOD\$<i>8!, 'Dependency fine');
like($stream->pos(7)->to_string, qr!\|>:xip/d:DETERM\$<i>8!, 'Dependency fine');
like($stream->pos(8)->to_string, qr!\|<:xip/d:DETERM\$<i>7!, 'Dependency fine');
like($stream->pos(8)->to_string, qr!\|>:xip/d:NMOD\$<i>6!, 'Dependency fine');
like($stream->pos(8)->to_string, qr!\|<:xip/d:NMOD\$<i>9!, 'Dependency fine');
like($stream->pos(9)->to_string, qr!\|>:xip/d:NMOD\$<i>8!, 'Dependency fine');
like($stream->pos(10)->to_string, qr!\|>:xip/d:VPREF\$<i>4!, 'Dependency fine');
like($stream->pos(11)->to_string, qr!\|>:xip/d:CONNECT\$<i>16!, 'Dependency fine');
like($stream->pos(12)->to_string, qr!\|>:xip/d:DETERM\$<i>13!, 'Dependency fine');
like($stream->pos(13)->to_string, qr!\|<:xip/d:DETERM\$<i>12!, 'Dependency fine');
like($stream->pos(13)->to_string, qr!\|>:xip/d:SUBJ\$<i>16!, 'Dependency fine');
like($stream->pos(14)->to_string, qr!\|>:xip/d:OBJ\$<i>16!, 'Dependency fine');
like($stream->pos(15)->to_string, qr!\|>:xip/d:OBJ\$<i>16!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|<:xip/d:CONNECT\$<i>11!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|<:xip/d:SUBJ\$<i>13!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|<:xip/d:OBJ\$<i>14!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|<:xip/d:OBJ\$<i>15!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|>:xip/d:AUXIL\$<i>17!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|>xip/d:VMAIN\$<i>16!, 'Dependency fine');
like($stream->pos(16)->to_string, qr!\|<xip/d:VMAIN\$<i>16!, 'Dependency fine');
like($stream->pos(17)->to_string, qr!\|<:xip/d:AUXIL\$<i>16!, 'Dependency fine');


# ADJA ADJA NN VVFIN ART NN ART NN NE PTKVZ KOUS ART NN NN NN VVPP VAFIN
done_testing;
__END__

# Todo: CoreNLP/Constituency!

# Connexor
push(@layers, ['Connexor', 'Morpho']);
push(@layers, ['Connexor', 'Syntax']);
push(@layers, ['Connexor', 'Phrase']);
push(@layers, ['Connexor', 'Sentences']);

# TreeTagger
push(@layers, ['TreeTagger', 'Morpho']);
push(@layers, ['TreeTagger', 'Sentences']);

# Mate
# push(@layers, ['Mate', 'Morpho']);
push(@layers, ['Mate', 'Dependency']);

# XIP
push(@layers, ['XIP', 'Morpho']);
push(@layers, ['XIP', 'Constituency']);
push(@layers, ['XIP', 'Dependency']);
push(@layers, ['XIP', 'Sentences']);

__END__