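# Convert two WDD (Wikipedia discussion) samples of the real test corpus to
# Krill JSON: check the metadata of WDD11/G27/38989, add annotation layers
# foundry by foundry, and verify that tokenization of WDD15/A79/83946 fails.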
use strict;
use warnings;
use Test::More;
use Data::Dumper;
use JSON::XS;
use Log::Log4perl;

use Benchmark qw/:hireswallclock/;

# Take a start time to benchmark the conversion
my $t = Benchmark->new;

use utf8;
use lib 'lib', '../lib';

use File::Basename 'dirname';
use File::Spec::Functions 'catdir';

use_ok('KorAP::XML::Krill');

# WDD/G27/38989
my $path = catdir(dirname(__FILE__), '../corpus/WDD/G27/38989');

ok(my $doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load KorAP::XML::Krill document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'WDD11/G27/38989', 'Correct text sigle');
is($doc->doc_sigle, 'WDD11/G27', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD11', 'Correct corpus sigle');

my $meta = $doc->meta;
is($meta->{T_title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$meta->{T_sub_title}, 'No SubTitle');
is($meta->{T_author}, '€pa, u.a.', 'Author');
is($meta->{A_editor}, 'wikipedia.org', 'Editor');

is($meta->{S_pub_place}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($meta->{A_publisher}, 'Wikipedia', 'Publisher');
is($meta->{S_text_type}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$meta->{S_text_type_art}, 'Correct Text Type Art');
ok(!$meta->{S_text_type_ref}, 'Correct Text Type Ref');
ok(!$meta->{S_text_domain}, 'Correct Text Domain');
is($meta->{D_creation_date}, '20070707', 'Creation date');
is($meta->{S_availability}, 'CC-BY-SA', 'License');
ok(!$meta->{pages}, 'Pages');
ok(!$meta->{A_file_edition_statement}, 'File Statement');
ok(!$meta->{A_bibl_edition_statement}, 'Bibl Statement');
is($meta->{A_reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($meta->{S_language}, 'de', 'Language');

is($meta->{T_corpus_title}, 'Wikipedia', 'Correct Corpus title');
ok(!$meta->{T_corpus_sub_title}, 'Correct Corpus sub title');
ok(!$meta->{T_corpus_author}, 'Correct Corpus author');
is($meta->{A_corpus_editor}, 'wikipedia.org', 'Correct Corpus editor');

is($meta->{T_doc_title}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$meta->{T_doc_sub_title}, 'Correct Doc sub title');
ok(!$meta->{T_doc_author}, 'Correct Doc author');
ok(!$meta->{A_doc_editor}, 'Correct doc editor');

# Tokenization
use_ok('KorAP::XML::Tokenizer');

my ($token_base_foundry, $token_base_layer) = (qw/OpenNLP Tokens/);

# Get tokenization
my $tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');
ok($tokens->parse, 'Token parsing is fine');

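# Serialize to Krill JSON and check the data section and the metadata fields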
my $output = decode_json( $tokens->to_json );

is(substr($output->{data}->{text}, 0, 100), '{{War Löschkandidat|6. Juli 2007|(erl., bleibt)}}', 'Primary Data');
is($output->{data}->{name}, 'tokens', 'tokenName');
is($output->{data}->{tokenSource}, 'opennlp#tokens', 'tokenSource');
is($output->{version}, '0.03', 'version');
is($output->{data}->{foundries}, '', 'Foundries');
is($output->{data}->{layerInfos}, '', 'layerInfos');
is($output->{data}->{stream}->[0]->[4], 's:{War', 'data');

is($output->{textSigle}, 'WDD11/G27/38989', 'Correct text sigle');
is($output->{docSigle}, 'WDD11/G27', 'Correct document sigle');
is($output->{corpusSigle}, 'WDD11', 'Correct corpus sigle');

is($output->{title}, 'Diskussion:Gunter A. Pilz', 'Title');
ok(!$output->{subTitle}, 'No SubTitle');
is($output->{author}, '€pa, u.a.', 'Author');
is($output->{editor}, 'wikipedia.org', 'Editor');

is($output->{pubPlace}, 'URL:http://de.wikipedia.org', 'PubPlace');
is($output->{publisher}, 'Wikipedia', 'Publisher');
is($output->{textType}, 'Diskussionen zu Enzyklopädie-Artikeln', 'Correct Text Type');
ok(!$output->{textTypeArt}, 'Correct Text Type Art');
ok(!$output->{textTypeRef}, 'Correct Text Type Ref');
ok(!$output->{textDomain}, 'Correct Text Domain');
is($output->{creationDate}, '20070707', 'Creation date');
is($output->{availability}, 'CC-BY-SA', 'License');
ok(!$output->{pages}, 'Pages');
ok(!$output->{fileEditionStatement}, 'File Statement');
ok(!$output->{biblEditionStatement}, 'Bibl Statement');
is($output->{reference} . "\n", <<'REF', 'Reference');
Diskussion:Gunter A. Pilz, In: Wikipedia - URL:http://de.wikipedia.org/wiki/Diskussion:Gunter_A._Pilz: Wikipedia, 2007
REF
is($output->{language}, 'de', 'Language');

is($output->{corpusTitle}, 'Wikipedia', 'Correct Corpus title');
ok(!$output->{corpusSubTitle}, 'Correct Corpus sub title');
ok(!$output->{corpusAuthor}, 'Correct Corpus author');
is($output->{corpusEditor}, 'wikipedia.org', 'Correct Corpus editor');

is($output->{docTitle}, 'Wikipedia, Diskussionen zu Artikeln mit Anfangsbuchstabe G, Teil 27', 'Correct Doc title');
ok(!$output->{docSubTitle}, 'Correct Doc sub title');
ok(!$output->{docAuthor}, 'Correct Doc author');
ok(!$output->{docEditor}, 'Correct doc editor');

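# The following sections add annotation layers foundry by foundry and check
# that the serialized 'foundries' and 'layerInfos' strings are extended
# accordingly (or remain unchanged where no annotations are available).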
## Base
$tokens->add('Base', 'Sentences');

$tokens->add('Base', 'Paragraphs');

$output = decode_json( $tokens->to_json );

is($output->{data}->{foundries}, 'base base/paragraphs base/sentences', 'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans', 'layerInfos');
my $first_token = join('||', @{$output->{data}->{stream}->[0]});
like($first_token, qr/s:\{War/, 'data');
like($first_token, qr/_0\$<i>1<i>5/, 'data');


## OpenNLP
$tokens->add('OpenNLP', 'Sentences');

$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/s=spans', 'layerInfos');


$tokens->add('OpenNLP', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans', 'layerInfos');


## Treetagger
$tokens->add('TreeTagger', 'Sentences');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/s=spans', 'layerInfos');

$tokens->add('TreeTagger', 'Morpho');
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

## CoreNLP
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'NamedEntities');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');


{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Sentences');
};
$output = decode_json( $tokens->to_json );
is($output->{data}->{foundries},
   'base base/paragraphs base/sentences opennlp opennlp/morpho opennlp/sentences treetagger treetagger/morpho treetagger/sentences',
   'Foundries');
is($output->{data}->{layerInfos}, 'base/s=spans opennlp/p=tokens opennlp/s=spans tt/l=tokens tt/p=tokens tt/s=spans', 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/p=tokens!, 'layerInfos');

{
  local $SIG{__WARN__} = sub {};
  $tokens->add('CoreNLP', 'Constituency');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!corenlp/constituency!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!corenlp/c=spans!, 'layerInfos');

## Glemm
{
  local $SIG{__WARN__} = sub {};
  $tokens->add('Glemm', 'Morpho');
};
$output = decode_json( $tokens->to_json );
unlike($output->{data}->{foundries}, qr!glemm/morpho!, 'Foundries');
unlike($output->{data}->{layerInfos}, qr!glemm/l=tokens!, 'layerInfos');

## Connexor
$tokens->add('Connexor', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/s=spans!, 'layerInfos');

$tokens->add('Connexor', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!cnx/m=tokens!, 'layerInfos');

$tokens->add('Connexor', 'Phrase');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/phrase!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/c=spans!, 'layerInfos');

$tokens->add('Connexor', 'Syntax');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!connexor/syntax!, 'Foundries');
like($output->{data}->{layerInfos}, qr!cnx/syn=tokens!, 'layerInfos');

## Mate
$tokens->add('Mate', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!mate/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!mate/p=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!mate/m=tokens!, 'layerInfos');

# diag "No test for mate dependency";

## XIP
$tokens->add('XIP', 'Sentences');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/sentences!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/s=spans!, 'layerInfos');

$tokens->add('XIP', 'Morpho');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/morpho!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/l=tokens!, 'layerInfos');
like($output->{data}->{layerInfos}, qr!xip/p=tokens!, 'layerInfos');

$tokens->add('XIP', 'Constituency');
$output = decode_json( $tokens->to_json );
like($output->{data}->{foundries}, qr!xip/constituency!, 'Foundries');
like($output->{data}->{layerInfos}, qr!xip/c=spans!, 'layerInfos');

# diag "No test for xip dependency";

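## Second document: WDD15/A79/83946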
$path = catdir(dirname(__FILE__), '../corpus/WDD15/A79/83946');

ok($doc = KorAP::XML::Krill->new( path => $path . '/' ), 'Load KorAP::XML::Krill document');
ok($doc->parse, 'Parse document');

is($doc->text_sigle, 'WDD15/A79/83946', 'Correct text sigle');
is($doc->doc_sigle, 'WDD15/A79', 'Correct document sigle');
is($doc->corpus_sigle, 'WDD15', 'Correct corpus sigle');

# Get tokenization
$tokens = KorAP::XML::Tokenizer->new(
  path => $doc->path,
  doc => $doc,
  foundry => $token_base_foundry,
  layer => $token_base_layer,
  name => 'tokens'
);
ok($tokens, 'Token Object is fine');

# Initialize log4perl object (debug output to STDERR)
Log::Log4perl->init({
  'log4perl.rootLogger' => 'DEBUG, STDERR',
  'log4perl.appender.STDERR' => 'Log::Log4perl::Appender::ScreenColoredLevels',
  'log4perl.appender.STDERR.layout' => 'PatternLayout',
  'log4perl.appender.STDERR.layout.ConversionPattern' => '[%r] %F %L %c - %m%n'
});

# Tokenization of this document is expected to fail
ok(!$tokens->parse, 'Token parsing fails');
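# Optionally report the elapsed wall-clock time of this test file;
# timediff and timestr are provided by the Benchmark module loaded above.
# diag "Conversion took " . timestr(timediff(Benchmark->new, $t));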
done_testing;
__END__